Dataset schema (column name and dtype):

| Column | Dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
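As a quick orientation, the sketch below shows one way such a record could be inspected. The parquet path is a placeholder (this dump does not name its source file), and pandas is an assumption; only standard pandas calls are used.

```python
import pandas as pd

# Placeholder path: substitute the actual shard this record came from.
df = pd.read_parquet("data/shard-00000.parquet")

# Print every column with its pandas dtype, mirroring the schema table above.
for name, dtype in df.dtypes.items():
    print(f"{name}: {dtype}")

# The quality-signal columns share the `qsc_` prefix and can be selected together.
qsc_cols = [c for c in df.columns if c.startswith("qsc_")]
print(f"{len(qsc_cols)} quality-signal columns")
```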
Record shown in this dump (repository metadata; the `content` field is reproduced in full below):

| Field | Value |
|---|---|
| hexsha | d9a1f3b0cf83d1115ed19f3acdb5e35f75ece5c0 |
| size | 252,781 |
| ext | py |
| lang | Python |
| max_stars_repo_path | kubernetes_asyncio/client/api/rbac_authorization_v1_api.py |
| max_stars_repo_name | dineshsonachalam/kubernetes_asyncio |
| max_stars_repo_head_hexsha | d57e9e9be11f6789e1ce8d5b161acb64d29acf35 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2021-02-25T04:36:18.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-02-25T04:36:18.000Z |
| max_issues_repo_path | kubernetes_asyncio/client/api/rbac_authorization_v1_api.py |
| max_issues_repo_name | hubo1016/kubernetes_asyncio |
| max_issues_repo_head_hexsha | d57e9e9be11f6789e1ce8d5b161acb64d29acf35 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | kubernetes_asyncio/client/api/rbac_authorization_v1_api.py |
| max_forks_repo_name | hubo1016/kubernetes_asyncio |
| max_forks_repo_head_hexsha | d57e9e9be11f6789e1ce8d5b161acb64d29acf35 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |
| content | Python source file, reproduced below |
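The repo metadata above is enough to point back at the exact revision the file was taken from. A minimal sketch, assuming only the standard github.com blob-URL layout (the record itself does not store a URL):

```python
# Rebuild a GitHub link to the exact file revision described by this record.
# Values are copied from the record; the URL layout is GitHub's usual blob convention.
repo = "dineshsonachalam/kubernetes_asyncio"             # max_stars_repo_name
sha = "d57e9e9be11f6789e1ce8d5b161acb64d29acf35"         # max_stars_repo_head_hexsha
path = "kubernetes_asyncio/client/api/rbac_authorization_v1_api.py"  # max_stars_repo_path

url = f"https://github.com/{repo}/blob/{sha}/{path}"
print(url)
```

The full `content` field follows.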
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.12.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kubernetes_asyncio.client.api_client import ApiClient
class RbacAuthorizationV1Api(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_cluster_role(self, body, **kwargs): # noqa: E501
"""create_cluster_role # noqa: E501
create a ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_role(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1ClusterRole body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_role_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_role_with_http_info(body, **kwargs) # noqa: E501
return data
def create_cluster_role_with_http_info(self, body, **kwargs): # noqa: E501
"""create_cluster_role # noqa: E501
create a ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_role_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1ClusterRole body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_cluster_role_binding(self, body, **kwargs): # noqa: E501
"""create_cluster_role_binding # noqa: E501
create a ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_role_binding(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1ClusterRoleBinding body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_cluster_role_binding_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_cluster_role_binding_with_http_info(body, **kwargs) # noqa: E501
return data
def create_cluster_role_binding_with_http_info(self, body, **kwargs): # noqa: E501
"""create_cluster_role_binding # noqa: E501
create a ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_cluster_role_binding_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1ClusterRoleBinding body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role(self, namespace, body, **kwargs): # noqa: E501
"""create_namespaced_role # noqa: E501
create a Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_role(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_namespaced_role_with_http_info(namespace, body, **kwargs) # noqa: E501
else:
(data) = self.create_namespaced_role_with_http_info(namespace, body, **kwargs) # noqa: E501
return data
def create_namespaced_role_with_http_info(self, namespace, body, **kwargs): # noqa: E501
"""create_namespaced_role # noqa: E501
create a Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_role_with_http_info(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role_binding(self, namespace, body, **kwargs): # noqa: E501
"""create_namespaced_role_binding # noqa: E501
create a RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_role_binding(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs) # noqa: E501
else:
(data) = self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs) # noqa: E501
return data
def create_namespaced_role_binding_with_http_info(self, namespace, body, **kwargs): # noqa: E501
"""create_namespaced_role_binding # noqa: E501
create a RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_role_binding_with_http_info(namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'include_uninitialized', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cluster_role(self, name, body, **kwargs): # noqa: E501
"""delete_cluster_role # noqa: E501
delete a ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cluster_role(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.delete_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
return data
def delete_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501
"""delete_cluster_role # noqa: E501
delete a ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cluster_role_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cluster_role_binding(self, name, body, **kwargs): # noqa: E501
"""delete_cluster_role_binding # noqa: E501
delete a ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cluster_role_binding(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.delete_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
return data
def delete_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501
"""delete_cluster_role_binding # noqa: E501
delete a ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cluster_role_binding_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_cluster_role(self, **kwargs): # noqa: E501
"""delete_collection_cluster_role # noqa: E501
delete collection of ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_cluster_role(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_cluster_role_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_collection_cluster_role_with_http_info(**kwargs) # noqa: E501
return data
def delete_collection_cluster_role_with_http_info(self, **kwargs): # noqa: E501
"""delete_collection_cluster_role # noqa: E501
delete collection of ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_cluster_role_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_cluster_role" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_cluster_role_binding(self, **kwargs): # noqa: E501
"""delete_collection_cluster_role_binding # noqa: E501
delete collection of ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_cluster_role_binding(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_cluster_role_binding_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_collection_cluster_role_binding_with_http_info(**kwargs) # noqa: E501
return data
def delete_collection_cluster_role_binding_with_http_info(self, **kwargs): # noqa: E501
"""delete_collection_cluster_role_binding # noqa: E501
delete collection of ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_cluster_role_binding_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
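# Usage sketch (illustrative only): the *_with_http_info variant accepts the generic
# transport kwargs handled above (_return_http_data_only, _preload_content,
# _request_timeout). The example reuses the `rbac` instance from the earlier sketch;
# the exact return shape follows this generator's conventions, so treat it as an
# assumption rather than a guarantee.
#
#   # Bound the HTTP call to 30 seconds and skip response deserialization,
#   # leaving the raw response object for the caller to inspect.
#   raw = rbac.delete_collection_cluster_role_binding_with_http_info(
#       _preload_content=False,
#       _request_timeout=30,
#   )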
def delete_collection_namespaced_role(self, namespace, **kwargs): # noqa: E501
"""delete_collection_namespaced_role # noqa: E501
delete collection of Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_namespaced_role(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501
else:
data = self.delete_collection_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501
return data
def delete_collection_namespaced_role_with_http_info(self, namespace, **kwargs): # noqa: E501
"""delete_collection_namespaced_role # noqa: E501
delete collection of Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_namespaced_role_with_http_info(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
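# Usage sketch (illustrative only): deleting every Role in one namespace that matches
# a label selector. The namespace and label values below are placeholders.
#
#   status = rbac.delete_collection_namespaced_role(
#       namespace='staging',
#       label_selector='app=demo',
#   )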
def delete_collection_namespaced_role_binding(self, namespace, **kwargs): # noqa: E501
"""delete_collection_namespaced_role_binding # noqa: E501
delete collection of RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_namespaced_role_binding(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501
else:
data = self.delete_collection_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501
return data
def delete_collection_namespaced_role_binding_with_http_info(self, namespace, **kwargs): # noqa: E501
"""delete_collection_namespaced_role_binding # noqa: E501
delete collection of RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_namespaced_role_binding_with_http_info(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_role(self, name, namespace, body, **kwargs): # noqa: E501
"""delete_namespaced_role # noqa: E501
delete a Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_role(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
data = self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def delete_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""delete_namespaced_role # noqa: E501
delete a Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_role_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
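# Usage sketch (illustrative only): deleting a single Role with an explicit
# V1DeleteOptions body, using propagation_policy to control how dependents are
# garbage collected (see the docstring above). The role name and namespace are
# placeholders.
#
#   body = client.V1DeleteOptions(propagation_policy='Foreground',
#                                 grace_period_seconds=0)
#   status = rbac.delete_namespaced_role(name='pod-reader',
#                                        namespace='default',
#                                        body=body)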
def delete_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501
"""delete_namespaced_role_binding # noqa: E501
delete a RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_role_binding(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
data = self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def delete_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""delete_namespaced_role_binding # noqa: E501
delete a RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds'])) # noqa: E501
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents'])) # noqa: E501
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs): # noqa: E501
"""get_api_resources # noqa: E501
get available resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_resources_with_http_info(**kwargs) # noqa: E501
else:
data = self.get_api_resources_with_http_info(**kwargs) # noqa: E501
return data
def get_api_resources_with_http_info(self, **kwargs): # noqa: E501
"""get_api_resources # noqa: E501
get available resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
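# Usage sketch (illustrative only): discovering which resources this RBAC API group
# serves. V1APIResourceList exposes a `resources` list whose entries carry `name`,
# `kind`, and `namespaced` attributes in the upstream models.
#
#   api_resources = rbac.get_api_resources()
#   for res in api_resources.resources:
#       print(res.name, res.kind, res.namespaced)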
def list_cluster_role(self, **kwargs): # noqa: E501
"""list_cluster_role # noqa: E501
list or watch objects of kind ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_role(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_cluster_role_with_http_info(**kwargs) # noqa: E501
else:
data = self.list_cluster_role_with_http_info(**kwargs) # noqa: E501
return data
def list_cluster_role_with_http_info(self, **kwargs): # noqa: E501
"""list_cluster_role # noqa: E501
list or watch objects of kind ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_role_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart its list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, and the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent with the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (up to zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can be sure it sees all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, with no guarantee; - if set to non-zero, then the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
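    # Example (hedged): chunked listing of ClusterRoles via `limit` and `_continue`,
    # as described in the docstring above. A minimal sketch only, assuming cluster
    # credentials are already loaded (e.g. kubernetes.config.load_kube_config()) and
    # that `api` is an instance of this API class:
    #
    #     api = client.RbacAuthorizationV1Api()   # class name assumed from this module
    #     page = api.list_cluster_role(limit=50)
    #     names = [item.metadata.name for item in page.items]
    #     while page.metadata._continue:          # empty/None means no more pages
    #         page = api.list_cluster_role(limit=50, _continue=page.metadata._continue)
    #         names.extend(item.metadata.name for item in page.items)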
def list_cluster_role_binding(self, **kwargs): # noqa: E501
"""list_cluster_role_binding # noqa: E501
list or watch objects of kind ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_role_binding(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_cluster_role_binding_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_cluster_role_binding_with_http_info(**kwargs) # noqa: E501
return data
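    # Example (hedged): restrict the ClusterRoleBinding listing with selectors.
    # Sketch only, assuming a configured `api` client as above; the label value
    # is illustrative:
    #
    #     bindings = api.list_cluster_role_binding(
    #         label_selector="app.kubernetes.io/managed-by=helm",
    #         timeout_seconds=30,
    #     )
    #     for b in bindings.items:
    #         print(b.metadata.name, b.role_ref.name)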
def list_cluster_role_binding_with_http_info(self, **kwargs): # noqa: E501
"""list_cluster_role_binding # noqa: E501
list or watch objects of kind ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_cluster_role_binding_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBindingList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_role(self, namespace, **kwargs): # noqa: E501
"""list_namespaced_role # noqa: E501
list or watch objects of kind Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_role(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501
else:
(data) = self.list_namespaced_role_with_http_info(namespace, **kwargs) # noqa: E501
return data
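    # Example (hedged): list Roles in a single namespace. Sketch only, assuming a
    # configured `api` client; any existing namespace name can replace "kube-system":
    #
    #     roles = api.list_namespaced_role("kube-system")
    #     print([r.metadata.name for r in roles.items])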
def list_namespaced_role_with_http_info(self, namespace, **kwargs): # noqa: E501
"""list_namespaced_role # noqa: E501
list or watch objects of kind Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_role_with_http_info(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_role_binding(self, namespace, **kwargs): # noqa: E501
"""list_namespaced_role_binding # noqa: E501
list or watch objects of kind RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_role_binding(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501
else:
(data) = self.list_namespaced_role_binding_with_http_info(namespace, **kwargs) # noqa: E501
return data
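    # Example (hedged): list RoleBindings in a namespace, narrowed by name with a
    # field selector. Sketch only; "metadata.name=read-pods" is an illustrative value:
    #
    #     rbs = api.list_namespaced_role_binding(
    #         "default", field_selector="metadata.name=read-pods")
    #     for rb in rbs.items:
    #         print(rb.metadata.name, [s.name for s in (rb.subjects or [])])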
def list_namespaced_role_binding_with_http_info(self, namespace, **kwargs): # noqa: E501
"""list_namespaced_role_binding # noqa: E501
list or watch objects of kind RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_role_binding_with_http_info(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_role_binding_for_all_namespaces(self, **kwargs): # noqa: E501
"""list_role_binding_for_all_namespaces # noqa: E501
list or watch objects of kind RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_role_binding_for_all_namespaces(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_role_binding_for_all_namespaces_with_http_info(**kwargs) # noqa: E501
return data
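    # Example (hedged): asynchronous call, following the doctest in the docstring
    # above. The returned thread comes from the generated ApiClient's worker pool;
    # calling .get() blocks until the response (a V1RoleBindingList) is ready:
    #
    #     thread = api.list_role_binding_for_all_namespaces(async_req=True)
    #     all_bindings = thread.get()
    #     print(len(all_bindings.items))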
def list_role_binding_for_all_namespaces_with_http_info(self, **kwargs): # noqa: E501
"""list_role_binding_for_all_namespaces # noqa: E501
list or watch objects of kind RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_role_binding_for_all_namespaces_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_binding_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/rolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_role_for_all_namespaces(self, **kwargs): # noqa: E501
"""list_role_for_all_namespaces # noqa: E501
list or watch objects of kind Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_role_for_all_namespaces(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_role_for_all_namespaces_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_role_for_all_namespaces_with_http_info(**kwargs) # noqa: E501
return data
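    # Example (hedged): stream Role changes across all namespaces. This sketch
    # assumes the companion kubernetes.watch helper that normally ships alongside
    # this client is available; streamed events are dicts with "type" and "object":
    #
    #     from kubernetes import watch
    #     w = watch.Watch()
    #     for event in w.stream(api.list_role_for_all_namespaces, timeout_seconds=60):
    #         print(event["type"], event["object"].metadata.name)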
def list_role_for_all_namespaces_with_http_info(self, **kwargs): # noqa: E501
"""list_role_for_all_namespaces # noqa: E501
list or watch objects of kind Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_role_for_all_namespaces_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if '_continue' in params:
query_params.append(('continue', params['_continue'])) # noqa: E501
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector'])) # noqa: E501
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized'])) # noqa: E501
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version'])) # noqa: E501
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501
if 'watch' in params:
query_params.append(('watch', params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/roles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
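# Usage sketch (illustrative, not part of the generated client; assumes a
# configured RbacAuthorizationV1Api instance named `rbac_api`): page through
# every Role in the cluster with `limit` and the server-issued `continue`
# token described above.
#
#     roles, token = [], None
#     while True:
#         page = rbac_api.list_role_for_all_namespaces(limit=100, _continue=token)
#         roles.extend(page.items)
#         token = page.metadata._continue
#         if not token:
#             break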
def patch_cluster_role(self, name, body, **kwargs): # noqa: E501
"""patch_cluster_role # noqa: E501
partially update the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_role(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
return data
def patch_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501
"""patch_cluster_role # noqa: E501
partially update the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_role_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
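# Usage sketch (illustrative, not part of the generated client; assumes a
# configured RbacAuthorizationV1Api instance named `rbac_api`): apply a merge
# patch to a ClusterRole, optionally as a server-side dry run. The body is a
# plain dict; the Content-Type negotiated above determines how it is applied
# (JSON patch, merge patch, or strategic merge patch).
#
#     patch = {"metadata": {"labels": {"team": "platform"}}}
#     rbac_api.patch_cluster_role("example-cluster-role", patch)
#     # Validate without persisting:
#     rbac_api.patch_cluster_role("example-cluster-role", patch, dry_run="All")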
def patch_cluster_role_binding(self, name, body, **kwargs): # noqa: E501
"""patch_cluster_role_binding # noqa: E501
partially update the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_role_binding(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
return data
def patch_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501
"""patch_cluster_role_binding # noqa: E501
partially update the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_cluster_role_binding_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_role(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_role # noqa: E501
partially update the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_role(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def patch_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_role # noqa: E501
partially update the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_role_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_role_binding # noqa: E501
partially update the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_role_binding(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def patch_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_role_binding # noqa: E501
partially update the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_cluster_role(self, name, **kwargs): # noqa: E501
"""read_cluster_role # noqa: E501
read the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster_role(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_cluster_role_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.read_cluster_role_with_http_info(name, **kwargs) # noqa: E501
return data
def read_cluster_role_with_http_info(self, name, **kwargs): # noqa: E501
"""read_cluster_role # noqa: E501
read the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster_role_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_cluster_role_binding(self, name, **kwargs): # noqa: E501
"""read_cluster_role_binding # noqa: E501
read the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster_role_binding(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.read_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501
return data
def read_cluster_role_binding_with_http_info(self, name, **kwargs): # noqa: E501
"""read_cluster_role_binding # noqa: E501
read the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_cluster_role_binding_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_role # noqa: E501
read the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_role(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_namespaced_role_with_http_info(name, namespace, **kwargs) # noqa: E501
else:
(data) = self.read_namespaced_role_with_http_info(name, namespace, **kwargs) # noqa: E501
return data
def read_namespaced_role_with_http_info(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_role # noqa: E501
read the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_role_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role_binding(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_role_binding # noqa: E501
read the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_role_binding(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs) # noqa: E501
else:
(data) = self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs) # noqa: E501
return data
def read_namespaced_role_binding_with_http_info(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_role_binding # noqa: E501
read the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_role_binding_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_cluster_role(self, name, body, **kwargs): # noqa: E501
"""replace_cluster_role # noqa: E501
replace the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_cluster_role(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_cluster_role_with_http_info(name, body, **kwargs) # noqa: E501
return data
def replace_cluster_role_with_http_info(self, name, body, **kwargs): # noqa: E501
"""replace_cluster_role # noqa: E501
replace the specified ClusterRole # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_cluster_role_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRole (required)
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterroles/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
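# Usage sketch (illustrative, not part of the generated client; assumes
# `from kubernetes import client` and a configured RbacAuthorizationV1Api
# instance named `rbac_api`): replace is a full update, so read the current
# ClusterRole, modify it, and send the whole object back; the embedded
# resourceVersion guards against overwriting concurrent changes.
#
#     role = rbac_api.read_cluster_role("example-cluster-role")
#     role.rules.append(client.V1PolicyRule(
#         api_groups=[""], resources=["pods"], verbs=["get", "list"]))
#     rbac_api.replace_cluster_role("example-cluster-role", role)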
def replace_cluster_role_binding(self, name, body, **kwargs): # noqa: E501
"""replace_cluster_role_binding # noqa: E501
replace the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_cluster_role_binding(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_cluster_role_binding_with_http_info(name, body, **kwargs) # noqa: E501
return data
def replace_cluster_role_binding_with_http_info(self, name, body, **kwargs): # noqa: E501
"""replace_cluster_role_binding # noqa: E501
replace the specified ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_cluster_role_binding_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role(self, name, namespace, body, **kwargs): # noqa: E501
"""replace_namespaced_role # noqa: E501
replace the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_role(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def replace_namespaced_role_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""replace_namespaced_role # noqa: E501
replace the specified Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_role_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/roles/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role_binding(self, name, namespace, body, **kwargs): # noqa: E501
"""replace_namespaced_role_binding # noqa: E501
replace the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_role_binding(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data
def replace_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs): # noqa: E501
"""replace_namespaced_role_binding # noqa: E501
replace the specified RoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_role_binding_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role_binding`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role_binding`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
if 'namespace' in params:
path_params['namespace'] = params['namespace'] # noqa: E501
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/rbac.authorization.k8s.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
hexsha: d9ca842cbdc63c54359e746c423beca4af1124b3
size: 118,727
ext: py
lang: Python
max_stars_repo_path: octavia/tests/unit/controller/worker/v2/tasks/test_database_tasks.py
max_stars_repo_name: mauroseb/octavia
max_stars_repo_head_hexsha: 8f032d884e0f89ac69d5b6e5f5b77d19ee6eb1d7
max_stars_repo_licenses: ["Apache-2.0"]
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import random
from cryptography import fernet
import mock
from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from taskflow.types import failure
from octavia.common import constants
from octavia.common import data_models
from octavia.common import utils
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base
AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
SERVER_GROUP_ID = uuidutils.generate_uuid()
LB_NET_IP = '192.0.2.2'
LISTENER_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()
MEMBER_ID = uuidutils.generate_uuid()
PORT_ID = uuidutils.generate_uuid()
SUBNET_ID = uuidutils.generate_uuid()
VRRP_PORT_ID = uuidutils.generate_uuid()
HA_PORT_ID = uuidutils.generate_uuid()
L7POLICY_ID = uuidutils.generate_uuid()
L7RULE_ID = uuidutils.generate_uuid()
VIP_IP = '192.0.5.2'
VRRP_IP = '192.0.5.3'
HA_IP = '192.0.5.4'
AMP_ROLE = 'FAKE_ROLE'
VRRP_ID = random.randrange(255)
VRRP_PRIORITY = random.randrange(100)
CACHED_ZONE = 'zone1'
IMAGE_ID = uuidutils.generate_uuid()
COMPUTE_FLAVOR = uuidutils.generate_uuid()
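# Module-level mock objects that stand in for amphora, load balancer,
# listener, L7 policy/rule, VIP and compute records in the tests.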
_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_amphora_mock.lb_network_ip = LB_NET_IP
_amphora_mock.vrrp_ip = VRRP_IP
_amphora_mock.ha_ip = HA_IP
_amphora_mock.ha_port_id = HA_PORT_ID
_amphora_mock.vrrp_port_id = VRRP_PORT_ID
_amphora_mock.role = AMP_ROLE
_amphora_mock.vrrp_id = VRRP_ID
_amphora_mock.vrrp_priority = VRRP_PRIORITY
_amphorae = [_amphora_mock]
_loadbalancer_mock = mock.MagicMock()
_loadbalancer_mock.id = LB_ID
_loadbalancer_mock.amphorae = [_amphora_mock]
_l7policy_mock = mock.MagicMock()
_l7policy_mock.id = L7POLICY_ID
_l7rule_mock = mock.MagicMock()
_l7rule_mock.id = L7RULE_ID
_listener_mock = mock.MagicMock()
_listener_to_dict_mock = mock.MagicMock(
return_value={'id': LISTENER_ID})
_listener_mock.id = LISTENER_ID
_listener_mock.to_dict = _listener_to_dict_mock
_tf_failure_mock = mock.Mock(spec=failure.Failure)
_vip_mock = mock.MagicMock()
_vip_mock.port_id = PORT_ID
_vip_mock.subnet_id = SUBNET_ID
_vip_mock.ip_address = VIP_IP
_vrrp_group_mock = mock.MagicMock()
_cert_mock = mock.MagicMock()
_compute_mock = mock.MagicMock()
_compute_mock.lb_network_ip = LB_NET_IP
_compute_mock.cached_zone = CACHED_ZONE
_compute_mock.image_id = IMAGE_ID
_compute_mock.compute_flavor = COMPUTE_FLAVOR
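# The class-level patches below apply to every test method. mock.patch
# passes mocks bottom-up, so any method-level mock arrives first, followed
# by mock_generate_uuid through mock_amphora_repo_delete in that order.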
@mock.patch('octavia.db.repositories.AmphoraRepository.delete')
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
class TestDatabaseTasks(base.TestCase):
def setUp(self):
self.health_mon_mock = mock.MagicMock()
self.health_mon_mock.id = HM_ID
self.health_mon_mock.pool_id = POOL_ID
self.listener_mock = mock.MagicMock()
self.listener_mock.id = LISTENER_ID
self.loadbalancer_mock = mock.MagicMock()
self.loadbalancer_mock.id = LB_ID
self.member_mock = mock.MagicMock()
self.member_mock.id = MEMBER_ID
self.db_pool_mock = mock.MagicMock()
self.db_pool_mock.id = POOL_ID
self.db_pool_mock.health_monitor = self.health_mon_mock
self.member_mock = {
constants.MEMBER_ID: MEMBER_ID,
constants.POOL_ID: POOL_ID,
}
self.l7policy_mock = mock.MagicMock()
self.l7policy_mock.id = L7POLICY_ID
self.l7rule_mock = mock.MagicMock()
self.l7rule_mock.id = L7RULE_ID
self.l7rule_mock.l7policy = self.l7policy_mock
super(TestDatabaseTasks, self).setUp()
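# Most tests below instantiate a task, call execute(), then exercise
# revert(), including the path where the repository call raises.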
@mock.patch('octavia.db.repositories.AmphoraRepository.create',
return_value=_amphora_mock)
def test_create_amphora_in_db(self,
mock_create,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
create_amp_in_db = database_tasks.CreateAmphoraInDB()
amp_id = create_amp_in_db.execute()
repo.AmphoraRepository.create.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.PENDING_CREATE,
cert_busy=False)
self.assertEqual(_amphora_mock.id, amp_id)
# Test the revert
create_amp_in_db.revert(_tf_failure_mock)
self.assertFalse(mock_amphora_repo_delete.called)
mock_amphora_repo_delete.reset_mock()
create_amp_in_db.revert(result='AMP')
self.assertTrue(mock_amphora_repo_delete.called)
mock_amphora_repo_delete.assert_called_once_with(
'TEST',
id='AMP')
# Test revert with exception
mock_amphora_repo_delete.reset_mock()
mock_amphora_repo_delete.side_effect = Exception('fail')
create_amp_in_db.revert(result='AMP')
self.assertTrue(mock_amphora_repo_delete.called)
mock_amphora_repo_delete.assert_called_once_with(
'TEST',
id='AMP')
@mock.patch('octavia.db.repositories.ListenerRepository.delete')
def test_delete_listener_in_db(self,
mock_listener_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_listener = database_tasks.DeleteListenerInDB()
delete_listener.execute({constants.LISTENER_ID: LISTENER_ID})
repo.ListenerRepository.delete.assert_called_once_with(
'TEST',
id=LISTENER_ID)
# Test the revert
repo.ListenerRepository.delete.reset_mock()
delete_listener.revert({constants.LISTENER_ID: LISTENER_ID})
repo.ListenerRepository.delete.assert_not_called()
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')
def test_delete_health_monitor_in_db(self,
mock_health_mon_repo_delete,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_health_mon = database_tasks.DeleteHealthMonitorInDB()
delete_health_mon.execute(self.health_mon_mock)
repo.HealthMonitorRepository.delete.assert_called_once_with(
'TEST', id=HM_ID)
# Test the revert
mock_health_mon_repo_delete.reset_mock()
delete_health_mon.revert(self.health_mon_mock)
repo.HealthMonitorRepository.update.assert_called_once_with(
'TEST', id=HM_ID, provisioning_status=constants.ERROR)
# Test Not Found Exception
mock_health_mon_repo_delete.reset_mock()
mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()]
delete_health_mon.execute(self.health_mon_mock)
repo.HealthMonitorRepository.delete.assert_called_once_with(
'TEST', id=HM_ID)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')
def test_delete_health_monitor_in_db_by_pool(self,
mock_health_mon_repo_delete,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool()
delete_health_mon.execute(self.db_pool_mock)
repo.HealthMonitorRepository.delete.assert_called_once_with(
'TEST',
id=HM_ID)
# Test the revert
mock_health_mon_repo_delete.reset_mock()
delete_health_mon.revert(self.db_pool_mock)
repo.HealthMonitorRepository.update.assert_called_once_with(
'TEST', id=HM_ID, provisioning_status=constants.ERROR)
# TODO(johnsom) fix once provisioning status added
# repo.HealthMonitorRepository.update.assert_called_once_with(
# 'TEST',
# POOL_ID,
# provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.delete')
def test_delete_member_in_db(self,
mock_member_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_member = database_tasks.DeleteMemberInDB()
delete_member.execute(self.member_mock)
repo.MemberRepository.delete.assert_called_once_with(
'TEST',
id=MEMBER_ID)
# Test the revert
mock_member_repo_delete.reset_mock()
delete_member.revert(self.member_mock)
# TODO(johnsom) Fix
# repo.MemberRepository.delete.assert_called_once_with(
# 'TEST',
# MEMBER_ID)
@mock.patch('octavia.db.repositories.PoolRepository.delete')
def test_delete_pool_in_db(self,
mock_pool_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_pool = database_tasks.DeletePoolInDB()
delete_pool.execute(POOL_ID)
repo.PoolRepository.delete.assert_called_once_with(
'TEST',
id=POOL_ID)
# Test the revert
mock_pool_repo_delete.reset_mock()
delete_pool.revert(POOL_ID)
# TODO(johnsom) Fix
# repo.PoolRepository.update.assert_called_once_with(
# 'TEST',
# POOL_ID,
# operating_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7PolicyRepository.delete')
def test_delete_l7policy_in_db(self,
mock_l7policy_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_l7policy = database_tasks.DeleteL7PolicyInDB()
delete_l7policy.execute(_l7policy_mock)
repo.L7PolicyRepository.delete.assert_called_once_with(
'TEST',
id=L7POLICY_ID)
# Test the revert
mock_l7policy_repo_delete.reset_mock()
delete_l7policy.revert(_l7policy_mock)
# TODO(sbalukoff) Fix
# repo.ListenerRepository.update.assert_called_once_with(
# 'TEST',
# LISTENER_ID,
# operating_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.delete')
def test_delete_l7rule_in_db(self,
mock_l7rule_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
delete_l7rule = database_tasks.DeleteL7RuleInDB()
delete_l7rule.execute(_l7rule_mock)
repo.L7RuleRepository.delete.assert_called_once_with(
'TEST',
id=L7RULE_ID)
# Test the revert
mock_l7rule_repo_delete.reset_mock()
delete_l7rule.revert(_l7rule_mock)
# TODO(sbalukoff) Fix
# repo.ListenerRepository.update.assert_called_once_with(
# 'TEST',
# LISTENER_ID,
# operating_status=constants.ERROR)
@mock.patch('octavia.db.repositories.AmphoraRepository.get',
return_value=_amphora_mock)
def test_reload_amphora(self,
mock_amp_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
reload_amp = database_tasks.ReloadAmphora()
amp = reload_amp.execute(AMP_ID)
repo.AmphoraRepository.get.assert_called_once_with(
'TEST',
id=AMP_ID)
self.assertEqual(_amphora_mock, amp)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.get',
return_value=_loadbalancer_mock)
def test_reload_load_balancer(self,
mock_lb_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
reload_lb = database_tasks.ReloadLoadBalancer()
lb = reload_lb.execute(LB_ID)
repo.LoadBalancerRepository.get.assert_called_once_with(
'TEST',
id=LB_ID)
self.assertEqual(_loadbalancer_mock, lb)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.get',
return_value=_loadbalancer_mock)
@mock.patch('octavia.db.repositories.VipRepository.update')
def test_update_vip_after_allocation(self,
mock_vip_update,
mock_loadbalancer_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_vip = database_tasks.UpdateVIPAfterAllocation()
loadbalancer = update_vip.execute(LB_ID, _vip_mock)
self.assertEqual(_loadbalancer_mock, loadbalancer)
mock_vip_update.assert_called_once_with('TEST',
LB_ID,
port_id=PORT_ID,
subnet_id=SUBNET_ID,
ip_address=VIP_IP)
mock_loadbalancer_get.assert_called_once_with('TEST',
id=LB_ID)
def test_update_amphora_vip_data(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData()
update_amp_vip_data.execute(_amphorae)
mock_amphora_repo_update.assert_called_once_with(
'TEST',
AMP_ID,
vrrp_ip=VRRP_IP,
ha_ip=HA_IP,
vrrp_port_id=VRRP_PORT_ID,
ha_port_id=HA_PORT_ID,
vrrp_id=1)
def test_update_amphora_vip_data2(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData()
update_amp_vip_data2.execute(_amphorae[0])
mock_amphora_repo_update.assert_called_once_with(
'TEST',
AMP_ID,
vrrp_ip=VRRP_IP,
ha_ip=HA_IP,
vrrp_port_id=VRRP_PORT_ID,
ha_port_id=HA_PORT_ID,
vrrp_id=1)
def test_update_amp_failover_details(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails()
update_amp_fo_details.execute(_amphora_mock, _amphora_mock)
mock_amphora_repo_update.assert_called_once_with(
'TEST',
AMP_ID,
vrrp_ip=VRRP_IP,
ha_ip=HA_IP,
vrrp_port_id=VRRP_PORT_ID,
ha_port_id=HA_PORT_ID,
vrrp_id=VRRP_ID)
@mock.patch('octavia.db.repositories.AmphoraRepository.associate')
def test_associate_failover_amphora_with_lb_id(
self,
mock_associate,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID()
assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID)
mock_associate.assert_called_once_with('TEST',
load_balancer_id=LB_ID,
amphora_id=AMP_ID)
# Test revert
assoc_fo_amp_lb_id.revert(AMP_ID)
mock_amphora_repo_update.assert_called_once_with('TEST',
AMP_ID,
loadbalancer_id=None)
# Test revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
assoc_fo_amp_lb_id.revert(AMP_ID)
mock_amphora_repo_update.assert_called_once_with('TEST',
AMP_ID,
loadbalancer_id=None)
@mock.patch('octavia.db.repositories.AmphoraRepository.'
'allocate_and_associate',
side_effect=[_amphora_mock, None])
def test_map_loadbalancer_to_amphora(self,
mock_allocate_and_associate,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora()
amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)
repo.AmphoraRepository.allocate_and_associate.assert_called_once_with(
'TEST',
LB_ID,
None)
self.assertEqual(_amphora_mock.id, amp_id)
amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)
self.assertIsNone(amp_id)
# Test revert
map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test revert with exception
repo.LoadBalancerRepository.update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.AmphoraRepository.'
'allocate_and_associate',
side_effect=[_amphora_mock, None])
def test_map_loadbalancer_to_amphora_with_az(self,
mock_allocate_and_associate,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora()
amp_id = map_lb_to_amp.execute(
self.loadbalancer_mock.id, availability_zone={
constants.COMPUTE_ZONE: 'fakeaz'})
repo.AmphoraRepository.allocate_and_associate.assert_called_once_with(
'TEST',
LB_ID,
'fakeaz')
self.assertEqual(_amphora_mock.id, amp_id)
amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)
self.assertIsNone(amp_id)
# Test revert
map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test revert with exception
repo.LoadBalancerRepository.update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.AmphoraRepository.get',
return_value=_amphora_mock)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.get',
return_value=_loadbalancer_mock)
def test_mark_lb_amphorae_deleted_in_db(self,
mock_loadbalancer_repo_get,
mock_amphora_repo_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_deleted_in_db = (database_tasks.
MarkLBAmphoraeDeletedInDB())
mark_amp_deleted_in_db.execute(_loadbalancer_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.DELETED)
@mock.patch('octavia.db.repositories.AmphoraRepository.get',
return_value=_amphora_mock)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.get',
return_value=_loadbalancer_mock)
def test_mark_amphora_allocated_in_db(self,
mock_loadbalancer_repo_get,
mock_amphora_repo_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_allocated_in_db = (database_tasks.
MarkAmphoraAllocatedInDB())
mark_amp_allocated_in_db.execute(_amphora_mock,
self.loadbalancer_mock.id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.AMPHORA_ALLOCATED,
compute_id=COMPUTE_ID,
lb_network_ip=LB_NET_IP,
load_balancer_id=LB_ID)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_allocated_in_db.revert(None, _amphora_mock,
self.loadbalancer_mock.id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_allocated_in_db.revert(None, _amphora_mock,
self.loadbalancer_mock.id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
def test_mark_amphora_booting_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB()
mark_amp_booting_in_db.execute(_amphora_mock.id,
_amphora_mock.compute_id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.AMPHORA_BOOTING,
compute_id=COMPUTE_ID)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_booting_in_db.revert(None, _amphora_mock.id,
_amphora_mock.compute_id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.ERROR,
compute_id=COMPUTE_ID)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_booting_in_db.revert(None, _amphora_mock.id,
_amphora_mock.compute_id)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.ERROR,
compute_id=COMPUTE_ID)
def test_mark_amphora_deleted_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB()
mark_amp_deleted_in_db.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.DELETED)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_deleted_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_deleted_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
def test_mark_amphora_pending_delete_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_pending_delete_in_db = (database_tasks.
MarkAmphoraPendingDeleteInDB())
mark_amp_pending_delete_in_db.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.PENDING_DELETE)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_pending_delete_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_pending_delete_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
def test_mark_amphora_pending_update_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_pending_update_in_db = (database_tasks.
MarkAmphoraPendingUpdateInDB())
mark_amp_pending_update_in_db.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.PENDING_UPDATE)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_pending_update_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_pending_update_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
id=AMP_ID,
status=constants.ERROR)
def test_mark_amphora_ready_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
_amphora_mock.lb_network_ip = LB_NET_IP
mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB()
mark_amp_ready_in_db.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.AMPHORA_READY,
compute_id=COMPUTE_ID,
lb_network_ip=LB_NET_IP)
# Test the revert
mock_amphora_repo_update.reset_mock()
mark_amp_ready_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.ERROR,
compute_id=COMPUTE_ID,
lb_network_ip=LB_NET_IP)
# Test the revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_ready_in_db.revert(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
status=constants.ERROR,
compute_id=COMPUTE_ID,
lb_network_ip=LB_NET_IP)
@mock.patch('octavia.db.repositories.AmphoraRepository.get')
def test_update_amphora_info(self,
mock_amphora_repo_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_amphora_info = database_tasks.UpdateAmphoraInfo()
update_amphora_info.execute(AMP_ID, _compute_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
lb_network_ip=LB_NET_IP,
cached_zone=CACHED_ZONE,
image_id=IMAGE_ID,
compute_flavor=COMPUTE_FLAVOR)
repo.AmphoraRepository.get.assert_called_once_with(
'TEST',
id=AMP_ID)
def test_mark_listener_deleted_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_listener_deleted = database_tasks.MarkListenerDeletedInDB()
mark_listener_deleted.execute(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
LISTENER_ID,
provisioning_status=constants.DELETED)
# Test the revert
mock_listener_repo_update.reset_mock()
mark_listener_deleted.revert(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_listener_repo_update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
mark_listener_deleted.revert(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
def test_mark_listener_pending_deleted_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_listener_pending_delete = (database_tasks.
MarkListenerPendingDeleteInDB())
mark_listener_pending_delete.execute(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
LISTENER_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_listener_repo_update.reset_mock()
mark_listener_pending_delete.revert(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_listener_repo_update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
mark_listener_pending_delete.revert(self.listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.ListenerRepository.'
'prov_status_active_if_not_error')
def test_mark_lb_and_listeners_active_in_db(self,
mock_list_not_error,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
listener_dict = {constants.LISTENER_ID: LISTENER_ID,
constants.LOADBALANCER_ID: LB_ID}
mark_lb_and_listeners_active = (database_tasks.
MarkLBAndListenersActiveInDB())
mark_lb_and_listeners_active.execute(LB_ID, [listener_dict])
mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.ACTIVE)
# Test with LB_ID from listeners
mock_loadbalancer_repo_update.reset_mock()
mock_list_not_error.reset_mock()
listener_dict = {constants.LISTENER_ID: LISTENER_ID,
constants.LOADBALANCER_ID: LB_ID}
mark_lb_and_listeners_active = (database_tasks.
MarkLBAndListenersActiveInDB())
mark_lb_and_listeners_active.execute(None, [listener_dict])
mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.ACTIVE)
# Test with no LB_ID
mock_loadbalancer_repo_update.reset_mock()
mark_lb_and_listeners_active.execute(None, [])
mock_loadbalancer_repo_update.assert_not_called()
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
mock_listener_repo_update.reset_mock()
mark_lb_and_listeners_active.revert(LB_ID, [listener_dict])
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test the revert LB_ID from listeners
mock_loadbalancer_repo_update.reset_mock()
mock_listener_repo_update.reset_mock()
mark_lb_and_listeners_active.revert(None, [listener_dict])
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test the revert no LB_ID
mock_loadbalancer_repo_update.reset_mock()
mock_listener_repo_update.reset_mock()
mark_lb_and_listeners_active.revert(None, [])
mock_loadbalancer_repo_update.assert_not_called()
mock_listener_repo_update.assert_not_called()
# Test the revert with exceptions
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
mock_listener_repo_update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
mark_lb_and_listeners_active.revert(LB_ID, [listener_dict])
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration',
return_value=_cert_mock)
def test_update_amphora_db_cert_exp(self,
mock_get_cert_exp,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration()
key = utils.get_six_compatible_server_certs_key_passphrase()
fer = fernet.Fernet(key)
_pem_mock = fer.encrypt(
utils.get_six_compatible_value('test_cert')
)
update_amp_cert.execute(_amphora_mock.id, _pem_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
cert_expiration=_cert_mock)
def test_update_amphora_cert_busy_to_false(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse()
amp_cert_busy_to_F.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST',
AMP_ID,
cert_busy=False)
def test_mark_LB_active_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_loadbalancer_active = database_tasks.MarkLBActiveInDB()
mark_loadbalancer_active.execute(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.ACTIVE)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
mark_loadbalancer_active.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
# Test the revert with exception
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
mark_loadbalancer_active.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
def test_mark_LB_active_in_db_by_listener(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
listener_dict = {'loadbalancer_id': LB_ID}
mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener()
mark_loadbalancer_active.execute(listener_dict)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.ACTIVE)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
mark_loadbalancer_active.revert(listener_dict)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
# Test the revert with exception
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
mark_loadbalancer_active.revert(listener_dict)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertEqual(0, repo.ListenerRepository.update.call_count)
def test_mark_LB_active_in_db_and_listeners(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
listeners = [data_models.Listener(id='listener1'),
data_models.Listener(id='listener2')]
lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners)
mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)
mark_lb_active.execute(lb)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
lb.id,
provisioning_status=constants.ACTIVE)
self.assertEqual(2, repo.ListenerRepository.update.call_count)
repo.ListenerRepository.update.assert_has_calls(
[mock.call('TEST', listeners[0].id,
provisioning_status=constants.ACTIVE),
mock.call('TEST', listeners[1].id,
provisioning_status=constants.ACTIVE)])
mock_loadbalancer_repo_update.reset_mock()
mock_listener_repo_update.reset_mock()
mark_lb_active.revert(lb)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=lb.id,
provisioning_status=constants.ERROR)
self.assertEqual(2, repo.ListenerRepository.update.call_count)
repo.ListenerRepository.update.assert_has_calls(
[mock.call('TEST', listeners[0].id,
provisioning_status=constants.ERROR),
mock.call('TEST', listeners[1].id,
provisioning_status=constants.ERROR)])
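# Exercise MarkLBActiveInDB(mark_subobjects=True) against a full object
# graph: listeners, pools, members, a health monitor, and L7 policies/rules.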
@mock.patch('octavia.db.repositories.PoolRepository.update')
@mock.patch('octavia.db.repositories.MemberRepository.update')
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
def test_mark_LB_active_in_db_full_graph(self,
mock_l7r_repo_update,
mock_l7p_repo_update,
mock_hm_repo_update,
mock_member_repo_update,
mock_pool_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
unused_pool = data_models.Pool(id='unused_pool')
members1 = [{constants.MEMBER_ID: 'member1'},
{constants.MEMBER_ID: 'member2'}]
health_monitor = data_models.HealthMonitor(id='hm1')
default_pool = data_models.Pool(id='default_pool',
members=members1,
health_monitor=health_monitor)
listener1 = data_models.Listener(id='listener1',
default_pool=default_pool)
members2 = [{constants.MEMBER_ID: 'member3'},
{constants.MEMBER_ID: 'member4'}]
redirect_pool = data_models.Pool(id='redirect_pool',
members=members2)
l7rules = [data_models.L7Rule(id='rule1')]
redirect_policy = data_models.L7Policy(id='redirect_policy',
redirect_pool=redirect_pool,
l7rules=l7rules)
l7policies = [redirect_policy]
listener2 = data_models.Listener(id='listener2',
l7policies=l7policies)
listener2.l7policies = l7policies
listeners = [listener1, listener2]
pools = [default_pool, redirect_pool, unused_pool]
lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners,
pools=pools)
mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)
mark_lb_active.execute(lb)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
lb.id,
provisioning_status=constants.ACTIVE)
self.assertEqual(2, repo.ListenerRepository.update.call_count)
repo.ListenerRepository.update.assert_has_calls(
[mock.call('TEST', listeners[0].id,
provisioning_status=constants.ACTIVE),
mock.call('TEST', listeners[1].id,
provisioning_status=constants.ACTIVE)])
self.assertEqual(2, repo.PoolRepository.update.call_count)
repo.PoolRepository.update.assert_has_calls(
[mock.call('TEST', default_pool.id,
provisioning_status=constants.ACTIVE),
mock.call('TEST', redirect_pool.id,
provisioning_status=constants.ACTIVE)])
self.assertEqual(1, repo.HealthMonitorRepository.update.call_count)
repo.HealthMonitorRepository.update.assert_has_calls(
[mock.call('TEST', health_monitor.id,
provisioning_status=constants.ACTIVE)])
self.assertEqual(1, repo.L7PolicyRepository.update.call_count)
repo.L7PolicyRepository.update.assert_has_calls(
[mock.call('TEST', l7policies[0].id,
provisioning_status=constants.ACTIVE)])
self.assertEqual(1, repo.L7RuleRepository.update.call_count)
repo.L7RuleRepository.update.assert_has_calls(
[mock.call('TEST', l7rules[0].id,
provisioning_status=constants.ACTIVE)])
mock_loadbalancer_repo_update.reset_mock()
mock_listener_repo_update.reset_mock()
mock_pool_repo_update.reset_mock()
mock_member_repo_update.reset_mock()
mock_hm_repo_update.reset_mock()
mock_l7p_repo_update.reset_mock()
mock_l7r_repo_update.reset_mock()
mark_lb_active.revert(lb)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=lb.id,
provisioning_status=constants.ERROR)
self.assertEqual(2, repo.ListenerRepository.update.call_count)
repo.ListenerRepository.update.assert_has_calls(
[mock.call('TEST', listeners[0].id,
provisioning_status=constants.ERROR),
mock.call('TEST', listeners[1].id,
provisioning_status=constants.ERROR)])
self.assertEqual(2, repo.PoolRepository.update.call_count)
repo.PoolRepository.update.assert_has_calls(
[mock.call('TEST', default_pool.id,
provisioning_status=constants.ERROR),
mock.call('TEST', redirect_pool.id,
provisioning_status=constants.ERROR)])
self.assertEqual(1, repo.HealthMonitorRepository.update.call_count)
repo.HealthMonitorRepository.update.assert_has_calls(
[mock.call('TEST', health_monitor.id,
provisioning_status=constants.ERROR)])
self.assertEqual(1, repo.L7PolicyRepository.update.call_count)
repo.L7PolicyRepository.update.assert_has_calls(
[mock.call('TEST', l7policies[0].id,
provisioning_status=constants.ERROR)])
self.assertEqual(1, repo.L7RuleRepository.update.call_count)
repo.L7RuleRepository.update.assert_has_calls(
[mock.call('TEST', l7rules[0].id,
provisioning_status=constants.ERROR)])
def test_mark_LB_deleted_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB()
mark_loadbalancer_deleted.execute(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.DELETED)
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
mark_loadbalancer_deleted.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
mark_loadbalancer_deleted.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
def test_mark_LB_pending_deleted_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_loadbalancer_pending_delete = (database_tasks.
MarkLBPendingDeleteInDB())
mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
def test_update_health_monitor_in_db(self,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_health_mon = database_tasks.UpdateHealthMonInDB()
update_health_mon.execute(self.health_mon_mock,
{'delay': 1, 'timeout': 2})
repo.HealthMonitorRepository.update.assert_called_once_with(
'TEST',
HM_ID,
delay=1, timeout=2)
# Test the revert
mock_health_mon_repo_update.reset_mock()
update_health_mon.revert(self.health_mon_mock)
repo.HealthMonitorRepository.update.assert_called_once_with(
'TEST',
HM_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_health_mon_repo_update.reset_mock()
mock_health_mon_repo_update.side_effect = Exception('fail')
update_health_mon.revert(self.health_mon_mock)
repo.HealthMonitorRepository.update.assert_called_once_with(
'TEST',
HM_ID,
provisioning_status=constants.ERROR)
def test_update_load_balancer_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_load_balancer = database_tasks.UpdateLoadbalancerInDB()
update_load_balancer.execute(self.loadbalancer_mock,
{'name': 'test', 'description': 'test2'})
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
name='test', description='test2')
# Test the revert
mock_loadbalancer_repo_update.reset_mock()
update_load_balancer.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_loadbalancer_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
update_load_balancer.revert(self.loadbalancer_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.VipRepository.update')
def test_update_vip_in_db_during_update_loadbalancer(self,
mock_vip_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_lb_update,
mock_listener_update,
mock_amphora_update,
mock_amphora_delete):
self.loadbalancer_mock.vip.load_balancer_id = LB_ID
update_load_balancer = database_tasks.UpdateLoadbalancerInDB()
update_load_balancer.execute(self.loadbalancer_mock,
{'name': 'test',
'description': 'test2',
'vip': {'qos_policy_id': 'fool'}})
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
LB_ID,
name='test', description='test2')
repo.VipRepository.update.assert_called_once_with('TEST', LB_ID,
qos_policy_id='fool')
def test_update_listener_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_listener = database_tasks.UpdateListenerInDB()
listener_dict = {constants.LISTENER_ID: LISTENER_ID}
update_listener.execute(listener_dict,
{'name': 'test', 'description': 'test2'})
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
LISTENER_ID,
name='test', description='test2')
# Test the revert
mock_listener_repo_update.reset_mock()
update_listener.revert(listener_dict)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
# Test the revert
mock_listener_repo_update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
update_listener.revert(listener_dict)
repo.ListenerRepository.update.assert_called_once_with(
'TEST',
id=LISTENER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update')
def test_update_member_in_db(self,
mock_member_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_member = database_tasks.UpdateMemberInDB()
update_member.execute(self.member_mock,
{'weight': 1, 'ip_address': '10.1.0.0'})
repo.MemberRepository.update.assert_called_once_with(
'TEST',
MEMBER_ID,
weight=1, ip_address='10.1.0.0')
# Test the revert
mock_member_repo_update.reset_mock()
update_member.revert(self.member_mock)
repo.MemberRepository.update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.ERROR)
# Test the revert
mock_member_repo_update.reset_mock()
mock_member_repo_update.side_effect = Exception('fail')
update_member.revert(self.member_mock)
repo.MemberRepository.update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.ERROR)
@mock.patch(
'octavia.db.repositories.Repositories.update_pool_and_sp')
def test_update_pool_in_db(self,
mock_repos_pool_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None}
update_dict = {'name': 'test', 'description': 'test2',
'session_persistence': sp_dict}
update_pool = database_tasks.UpdatePoolInDB()
update_pool.execute(POOL_ID,
update_dict)
repo.Repositories.update_pool_and_sp.assert_called_once_with(
'TEST',
POOL_ID,
update_dict)
# Test the revert
mock_repos_pool_update.reset_mock()
update_pool.revert(POOL_ID)
repo.Repositories.update_pool_and_sp.assert_called_once_with(
'TEST',
POOL_ID,
{'provisioning_status': constants.ERROR})
# Test the revert with exception
mock_repos_pool_update.reset_mock()
mock_repos_pool_update.side_effect = Exception('fail')
update_pool.revert(POOL_ID)
repo.Repositories.update_pool_and_sp.assert_called_once_with(
'TEST',
POOL_ID,
{'provisioning_status': constants.ERROR})
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_update_l7policy_in_db(self,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_l7policy = database_tasks.UpdateL7PolicyInDB()
update_l7policy.execute(self.l7policy_mock,
{'action': constants.L7POLICY_ACTION_REJECT})
repo.L7PolicyRepository.update.assert_called_once_with(
'TEST',
L7POLICY_ID,
action=constants.L7POLICY_ACTION_REJECT)
# Test the revert
mock_l7policy_repo_update.reset_mock()
update_l7policy.revert(self.l7policy_mock)
repo.L7PolicyRepository.update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert
mock_l7policy_repo_update.reset_mock()
mock_l7policy_repo_update.side_effect = Exception('fail')
update_l7policy.revert(self.l7policy_mock)
repo.L7PolicyRepository.update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_update_l7rule_in_db(self,
mock_l7rule_repo_update,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_l7rule = database_tasks.UpdateL7RuleInDB()
update_l7rule.execute(
self.l7rule_mock,
{'type': constants.L7RULE_TYPE_PATH,
'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
'value': '/api'})
repo.L7RuleRepository.update.assert_called_once_with(
'TEST',
L7RULE_ID,
type=constants.L7RULE_TYPE_PATH,
compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
value='/api')
# Test the revert
mock_l7rule_repo_update.reset_mock()
update_l7rule.revert(self.l7rule_mock)
repo.L7PolicyRepository.update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert
mock_l7rule_repo_update.reset_mock()
mock_l7rule_repo_update.side_effect = Exception('fail')
update_l7rule.revert(self.l7rule_mock)
repo.L7PolicyRepository.update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.ERROR)
def test_get_amphora_details(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
get_amp_details = database_tasks.GetAmphoraDetails()
new_amp = get_amp_details.execute(_amphora_mock)
self.assertEqual(AMP_ID, new_amp.id)
self.assertEqual(VRRP_IP, new_amp.vrrp_ip)
self.assertEqual(HA_IP, new_amp.ha_ip)
self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id)
self.assertEqual(AMP_ROLE, new_amp.role)
self.assertEqual(VRRP_ID, new_amp.vrrp_id)
self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority)
def test_mark_amphora_role_indb(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB()
mark_amp_master_indb.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role='MASTER',
vrrp_priority=constants.ROLE_MASTER_PRIORITY)
mock_amphora_repo_update.reset_mock()
mark_amp_master_indb.revert("BADRESULT", _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role=None, vrrp_priority=None)
mock_amphora_repo_update.reset_mock()
failure_obj = failure.Failure.from_exception(Exception("TESTEXCEPT"))
mark_amp_master_indb.revert(failure_obj, _amphora_mock)
self.assertFalse(repo.AmphoraRepository.update.called)
mock_amphora_repo_update.reset_mock()
mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB()
mark_amp_backup_indb.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role='BACKUP',
vrrp_priority=constants.ROLE_BACKUP_PRIORITY)
mock_amphora_repo_update.reset_mock()
mark_amp_backup_indb.revert("BADRESULT", _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role=None, vrrp_priority=None)
mock_amphora_repo_update.reset_mock()
mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB()
mark_amp_standalone_indb.execute(_amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role='STANDALONE',
vrrp_priority=None)
mock_amphora_repo_update.reset_mock()
mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role=None, vrrp_priority=None)
# Test revert with exception
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
'TEST', AMP_ID, role=None, vrrp_priority=None)
@mock.patch('octavia.db.repositories.AmphoraRepository.get')
def test_get_amphorae_from_loadbalancer(self,
mock_amphora_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
amp1 = mock.MagicMock()
amp1.id = uuidutils.generate_uuid()
amp2 = mock.MagicMock()
amp2.id = uuidutils.generate_uuid()
lb = mock.MagicMock()
lb.amphorae = [amp1, amp2]
mock_amphora_get.side_effect = [_amphora_mock, None]
get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer()
result = get_amps_from_lb_obj.execute(lb)
self.assertEqual([_amphora_mock], result)
@mock.patch('octavia.db.repositories.ListenerRepository.get')
def test_get_listeners_from_loadbalancer(self,
mock_listener_get,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mock_listener_get.return_value = _listener_mock
_loadbalancer_mock.listeners = [_listener_mock]
get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer()
result = get_list_from_lb_obj.execute(_loadbalancer_mock)
mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id)
self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result)
def test_get_vip_from_loadbalancer(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
_loadbalancer_mock.vip = _vip_mock
get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer()
result = get_vip_from_lb_obj.execute(_loadbalancer_mock)
self.assertEqual(_vip_mock, result)
@mock.patch('octavia.db.repositories.VRRPGroupRepository.create')
def test_create_vrrp_group_for_lb(self,
mock_vrrp_group_create,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mock_get_session.side_effect = ['TEST',
odb_exceptions.DBDuplicateEntry]
create_vrrp_group = database_tasks.CreateVRRPGroupForLB()
create_vrrp_group.execute(_loadbalancer_mock)
mock_vrrp_group_create.assert_called_once_with(
'TEST', load_balancer_id=LB_ID,
vrrp_group_name=LB_ID.replace('-', ''),
vrrp_auth_type=constants.VRRP_AUTH_DEFAULT,
vrrp_auth_pass=mock_generate_uuid.return_value.replace('-',
'')[0:7],
advert_int=1)
create_vrrp_group.execute(_loadbalancer_mock)
@mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')
def test_disable_amphora_health_monitoring(self,
mock_amp_health_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring()
disable_amp_health.execute(_amphora_mock)
mock_amp_health_repo_delete.assert_called_once_with(
'TEST', amphora_id=AMP_ID)
@mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')
def test_disable_lb_amphorae_health_monitoring(
self,
mock_amp_health_repo_delete,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
disable_amp_health = (
database_tasks.DisableLBAmphoraeHealthMonitoring())
disable_amp_health.execute(_loadbalancer_mock)
mock_amp_health_repo_delete.assert_called_once_with(
'TEST', amphora_id=AMP_ID)
@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')
def test_mark_amphora_health_monitoring_busy(self,
mock_amp_health_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_busy = database_tasks.MarkAmphoraHealthBusy()
mark_busy.execute(_amphora_mock)
mock_amp_health_repo_update.assert_called_once_with(
'TEST', amphora_id=AMP_ID, busy=True)
@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')
def test_mark_lb_amphorae_health_monitoring_busy(
self,
mock_amp_health_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_busy = (
database_tasks.MarkLBAmphoraeHealthBusy())
mark_busy.execute(_loadbalancer_mock)
mock_amp_health_repo_update.assert_called_once_with(
'TEST', amphora_id=AMP_ID, busy=True)
def test_update_lb_server_group_in_db(self,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_server_group_info = database_tasks.UpdateLBServerGroupInDB()
update_server_group_info.execute(LB_ID, SERVER_GROUP_ID)
repo.LoadBalancerRepository.update.assert_called_once_with(
'TEST',
id=LB_ID,
server_group_id=SERVER_GROUP_ID)
# Test the revert
mock_listener_repo_update.reset_mock()
update_server_group_info.revert(LB_ID, SERVER_GROUP_ID)
# Test the revert with exception
mock_listener_repo_update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
update_server_group_info.revert(LB_ID, SERVER_GROUP_ID)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
def test_mark_health_mon_active_in_db(self,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB())
mark_health_mon_active.execute(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
HM_ID,
operating_status=constants.ONLINE,
provisioning_status=constants.ACTIVE)
# Test the revert
mock_health_mon_repo_update.reset_mock()
mark_health_mon_active.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_health_mon_repo_update.reset_mock()
mock_health_mon_repo_update.side_effect = Exception('fail')
mark_health_mon_active.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
def test_mark_health_mon_pending_create_in_db(
self,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_health_mon_pending_create = (database_tasks.
MarkHealthMonitorPendingCreateInDB())
mark_health_mon_pending_create.execute(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
HM_ID,
provisioning_status=constants.PENDING_CREATE)
# Test the revert
mock_health_mon_repo_update.reset_mock()
mark_health_mon_pending_create.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_health_mon_repo_update.reset_mock()
mock_health_mon_repo_update.side_effect = Exception('fail')
mark_health_mon_pending_create.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
def test_mark_health_mon_pending_delete_in_db(
self,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_health_mon_pending_delete = (database_tasks.
MarkHealthMonitorPendingDeleteInDB())
mark_health_mon_pending_delete.execute(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
HM_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_health_mon_repo_update.reset_mock()
mark_health_mon_pending_delete.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_health_mon_repo_update.reset_mock()
mock_health_mon_repo_update.side_effect = Exception('fail')
mark_health_mon_pending_delete.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
def test_mark_health_mon_pending_update_in_db(
self,
mock_health_mon_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_health_mon_pending_update = (database_tasks.
MarkHealthMonitorPendingUpdateInDB())
mark_health_mon_pending_update.execute(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
HM_ID,
provisioning_status=constants.PENDING_UPDATE)
# Test the revert
mock_health_mon_repo_update.reset_mock()
mark_health_mon_pending_update.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_health_mon_repo_update.reset_mock()
mock_health_mon_repo_update.side_effect = Exception('fail')
mark_health_mon_pending_update.revert(self.health_mon_mock)
mock_health_mon_repo_update.assert_called_once_with(
'TEST',
id=HM_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_mark_l7policy_active_in_db(self,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB())
mark_l7policy_active.execute(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.ACTIVE,
operating_status=constants.ONLINE)
# Test the revert
mock_l7policy_repo_update.reset_mock()
mark_l7policy_active.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7policy_repo_update.reset_mock()
mock_l7policy_repo_update.side_effect = Exception('fail')
mark_l7policy_active.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_mark_l7policy_pending_create_in_db(self,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7policy_pending_create = (database_tasks.
MarkL7PolicyPendingCreateInDB())
mark_l7policy_pending_create.execute(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.PENDING_CREATE)
# Test the revert
mock_l7policy_repo_update.reset_mock()
mark_l7policy_pending_create.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7policy_repo_update.reset_mock()
mock_l7policy_repo_update.side_effect = Exception('fail')
mark_l7policy_pending_create.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_mark_l7policy_pending_delete_in_db(self,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7policy_pending_delete = (database_tasks.
MarkL7PolicyPendingDeleteInDB())
mark_l7policy_pending_delete.execute(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_l7policy_repo_update.reset_mock()
mark_l7policy_pending_delete.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7policy_repo_update.reset_mock()
mock_l7policy_repo_update.side_effect = Exception('fail')
mark_l7policy_pending_delete.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7PolicyRepository.update')
def test_mark_l7policy_pending_update_in_db(self,
mock_l7policy_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7policy_pending_update = (database_tasks.
MarkL7PolicyPendingUpdateInDB())
mark_l7policy_pending_update.execute(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
L7POLICY_ID,
provisioning_status=constants.PENDING_UPDATE)
# Test the revert
mock_l7policy_repo_update.reset_mock()
mark_l7policy_pending_update.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7policy_repo_update.reset_mock()
mock_l7policy_repo_update.side_effect = Exception('fail')
mark_l7policy_pending_update.revert(self.l7policy_mock)
mock_l7policy_repo_update.assert_called_once_with(
'TEST',
id=L7POLICY_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
def test_mark_l7rule_active_in_db(self,
mock_l7rule_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB())
mark_l7rule_active.execute(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
L7RULE_ID,
provisioning_status=constants.ACTIVE,
operating_status=constants.ONLINE)
# Test the revert
mock_l7rule_repo_update.reset_mock()
mark_l7rule_active.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7rule_repo_update.reset_mock()
mock_l7rule_repo_update.side_effect = Exception('fail')
mark_l7rule_active.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
def test_mark_l7rule_pending_create_in_db(self,
mock_l7rule_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7rule_pending_create = (database_tasks.
MarkL7RulePendingCreateInDB())
mark_l7rule_pending_create.execute(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
L7RULE_ID,
provisioning_status=constants.PENDING_CREATE)
# Test the revert
mock_l7rule_repo_update.reset_mock()
mark_l7rule_pending_create.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7rule_repo_update.reset_mock()
mock_l7rule_repo_update.side_effect = Exception('fail')
mark_l7rule_pending_create.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
def test_mark_l7rule_pending_delete_in_db(self,
mock_l7rule_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7rule_pending_delete = (database_tasks.
MarkL7RulePendingDeleteInDB())
mark_l7rule_pending_delete.execute(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
L7RULE_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_l7rule_repo_update.reset_mock()
mark_l7rule_pending_delete.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7rule_repo_update.reset_mock()
mock_l7rule_repo_update.side_effect = Exception('fail')
mark_l7rule_pending_delete.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.L7RuleRepository.update')
def test_mark_l7rule_pending_update_in_db(self,
mock_l7rule_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_l7rule_pending_update = (database_tasks.
MarkL7RulePendingUpdateInDB())
mark_l7rule_pending_update.execute(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
L7RULE_ID,
provisioning_status=constants.PENDING_UPDATE)
# Test the revert
mock_l7rule_repo_update.reset_mock()
mark_l7rule_pending_update.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_l7rule_repo_update.reset_mock()
mock_l7rule_repo_update.side_effect = Exception('fail')
mark_l7rule_pending_update.revert(self.l7rule_mock)
mock_l7rule_repo_update.assert_called_once_with(
'TEST',
id=L7RULE_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update')
def test_mark_member_active_in_db(self,
mock_member_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_member_active = (database_tasks.MarkMemberActiveInDB())
mark_member_active.execute(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.ACTIVE)
# Test the revert
mock_member_repo_update.reset_mock()
mark_member_active.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_member_repo_update.reset_mock()
mock_member_repo_update.side_effect = Exception('fail')
mark_member_active.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update')
def test_mark_member_pending_create_in_db(self,
mock_member_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_member_pending_create = (database_tasks.
MarkMemberPendingCreateInDB())
mark_member_pending_create.execute(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.PENDING_CREATE)
# Test the revert
mock_member_repo_update.reset_mock()
mark_member_pending_create.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_member_repo_update.reset_mock()
mock_member_repo_update.side_effect = Exception('fail')
mark_member_pending_create.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update')
def test_mark_member_pending_delete_in_db(self,
mock_member_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_member_pending_delete = (database_tasks.
MarkMemberPendingDeleteInDB())
mark_member_pending_delete.execute(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_member_repo_update.reset_mock()
mark_member_pending_delete.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_member_repo_update.reset_mock()
mock_member_repo_update.side_effect = Exception('fail')
mark_member_pending_delete.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update')
def test_mark_member_pending_update_in_db(self,
mock_member_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_member_pending_update = (database_tasks.
MarkMemberPendingUpdateInDB())
mark_member_pending_update.execute(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
MEMBER_ID,
provisioning_status=constants.PENDING_UPDATE)
# Test the revert
mock_member_repo_update.reset_mock()
mark_member_pending_update.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_member_repo_update.reset_mock()
mock_member_repo_update.side_effect = Exception('fail')
mark_member_pending_update.revert(self.member_mock)
mock_member_repo_update.assert_called_once_with(
'TEST',
id=MEMBER_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.PoolRepository.update')
def test_mark_pool_active_in_db(self,
mock_pool_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_pool_active = (database_tasks.MarkPoolActiveInDB())
mark_pool_active.execute(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
POOL_ID,
provisioning_status=constants.ACTIVE)
# Test the revert
mock_pool_repo_update.reset_mock()
mark_pool_active.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_pool_repo_update.reset_mock()
mock_pool_repo_update.side_effect = Exception('fail')
mark_pool_active.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.PoolRepository.update')
def test_mark_pool_pending_create_in_db(self,
mock_pool_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB())
mark_pool_pending_create.execute(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
POOL_ID,
provisioning_status=constants.PENDING_CREATE)
# Test the revert
mock_pool_repo_update.reset_mock()
mark_pool_pending_create.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_pool_repo_update.reset_mock()
mock_pool_repo_update.side_effect = Exception('fail')
mark_pool_pending_create.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.PoolRepository.update')
def test_mark_pool_pending_delete_in_db(self,
mock_pool_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB())
mark_pool_pending_delete.execute(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
POOL_ID,
provisioning_status=constants.PENDING_DELETE)
# Test the revert
mock_pool_repo_update.reset_mock()
mark_pool_pending_delete.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_pool_repo_update.reset_mock()
mock_pool_repo_update.side_effect = Exception('fail')
mark_pool_pending_delete.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.PoolRepository.update')
def test_mark_pool_pending_update_in_db(self,
mock_pool_repo_update,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
mark_pool_pending_update = (database_tasks.
MarkPoolPendingUpdateInDB())
mark_pool_pending_update.execute(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
POOL_ID,
provisioning_status=constants.PENDING_UPDATE)
# Test the revert
mock_pool_repo_update.reset_mock()
mark_pool_pending_update.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
# Test the revert with exception
mock_pool_repo_update.reset_mock()
mock_pool_repo_update.side_effect = Exception('fail')
mark_pool_pending_update.revert(POOL_ID)
mock_pool_repo_update.assert_called_once_with(
'TEST',
id=POOL_ID,
provisioning_status=constants.ERROR)
@mock.patch('octavia.db.repositories.MemberRepository.update_pool_members')
def test_update_pool_members_operating_status_in_db(
self,
mock_member_repo_update_pool_members,
mock_generate_uuid,
mock_LOG,
mock_get_session,
mock_loadbalancer_repo_update,
mock_listener_repo_update,
mock_amphora_repo_update,
mock_amphora_repo_delete):
update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB()
update_members.execute(POOL_ID, constants.ONLINE)
mock_member_repo_update_pool_members.assert_called_once_with(
'TEST',
POOL_ID,
operating_status=constants.ONLINE)
| 42.191542 | 79 | 0.568691 | 11,299 | 118,727 | 5.492344 | 0.034959 | 0.077508 | 0.058429 | 0.060266 | 0.835847 | 0.799784 | 0.781237 | 0.768523 | 0.756099 | 0.739357 | 0 | 0.005443 | 0.373268 | 118,727 | 2,813 | 80 | 42.206541 | 0.828558 | 0.031206 | 0 | 0.749546 | 0 | 0 | 0.044507 | 0.030615 | 0 | 0 | 0 | 0.000355 | 0.102541 | 1 | 0.034936 | false | 0.000907 | 0.005898 | 0 | 0.041289 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d9f8dcb19533a96faaad26bde1b0790a5c363c97 | 142,263 | py | Python | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | ["MIT"] | 18 | 2021-01-27T00:07:35.000Z | 2022-03-25T22:20:13.000Z | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | ["MIT"] | 1 | 2015-04-14T00:19:57.000Z | 2015-04-14T00:29:29.000Z | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | ["MIT"] | 1 | 2021-11-21T02:33:51.000Z | 2021-11-21T02:33:51.000Z |
#!/usr/bin/env pytest
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test /vsis3
# Author: Even Rouault <even dot rouault at spatialys dot com>
#
###############################################################################
# Copyright (c) 2015, Even Rouault <even dot rouault at spatialys dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import json
import os.path
import stat
import sys
from osgeo import gdal
import gdaltest
import webserver
import pytest
def open_for_read(uri):
"""
Opens a test file for reading.
"""
return gdal.VSIFOpenExL(uri, 'rb', 1)
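# Illustrative sketch, not used by the tests: the third argument to
# VSIFOpenExL (bSetError=1) asks GDAL to record why an open failed, so the
# message can be read back with gdal.VSIGetLastErrorMsg(), which the
# error-case tests below depend on. The helper name is invented here for
# illustration only.
def _example_open_error_message(uri):
    f = gdal.VSIFOpenExL(uri, 'rb', 1)
    if f is None:
        # Open failed: return the recorded error message.
        return gdal.VSIGetLastErrorMsg()
    gdal.VSIFCloseL(f)
    return ''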
###############################################################################
def test_vsis3_init():
gdaltest.aws_vars = {}
for var in ('AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_TIMESTAMP', 'AWS_HTTPS', 'AWS_VIRTUAL_HOSTING', 'AWS_S3_ENDPOINT', 'AWS_REQUEST_PAYER', 'AWS_DEFAULT_REGION', 'AWS_DEFAULT_PROFILE', 'AWS_PROFILE', 'AWS_NO_SIGN_REQUEST'):
gdaltest.aws_vars[var] = gdal.GetConfigOption(var)
if gdaltest.aws_vars[var] is not None:
gdal.SetConfigOption(var, "")
# To keep user AWS credentials in ~/.aws/credentials and ~/.aws/config
# from messing up our tests
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
assert gdal.GetSignedURL('/vsis3/foo/bar') is None
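# No AWS credentials or credential files are configured at this point, so
# GetSignedURL() presumably has nothing to sign with and returns None.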
###############################################################################
# Test AWS_NO_SIGN_REQUEST=YES
def test_vsis3_no_sign_request():
if not gdaltest.built_against_curl():
pytest.skip()
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
actual_url = gdal.GetActualURL('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'
actual_url = gdal.GetActualURL('/vsis3_streaming/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
assert actual_url == 'https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF'
f = open_for_read('/vsis3/landsat-pds/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF')
if f is None:
if gdaltest.gdalurlopen('https://landsat-pds.s3.amazonaws.com/L8/001/002/LC80010022016230LGN00/LC80010022016230LGN00_B1.TIF') is None:
pytest.skip('cannot open URL')
pytest.fail()
gdal.VSIFCloseL(f)
###############################################################################
# Test Sync() and multithreaded download
def test_vsis3_sync_multithreaded_download():
if not gdaltest.built_against_curl():
pytest.skip()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
tab = [ -1 ]
# Use a public bucket with /test_dummy/foo and /test_dummy/bar files
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy',
'/vsimem/test_vsis3_no_sign_request_sync',
options=['NUM_THREADS=2'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4
gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync')
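# The callback handed to gdal.Sync() follows the usual GDAL progress-callback
# signature (completion fraction, message, user data) and should return a
# truthy value to continue; the assertions above check that the reported
# fraction only increases and ends at exactly 1.0.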
###############################################################################
# Test Sync() and multithreaded download and CHUNK_SIZE
def test_vsis3_sync_multithreaded_download_chunk_size():
if not gdaltest.built_against_curl():
pytest.skip()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
tab = [ -1 ]
# Use a public bucket with /test_dummy/foo and /test_dummy/bar files
with gdaltest.config_option('AWS_NO_SIGN_REQUEST', 'YES'):
assert gdal.Sync('/vsis3/cdn.proj.org/test_dummy',
'/vsimem/test_vsis3_no_sign_request_sync',
options=['NUM_THREADS=2', 'CHUNK_SIZE=3'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/foo').size == 4
assert gdal.VSIStatL('/vsimem/test_vsis3_no_sign_request_sync/test_dummy/bar').size == 4
gdal.RmdirRecursive('/vsimem/test_vsis3_no_sign_request_sync')
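# CHUNK_SIZE appears to cap how much of a file each request transfers; with
# 4-byte objects and CHUNK_SIZE=3 every download is presumably split across
# more than one ranged request, exercising the multi-chunk path while still
# producing identical 4-byte copies.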
###############################################################################
# Error cases
def test_vsis3_1():
if not gdaltest.built_against_curl():
pytest.skip()
# Missing AWS_SECRET_ACCESS_KEY
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_SECRET_ACCESS_KEY') >= 0
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')
# Missing AWS_ACCESS_KEY_ID
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar')
assert f is None and gdal.VSIGetLastErrorMsg().find('AWS_ACCESS_KEY_ID') >= 0
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')
# ERROR 1: The AWS Access Key Id you provided does not exist in our records.
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3/foo/bar.baz')
if f is not None or gdal.VSIGetLastErrorMsg() == '':
if f is not None:
gdal.VSIFCloseL(f)
if gdal.GetConfigOption('APPVEYOR') is not None:
return
pytest.fail(gdal.VSIGetLastErrorMsg())
gdal.ErrorReset()
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/foo/bar.baz')
assert f is None and gdal.VSIGetLastErrorMsg() != ''
###############################################################################
def test_vsis3_start_webserver():
gdaltest.webserver_process = None
gdaltest.webserver_port = 0
if not gdaltest.built_against_curl():
pytest.skip()
(gdaltest.webserver_process, gdaltest.webserver_port) = webserver.launch(handler=webserver.DispatcherHttpHandler)
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID')
gdal.SetConfigOption('AWS_TIMESTAMP', '20150101T000000Z')
gdal.SetConfigOption('AWS_HTTPS', 'NO')
gdal.SetConfigOption('AWS_VIRTUAL_HOSTING', 'NO')
gdal.SetConfigOption('AWS_S3_ENDPOINT', '127.0.0.1:%d' % gdaltest.webserver_port)
def get_s3_fake_bucket_resource_method(request):
request.protocol_version = 'HTTP/1.1'
if 'Authorization' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=38901846b865b12ac492bc005bb394ca8d60c098b68db57c084fac686a932f9e'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date,Signature=9f623b7ffce76188a456c70fb4813eb31969e88d130d6b4d801b3accbf050d6c'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
###############################################################################
# Test with a fake AWS server
def test_vsis3_2():
if gdaltest.webserver_port == 0:
pytest.skip()
signed_url = gdal.GetSignedURL('/vsis3/s3_fake_bucket/resource')
expected_url_8080 = 'http://127.0.0.1:8080/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=dca239dd95f72ff8c37c15c840afc54cd19bdb07f7aaee2223108b5b0ad35da8&X-Amz-SignedHeaders=host'
expected_url_8081 = 'http://127.0.0.1:8081/s3_fake_bucket/resource?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AWS_ACCESS_KEY_ID%2F20150101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20150101T000000Z&X-Amz-Expires=3600&X-Amz-Signature=ef5216bc5971863414c69f6ca095276c0d62c0da97fa4f6ab80c30bd7fc146ac&X-Amz-SignedHeaders=host'
assert signed_url in (expected_url_8080, expected_url_8081)
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3_streaming/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if 'Authorization' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=464a21835038b4f4d292b6463b8a005b9aaa980513aa8c42fc170abb733dce85'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token,Signature=b10e91575186342f9f2acfc91c4c2c9938c4a9e8cdcbc043d09d59d9641ad7fb'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
handler.add('GET', '/s3_fake_bucket_with_session_token/resource', custom_method=method)
# Test with temporary credentials
with gdaltest.config_option('AWS_SESSION_TOKEN', 'AWS_SESSION_TOKEN'):
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket_with_session_token/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if 'Range' in request.headers:
if request.headers['Range'] != 'bytes=0-16383':
sys.stderr.write("Bad Range: '%s'\n" % str(request.headers['Range']))
request.send_response(403)
return
request.send_response(206)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Range', 'bytes 0-16383/1000000')
request.send_header('Content-Length', 16384)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(('a' * 16384).encode('ascii'))
else:
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 1000000)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(('a' * 1000000).encode('ascii'))
handler.add('GET', '/s3_fake_bucket/resource2.bin', custom_method=method)
with webserver.install_http_handler(handler):
# old_val = gdal.GetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN')
# gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR')
stat_res = gdal.VSIStatL('/vsis3/s3_fake_bucket/resource2.bin')
# gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', old_val)
if stat_res is None or stat_res.size != 1000000:
if stat_res is not None:
print(stat_res.size)
else:
print(stat_res)
pytest.fail()
handler = webserver.SequentialHandler()
handler.add('HEAD', '/s3_fake_bucket/resource2.bin', 200,
{'Content-type': 'text/plain',
'Content-Length': 1000000,
'Connection': 'close'})
with webserver.install_http_handler(handler):
stat_res = gdal.VSIStatL('/vsis3_streaming/s3_fake_bucket/resource2.bin')
if stat_res is None or stat_res.size != 1000000:
if stat_res is not None:
print(stat_res.size)
else:
print(stat_res)
pytest.fail()
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('127.0.0.1'):
request.send_response(301)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-west-2') >= 0 and request.headers['Host'].startswith('localhost'):
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket/redirect', custom_method=method)
# Test region and endpoint 'redirects'
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/redirect')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
if data != 'foo':
if gdaltest.is_travis_branch('trusty'):
pytest.skip('Skipped on trusty branch, but should be investigated')
pytest.fail(data)
# Test region and endpoint 'redirects'
gdal.VSICurlClearCache()
handler.req_count = 0
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3_streaming/s3_fake_bucket/redirect')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
handler = webserver.SequentialHandler()
def method(request):
# /vsis3_streaming/ should have remembered the change of region and endpoint
if request.headers['Authorization'].find('us-west-2') < 0 or \
not request.headers['Host'].startswith('localhost'):
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.protocol_version = 'HTTP/1.1'
request.send_response(400)
response = 'bla'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('GET', '/s3_fake_bucket/non_xml_error', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/non_xml_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('bla') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><oops>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/invalid_xml_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/invalid_xml_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<oops>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error/>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_code_in_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_code_in_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error/>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>AuthorizationHeaderMalformed</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_region_in_AuthorizationHeaderMalformed_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>PermanentRedirect</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_endpoint_in_PermanentRedirect_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Code>bla</Code></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
handler.add('GET', '/s3_fake_bucket/no_message_in_error', 400,
{'Content-type': 'application/xml',
'Transfer-Encoding': 'chunked',
'Connection': 'close'}, response)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3_streaming/s3_fake_bucket/no_message_in_error')
assert f is None and gdal.VSIGetLastErrorMsg().find('<Error>') >= 0
# Test with requester pays
handler = webserver.SequentialHandler()
def method(request):
if 'x-amz-request-payer' not in request.headers:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
expected_authorization_8080 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=cf713a394e1b629ac0e468d60d3d4a12f5236fd72d21b6005c758b0dfc7049cd'
expected_authorization_8081 = 'AWS4-HMAC-SHA256 Credential=AWS_ACCESS_KEY_ID/20150101/us-east-1/s3/aws4_request,SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-request-payer,Signature=4756166679008a1a40cd6ff91dbbef670a71c11bf8e3c998dd7385577c3ac4d9'
if request.headers['Authorization'] != expected_authorization_8080 and request.headers['Authorization'] != expected_authorization_8081:
sys.stderr.write("Bad Authorization: '%s'\n" % str(request.headers['Authorization']))
request.send_response(403)
return
if request.headers['x-amz-request-payer'] != 'requester':
sys.stderr.write("Bad x-amz-request-payer: '%s'\n" % str(request.headers['x-amz-request-payer']))
request.send_response(403)
return
request.send_response(200)
request.send_header('Content-type', 'text/plain')
request.send_header('Content-Length', 3)
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write("""foo""".encode('ascii'))
handler.add('GET', '/s3_fake_bucket_with_requester_pays/resource', custom_method=method)
with gdaltest.config_option('AWS_REQUEST_PAYER', 'requester'):
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket_with_requester_pays/resource')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Test temporary redirect
handler = webserver.SequentialHandler()
class HandlerClass(object):
def __init__(self, response_value):
self.old_authorization = None
self.response_value = response_value
def method_req_1(self, request):
if request.headers['Host'].find('127.0.0.1') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
self.old_authorization = request.headers['Authorization']
request.protocol_version = 'HTTP/1.1'
request.send_response(307)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
def method_req_2(self, request):
if request.headers['Host'].find('localhost') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
if self.old_authorization == request.headers['Authorization']:
sys.stderr.write('Should have got a different Authorization. Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = self.response_value
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
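# The temporary-redirect flow exercised here: the first request goes to
# 127.0.0.1 and is answered with a 307 TemporaryRedirect naming localhost as
# the endpoint; the retry must reach the new endpoint with a different
# Authorization header (the signature covers the Host), which method_req_2
# verifies by comparing against the value remembered from the first request.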
h = HandlerClass('foo')
handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read/resource', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Retry on the same bucket and check that the redirection was indeed temporary
handler = webserver.SequentialHandler()
h = HandlerClass('bar')
handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read/resource2', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_test_temporary_redirect_read/resource2')
assert f is not None
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar'
###############################################################################
# Test re-opening after changing configuration option (#2294)
def test_vsis3_open_after_config_option_change():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 403)
handler.add('GET', '/test_vsis3_change_config_options/test.bin', 403)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is None
# Does not attempt any network access since we didn't change significant
# parameters
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is None
with gdaltest.config_option('AWS_ACCESS_KEY_ID', 'another_key_id'):
handler = webserver.SequentialHandler()
handler.add('GET', '/test_vsis3_change_config_options/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test.bin</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>123456</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/test_vsis3_change_config_options/test.bin')
assert f is not None
gdal.VSIFCloseL(f)
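# Changing AWS_ACCESS_KEY_ID counts as a significant parameter change, so the
# cached 403 results above are discarded and a fresh directory listing is
# issued; the listing reports test.bin, which is enough for the open to
# succeed without fetching the object itself.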
###############################################################################
# Test ReadDir() with a fake AWS server
def test_vsis3_readdir():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
if request.headers['Host'].startswith('127.0.0.1'):
request.send_response(301)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>PermanentRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Host'].startswith('localhost'):
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir with_space/</Prefix>
<NextMarker>bla</NextMarker>
<Contents>
<Key>a_dir with_space/resource3 with_space.bin</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>123456</Size>
</Contents>
</ListBucketResult>
"""
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%20with_space%2F', custom_method=method)
def method(request):
# /vsis3/ should have remembered the change of region and endpoint
if request.headers['Authorization'].find('us-west-2') < 0 or \
not request.headers['Host'].startswith('localhost'):
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = """<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir with_space/</Prefix>
<Contents>
<Key>a_dir with_space/resource4.bin</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
</Contents>
<Contents>
<Key>a_dir with_space/i_am_a_glacier_file</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
<StorageClass>GLACIER</StorageClass>
</Contents>
<CommonPrefixes>
<Prefix>a_dir with_space/subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
"""
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&marker=bla&prefix=a_dir%20with_space%2F', custom_method=method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
if f is None:
if gdaltest.is_travis_branch('trusty'):
pytest.skip('Skipped on trusty branch, but should be investigated')
pytest.fail()
gdal.VSIFCloseL(f)
with webserver.install_http_handler(webserver.SequentialHandler()):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
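# The listing above is assembled from paginated ListBucket responses: the
# first page returns NextMarker=bla and the follow-up request passes
# marker=bla. Note also that the directory prefix containing a space is
# percent-encoded as a_dir%20with_space%2F in the query string.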
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').mtime == 1
# Same as above: cached
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
# ReadDir on something known to be a file shouldn't cause network access
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
assert dir_contents is None
# Test unrelated partial clear of the cache
gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket_unrelated')
assert gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin').size == 123456
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir with_space')
assert dir_contents == ['resource3 with_space.bin', 'resource4.bin', 'subdir']
# Test partial clear of the cache
gdal.VSICurlPartialClearCache('/vsis3/s3_fake_bucket2/a_dir with_space')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/a_dir%20with_space/resource3%20with_space.bin', 400)
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&max-keys=100&prefix=a_dir%20with_space%2Fresource3%20with_space.bin%2F', 400)
with webserver.install_http_handler(handler):
gdal.VSIStatL('/vsis3/s3_fake_bucket2/a_dir with_space/resource3 with_space.bin')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir/</Prefix>
<Contents>
<Key>a_dir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
assert dir_contents == ['test.txt']
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket2/?delimiter=%2F&prefix=a_dir%2F', 200, {},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>a_dir/</Prefix>
<Contents>
<Key>a_dir/resource4.bin</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
</Contents>
<Contents>
<Key>a_dir/i_am_a_glacier_file</Key>
<LastModified>2015-10-16T12:34:56.000Z</LastModified>
<Size>456789</Size>
<StorageClass>GLACIER</StorageClass>
</Contents>
<CommonPrefixes>
<Prefix>a_dir/subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with gdaltest.config_option('CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE', 'NO'):
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_fake_bucket2/a_dir')
assert dir_contents == ['resource4.bin', 'i_am_a_glacier_file', 'subdir']
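# With CPL_VSIL_CURL_IGNORE_GLACIER_STORAGE=NO the GLACIER-class object is
# reported by ReadDir(); by default GDAL appears to skip such entries, which
# is why the earlier listing of 'a_dir with_space' omitted i_am_a_glacier_file
# even though the server included it in the XML.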
# Test CPL_VSIL_CURL_NON_CACHED
for config_option_value in ['/vsis3/s3_non_cached/test.txt',
'/vsis3/s3_non_cached',
'/vsis3/s3_non_cached:/vsis3/unrelated',
'/vsis3/unrelated:/vsis3/s3_non_cached',
'/vsis3/unrelated:/vsis3/s3_non_cached:/vsis3/unrelated']:
with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo', config_option_value
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')
with webserver.install_http_handler(handler):
size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
assert size == 4, config_option_value
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
size = gdal.VSIStatL('/vsis3/s3_non_cached/test.txt').size
if size != 3:
print(config_option_value)
pytest.fail(data)
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'bar2')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar2', config_option_value
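# Paths matched by CPL_VSIL_CURL_NON_CACHED (including the colon-separated
# lists above) are re-fetched on every access, which is why the reported size
# can flip between 4 and 3 and the re-read returns 'bar2'. The loop that
# follows repeats the exercise without the option (or with an unrelated path)
# to show that caching then keeps returning 'foo'.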
# Retry without option
for config_option_value in [None,
'/vsis3/s3_non_cached/bar.txt']:
with gdaltest.config_option('CPL_VSIL_CURL_NON_CACHED', config_option_value):
handler = webserver.SequentialHandler()
if config_option_value is None:
handler.add('GET', '/s3_non_cached/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>test2.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
handler.add('GET', '/s3_non_cached/test.txt', 200, {}, 'foo')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo', config_option_value
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_non_cached/test.txt')
assert f is not None, config_option_value
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
# We should still get foo because of caching
assert data == 'foo', config_option_value
# List buckets (empty result)
handler = webserver.SequentialHandler()
handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
<Buckets>
</Buckets>
</ListAllMyBucketsResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/')
assert dir_contents == ['.']
gdal.VSICurlClearCache()
# List buckets
handler = webserver.SequentialHandler()
handler.add('GET', '/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult>
<Buckets>
<Bucket>
<Name>mybucket</Name>
</Bucket>
</Buckets>
</ListAllMyBucketsResult>
""")
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/')
assert dir_contents == ['mybucket']
# Test temporary redirect
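# The first request is answered with a 307 TemporaryRedirect pointing to another
# endpoint; the client must re-issue the request against that endpoint with a
# freshly signed Authorization header (method_req_2 below rejects a reused one).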
handler = webserver.SequentialHandler()
class HandlerClass(object):
def __init__(self, response_value):
self.old_authorization = None
self.response_value = response_value
def method_req_1(self, request):
if request.headers['Host'].find('127.0.0.1') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
self.old_authorization = request.headers['Authorization']
request.protocol_version = 'HTTP/1.1'
request.send_response(307)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>TemporaryRedirect</Code><Endpoint>localhost:%d</Endpoint></Error>' % request.server.port
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
def method_req_2(self, request):
if request.headers['Host'].find('localhost') < 0:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
if self.old_authorization == request.headers['Authorization']:
sys.stderr.write('Should have got a different Authorization. Bad headers: %s\n' % str(request.headers))
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
response = self.response_value
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<CommonPrefixes>
<Prefix>test</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir')
assert dir_contents == ['test']
# Retry on the same bucket and check that the redirection was indeed temporary
handler = webserver.SequentialHandler()
h = HandlerClass("""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test/</Prefix>
<CommonPrefixes>
<Prefix>test/test2</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_1)
handler.add('GET', '/s3_test_temporary_redirect_read_dir/?delimiter=%2F&prefix=test%2F', custom_method=h.method_req_2)
with webserver.install_http_handler(handler):
dir_contents = gdal.ReadDir('/vsis3/s3_test_temporary_redirect_read_dir/test')
assert dir_contents == ['test2']
###############################################################################
# Test OpenDir() with a fake AWS server
def test_vsis3_opendir():
if gdaltest.webserver_port == 0:
pytest.skip()
# Unlimited depth
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>subdir/</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>0</Size>
</Contents>
<Contents>
<Key>subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir')
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir/test.txt'
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
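# The mode values asserted above are the standard stat constants:
# 32768 == stat.S_IFREG (regular file) and 16384 == stat.S_IFDIR (directory).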
# Depth = 0
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir', 0)
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
# Depth = 1
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<Contents>
<Key>test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>subdir/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
d = gdal.OpenDir('/vsis3/vsis3_opendir', 1)
assert d is not None
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'test.txt'
assert entry.size == 40
assert entry.mode == 32768
assert entry.mtime == 1
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir'
assert entry.mode == 16384
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_opendir/?delimiter=%2F&prefix=subdir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>subdir/</Prefix>
<Marker/>
<Contents>
<Key>subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
entry = gdal.GetNextDirEntry(d)
assert entry.name == 'subdir/test.txt'
entry = gdal.GetNextDirEntry(d)
assert entry is None
gdal.CloseDir(d)
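# Minimal illustrative helper (not used by the tests above): a generator wrapping
# the OpenDir()/GetNextDirEntry()/CloseDir() sequence exercised by
# test_vsis3_opendir(). Depth semantics follow VSIOpenDir(): -1 means unlimited.
def _walk_vsi_dir(path, recursion_depth=-1):
    d = gdal.OpenDir(path, recursion_depth)
    if d is None:
        return
    try:
        while True:
            entry = gdal.GetNextDirEntry(d)
            if entry is None:
                break
            yield entry
    finally:
        gdal.CloseDir(d)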
###############################################################################
# Test simple PUT support with a fake AWS server
def test_vsis3_4():
if gdaltest.webserver_port == 0:
pytest.skip()
with webserver.install_http_handler(webserver.SequentialHandler()):
with gdaltest.error_handler():
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3', 'wb')
assert f is None
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, 'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 3
# Empty file
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/empty_file.bin', custom_method=method)
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket3/empty_file.bin', 200, {'Connection': 'close'}, '')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_fake_bucket3/empty_file.bin').size == 0
# Invalid seek
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFSeekL(f, 1, 0)
assert ret != 0
gdal.VSIFCloseL(f)
# Invalid read
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file.bin', 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFReadL(1, 1, f)
assert not ret
gdal.VSIFCloseL(f)
# Error case
handler = webserver.SequentialHandler()
handler.add('PUT', '/s3_fake_bucket3/empty_file_error.bin', 403)
with webserver.install_http_handler(handler):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/empty_file_error.bin', 'wb')
assert f is not None
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != ''
# Nominal case
gdal.NetworkStatsReset()
with gdaltest.config_option('CPL_VSIL_NETWORK_STATS_ENABLED', 'YES'):
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/another_file.bin', 'wb')
assert f is not None
assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
assert gdal.VSIFSeekL(f, 0, 1) == 0
assert gdal.VSIFSeekL(f, 0, 2) == 0
assert gdal.VSIFWriteL('foo', 1, 3, f) == 3
assert gdal.VSIFSeekL(f, gdal.VSIFTellL(f), 0) == 0
assert gdal.VSIFWriteL('bar', 1, 3, f) == 3
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '6':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(6).decode('ascii')
if content != 'foobar':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/another_file.bin', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
j = json.loads(gdal.NetworkStatsGetAsSerializedJSON())
#print(j)
assert j == {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
},
"handlers": {
"vsis3": {
"files": {
"/vsis3/s3_fake_bucket3/another_file.bin": {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
},
"actions": {
"Write": {
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
}
}
}
}
},
"methods": {
"PUT": {
"count": 1,
"uploaded_bytes": 6
}
}
}
}
}
gdal.NetworkStatsReset()
# Redirect case
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/redirect', 'wb')
assert f is not None
assert gdal.VSIFWriteL('foobar', 1, 6, f) == 6
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
if request.headers['Content-Length'] != '6':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(6).decode('ascii')
if content != 'foobar':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)
handler.add('PUT', '/s3_fake_bucket3/redirect', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
###############################################################################
# Test simple PUT support with retry logic
def test_vsis3_write_single_put_retry():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
'GDAL_HTTP_RETRY_DELAY': '0.01'}):
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket3/put_with_retry.bin', 'wb')
assert f is not None
assert gdal.VSIFWriteL('foo', 1, 3, f) == 3
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(3).decode('ascii')
if content != 'foo':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', 502)
handler.add('PUT', '/s3_fake_bucket3/put_with_retry.bin', custom_method=method)
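# The first PUT above answers 502; with GDAL_HTTP_MAX_RETRY=2 and a short
# GDAL_HTTP_RETRY_DELAY, closing the file retries the upload and succeeds.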
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
###############################################################################
# Test simple DELETE support with a fake AWS server
def test_vsis3_5():
if gdaltest.webserver_port == 0:
pytest.skip()
with webserver.install_http_handler(webserver.SequentialHandler()):
with gdaltest.error_handler():
ret = gdal.Unlink('/vsis3/foo')
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file', 200, {'Connection': 'close'}, 'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file').size == 3
handler = webserver.SequentialHandler()
handler.add('DELETE', '/s3_delete_bucket/delete_file', 204)
with webserver.install_http_handler(handler):
ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file')
assert ret == 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file', 404, {'Connection': 'close'})
handler.add('GET', '/s3_delete_bucket/?delimiter=%2F&max-keys=100&prefix=delete_file%2F', 404, {'Connection': 'close'})
with webserver.install_http_handler(handler):
assert gdal.VSIStatL('/vsis3/s3_delete_bucket/delete_file') is None
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/delete_file_error', 200)
handler.add('DELETE', '/s3_delete_bucket/delete_file_error', 403)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
ret = gdal.Unlink('/vsis3/s3_delete_bucket/delete_file_error')
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_delete_bucket/redirect', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
request.send_response(204)
request.send_header('Content-Length', 0)
request.end_headers()
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)
handler.add('DELETE', '/s3_delete_bucket/redirect', custom_method=method)
with webserver.install_http_handler(handler):
ret = gdal.Unlink('/vsis3/s3_delete_bucket/redirect')
assert ret == 0
###############################################################################
# Test DeleteObjects with a fake AWS server
def test_vsis3_unlink_batch():
if gdaltest.webserver_port == 0:
pytest.skip()
def method(request):
if request.headers['Content-MD5'] != 'Ze0X4LdlTwCsT+WpNxD9FA==':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(403)
return
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>foo</Key>
</Object>
<Object>
<Key>bar/baz</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>foo</Key></Deleted><Deleted><Key>bar/baz</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler = webserver.SequentialHandler()
handler.add('POST', '/unlink_batch/?delete', custom_method=method)
handler.add('POST', '/unlink_batch/?delete', 200, {},
"""<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>baw</Key></Deleted></DeleteResult>""")
with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
with webserver.install_http_handler(handler):
ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo', '/vsis3/unlink_batch/bar/baz', '/vsis3/unlink_batch/baw'])
assert ret
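# With CPL_VSIS3_UNLINK_BATCH_SIZE=2, the three keys are split into two
# DeleteObjects POST requests (?delete), matching the two handlers registered above.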
handler = webserver.SequentialHandler()
handler.add('POST', '/unlink_batch/?delete', 200, {},
"""<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Failed><Key>foo</Key></Failed></DeleteResult>""")
with webserver.install_http_handler(handler):
ret = gdal.UnlinkBatch(['/vsis3/unlink_batch/foo'])
assert not ret
###############################################################################
# Test RmdirRecursive() with a fake AWS server
def test_vsis3_rmdir_recursive():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/test_rmdir_recursive/?prefix=somedir%2F', 200, {'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>somedir/</Prefix>
<Marker/>
<Contents>
<Key>somedir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<Contents>
<Key>somedir/subdir/</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>0</Size>
</Contents>
<Contents>
<Key>somedir/subdir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>5</Size>
</Contents>
</ListBucketResult>
""")
def method(request):
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>somedir/test.txt</Key>
</Object>
<Object>
<Key>somedir/subdir/</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/test.txt</Key></Deleted><Deleted><Key>somedir/subdir/</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)
def method(request):
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>somedir/subdir/test.txt</Key>
</Object>
<Object>
<Key>somedir/</Key>
</Object>
</Delete>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(403)
return
request.protocol_version = 'HTTP/1.1'
request.send_response(200)
response = """<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Deleted><Key>somedir/subdir/test.txt</Key></Deleted><Deleted><Key>somedir/</Key></Deleted></DeleteResult>"""
request.send_header('Content-Length', len(response))
request.send_header('Connection', 'close')
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_rmdir_recursive/?delete', custom_method=method)
with gdaltest.config_option('CPL_VSIS3_UNLINK_BATCH_SIZE', '2'):
with webserver.install_http_handler(handler):
assert gdal.RmdirRecursive('/vsis3/test_rmdir_recursive/somedir') == 0
###############################################################################
# Test multipart upload with a fake AWS server
def test_vsis3_6():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
assert f is not None
size = 1024 * 1024 + 1
big_buffer = 'a' * size
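# With VSIS3_CHUNK_SIZE=1 (MB) and a buffer of 1 MB + 1 byte, the upload is split
# into a first 1048576-byte part and a second 1-byte part, hence the two
# partNumber PUT handlers checked below.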
handler = webserver.SequentialHandler()
def method(request):
request.protocol_version = 'HTTP/1.1'
if request.headers['Authorization'].find('us-east-1') >= 0:
request.send_response(400)
response = '<?xml version="1.0" encoding="UTF-8"?><Error><Message>bla</Message><Code>AuthorizationHeaderMalformed</Code><Region>us-west-2</Region></Error>'
response = '%x\r\n%s\r\n0\r\n\r\n' % (len(response), response)
request.send_header('Content-type', 'application/xml')
request.send_header('Transfer-Encoding', 'chunked')
request.end_headers()
request.wfile.write(response.encode('ascii'))
elif request.headers['Authorization'].find('us-west-2') >= 0:
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
else:
sys.stderr.write('Bad headers: %s\n' % str(request.headers))
request.send_response(403)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '1048576':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', custom_method=method)
with webserver.install_http_handler(handler):
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size
handler = webserver.SequentialHandler()
def method(request):
if request.headers['Content-Length'] != '1':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('ETag', '"second_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '186':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
content = request.rfile.read(186).decode('ascii')
if content != """<CompleteMultipartUpload>
<Part>
<PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part>
<Part>
<PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part>
</CompleteMultipartUpload>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', custom_method=method)
gdal.ErrorReset()
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_403_error.bin?uploads', 403)
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_empty_result.bin?uploads', 200)
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin?uploads', 200, {}, 'foo')
handler.add('POST', '/s3_fake_bucket4/large_file_initiate_no_uploadId.bin?uploads', 200, {}, '<foo/>')
with webserver.install_http_handler(handler):
for filename in ['/vsis3/s3_fake_bucket4/large_file_initiate_403_error.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_empty_result.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_invalid_xml_result.bin',
'/vsis3/s3_fake_bucket4/large_file_initiate_no_uploadId.bin']:
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == ''
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?partNumber=1&uploadId=my_id', 403)
handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_403_error.bin?uploadId=my_id', 204)
handler.add('POST', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?partNumber=1&uploadId=my_id', 200)
handler.add('DELETE', '/s3_fake_bucket4/large_file_upload_part_no_etag.bin?uploadId=my_id', 204)
with webserver.install_http_handler(handler):
for filename in ['/vsis3/s3_fake_bucket4/large_file_upload_part_403_error.bin',
'/vsis3/s3_fake_bucket4/large_file_upload_part_no_etag.bin']:
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0, filename
gdal.ErrorReset()
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() == '', filename
# Simulate failure in AbortMultipart stage
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?partNumber=1&uploadId=my_id', 403)
handler.add('DELETE', '/s3_fake_bucket4/large_file_abortmultipart_403_error.bin?uploadId=my_id', 403)
filename = '/vsis3/s3_fake_bucket4/large_file_abortmultipart_403_error.bin'
with webserver.install_http_handler(handler):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
with gdaltest.error_handler():
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == 0, filename
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != '', filename
# Simulate failure in CompleteMultipartUpload stage
handler = webserver.SequentialHandler()
handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploads', 200, {},
'<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>')
handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=1&uploadId=my_id', 200, {'ETag': 'first_etag'}, '')
handler.add('PUT', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?partNumber=2&uploadId=my_id', 200, {'ETag': 'second_etag'}, '')
handler.add('POST', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 403)
# handler.add('DELETE', '/s3_fake_bucket4/large_file_completemultipart_403_error.bin?uploadId=my_id', 204)
filename = '/vsis3/s3_fake_bucket4/large_file_completemultipart_403_error.bin'
with webserver.install_http_handler(handler):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
f = gdal.VSIFOpenL(filename, 'wb')
assert f is not None, filename
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size, filename
gdal.ErrorReset()
with gdaltest.error_handler():
gdal.VSIFCloseL(f)
assert gdal.GetLastErrorMsg() != '', filename
###############################################################################
# Test multipart upload with retry logic
def test_vsis3_write_multipart_retry():
if gdaltest.webserver_port == 0:
pytest.skip()
with gdaltest.config_options({'GDAL_HTTP_MAX_RETRY': '2',
'GDAL_HTTP_RETRY_DELAY': '0.01'}):
with gdaltest.config_option('VSIS3_CHUNK_SIZE', '1'): # 1 MB
with webserver.install_http_handler(webserver.SequentialHandler()):
f = gdal.VSIFOpenL('/vsis3/s3_fake_bucket4/large_file.bin', 'wb')
assert f is not None
size = 1024 * 1024 + 1
big_buffer = 'a' * size
handler = webserver.SequentialHandler()
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 502)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploads', 200,
{'Content-type': 'application/xml',
'Content-Length': len(response),
'Connection': 'close'},
response)
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 502)
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=1&uploadId=my_id', 200,
{'Content-Length': '0',
'ETag': '"first_etag"',
'Connection': 'close'}, {})
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
ret = gdal.VSIFWriteL(big_buffer, 1, size, f)
assert ret == size
handler = webserver.SequentialHandler()
handler.add('PUT', '/s3_fake_bucket4/large_file.bin?partNumber=2&uploadId=my_id', 200,
{'Content-Length': '0',
'ETag': '"second_etag"',
'Connection': 'close'}, {})
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 502)
handler.add('POST', '/s3_fake_bucket4/large_file.bin?uploadId=my_id', 200,
{'Content-Length': '0',
'Connection': 'close'}, {})
with gdaltest.error_handler():
with webserver.install_http_handler(handler):
gdal.VSIFCloseL(f)
###############################################################################
# Test Mkdir() / Rmdir()
def test_vsis3_7():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404, {'Connection': 'close'})
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
handler.add('PUT', '/s3_bucket_test_mkdir/dir/', 200)
with webserver.install_http_handler(handler):
ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
assert ret == 0
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_mkdir/dir').mode)
dir_content = gdal.ReadDir('/vsis3/s3_bucket_test_mkdir/dir')
assert dir_content == ['.']
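# Mkdir() on /vsis3/ is emulated by PUT'ing an empty placeholder object whose
# key ends with a slash ('dir/'), which is what the handler above expects.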
# Try creating an already existing directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 416, {'Connection': 'close'})
with webserver.install_http_handler(handler):
ret = gdal.Mkdir('/vsis3/s3_bucket_test_mkdir/dir', 0)
assert ret != 0
handler = webserver.SequentialHandler()
handler.add('DELETE', '/s3_bucket_test_mkdir/dir/', 204)
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
assert ret == 0
# Try deleting an already deleted directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir/', 404)
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir%2F', 404, {'Connection': 'close'})
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir')
assert ret != 0
# Try deleting a non-empty directory
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_mkdir/dir_nonempty/', 416)
handler.add('GET', '/s3_bucket_test_mkdir/?delimiter=%2F&max-keys=100&prefix=dir_nonempty%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>dir_nonempty/</Prefix>
<Contents>
<Key>dir_nonempty/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
ret = gdal.Rmdir('/vsis3/s3_bucket_test_mkdir/dir_nonempty')
assert ret != 0
# Try stat'ing a directory not ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_dir_stat/test_dir_stat', 400)
handler.add('GET', '/s3_bucket_test_dir_stat/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dir_stat/</Prefix>
<Contents>
<Key>test_dir_stat/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat/test_dir_stat').mode)
# Try ReadDir'ing a directory not ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_readdir/?delimiter=%2F&prefix=test_dirread%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dirread/</Prefix>
<Contents>
<Key>test_dirread/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir/test_dirread') is not None
# Try stat'ing a directory ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_dir_stat_2/test_dir_stat/', 400)
handler.add('GET', '/s3_bucket_test_dir_stat_2/?delimiter=%2F&max-keys=100&prefix=test_dir_stat%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dir_stat/</Prefix>
<Contents>
<Key>test_dir_stat/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/s3_bucket_test_dir_stat_2/test_dir_stat/').mode)
# Try ReadDir'ing a directory ending with slash
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_bucket_test_readdir2/?delimiter=%2F&prefix=test_dirread%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>test_dirread/</Prefix>
<Contents>
<Key>test_dirread/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.ReadDir('/vsis3/s3_bucket_test_readdir2/test_dirread') is not None
###############################################################################
# Test handling of file and directory with same name
def test_vsis3_8():
if gdaltest.webserver_port == 0:
pytest.skip()
handler = webserver.SequentialHandler()
handler.add('GET', '/vsis3_8/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
<Key>test</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>40</Size>
</Contents>
<CommonPrefixes>
<Prefix>test/</Prefix>
</CommonPrefixes>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
listdir = gdal.ReadDir('/vsis3/vsis3_8', 0)
assert listdir == ['test', 'test/']
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert not stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test').mode)
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert stat.S_ISDIR(gdal.VSIStatL('/vsis3/vsis3_8/test/').mode)
###############################################################################
# Test vsisync() with SYNC_STRATEGY=ETAG
def test_vsis3_sync_etag():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
options = ['SYNC_STRATEGY=ETAG']
with gdaltest.error_handler():
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert not gdal.Sync('/i_do/not/exist', '/vsis3/', options=options)
with gdaltest.error_handler():
handler = webserver.SequentialHandler()
handler.add('GET', '/do_not/exist', 404)
handler.add('GET', '/do_not/?delimiter=%2F&max-keys=100&prefix=exist%2F', 404)
handler.add('PUT', '/do_not/exist', 404)
with webserver.install_http_handler(handler):
assert not gdal.Sync('vsifile.py', '/vsis3/do_not/exist', options=options)
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 404)
handler.add('GET', '/out/?delimiter=%2F&max-keys=100&prefix=testsync.txt%2F', 404)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(3).decode('ascii')
if content != 'foo':
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.send_header('ETag', '"acbd18db4cc2f85cedef654fccc4a4d8"')
request.end_headers()
handler.add('PUT', '/out/testsync.txt', custom_method=method)
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
def cbk(pct, _, tab):
assert pct > tab[0]
tab[0] = pct
return True
tab = [ 0 ]
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options,
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
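# SYNC_STRATEGY=ETAG compares the local content's MD5 with the remote ETag; for
# single-part uploads the S3 ETag is the MD5 of the payload
# (md5('foo') == 'acbd18db4cc2f85cedef654fccc4a4d8'), so an unchanged file can be
# skipped without transferring it again.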
# Re-try with cached ETag. Should generate no network access
handler = webserver.SequentialHandler()
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out/testsync.txt', options=options)
gdal.VSICurlClearCache()
# Other direction: S3 to /vsimem
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
# Shouldn't do any copy, but hard to verify
with webserver.install_http_handler(webserver.SequentialHandler()):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/testsync.txt', options=options)
# Modify target file, and redo synchronization
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'bar')
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/', options=options)
f = gdal.VSIFOpenL('/vsimem/testsync.txt', 'rb')
data = gdal.VSIFReadL(1, 3, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# /vsimem to S3, but after cleaning the cache
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'ETag' : '"acbd18db4cc2f85cedef654fccc4a4d8"' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/testsync.txt', '/vsis3/out', options=options)
gdal.Unlink('/vsimem/testsync.txt')
# Directory copying
gdal.VSICurlClearCache()
gdal.Mkdir('/vsimem/subdir', 0)
gdal.FileFromMemBuffer('/vsimem/subdir/testsync.txt', 'foo')
handler = webserver.SequentialHandler()
handler.add('GET', '/out/', 200, {},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix/>
<Marker/>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>testsync.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>3</Size>
<ETag>"acbd18db4cc2f85cedef654fccc4a4d8"</ETag>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/subdir/', '/vsis3/out', options=options)
gdal.RmdirRecursive('/vsimem/subdir')
###############################################################################
# Test vsisync() with SYNC_STRATEGY=TIMESTAMP
def test_vsis3_sync_timestamp():
if gdaltest.webserver_port == 0:
pytest.skip()
options = ['SYNC_STRATEGY=TIMESTAMP']
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
# S3 to local: S3 file is older -> download
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# S3 to local: S3 file is newer -> do nothing
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# Local to S3: S3 file is older -> upload
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('PUT', '/out/testsync.txt', 200)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
# Local to S3: S3 file is newer -> do nothing
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
gdal.Unlink('/vsimem/testsync.txt')
###############################################################################
# Test vsisync() with SYNC_STRATEGY=OVERWRITE
def test_vsis3_sync_overwrite():
if gdaltest.webserver_port == 0:
pytest.skip()
options = ['SYNC_STRATEGY=OVERWRITE']
gdal.FileFromMemBuffer('/vsimem/testsync.txt', 'foo')
# S3 to local: S3 file is newer
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/out/testsync.txt', '/vsimem/',
options=options)
# Local to S3: S3 file is newer
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/out/testsync.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 2037 00:00:01 GMT' }, "foo")
handler.add('PUT', '/out/testsync.txt', 200)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsimem/testsync.txt', '/vsis3/out/testsync.txt',
options=options)
gdal.Unlink('/vsimem/testsync.txt')
###############################################################################
# Test vsisync() with source and target in /vsis3
def test_vsis3_sync_source_target_in_vsis3():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
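# When both source and target are on /vsis3/, Sync() uses a server-side copy
# (PUT with an x-amz-copy-source header) instead of downloading and re-uploading,
# as the custom PUT handler below verifies.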
handler = webserver.SequentialHandler()
handler.add('GET', '/in/testsync.txt', 200,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
handler.add('GET', '/out/', 200)
handler.add('GET', '/out/testsync.txt', 200,
{ 'Content-Length' : '3',
'Last-Modified': 'Mon, 01 Jan 1970 00:00:01 GMT' }, "foo")
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/in/testsync.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/out/testsync.txt', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.Sync( '/vsis3/in/testsync.txt', '/vsis3/out/')
###############################################################################
# Test rename
def test_vsis3_fake_rename():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
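# Rename() on /vsis3/ is emulated as a server-side copy (PUT with
# x-amz-copy-source) followed by a DELETE of the source object; the handlers
# below check both requests.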
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3' }, "foo")
handler.add('GET', '/test/target.txt', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target.txt%2F', 200)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/test/source.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target.txt', custom_method=method)
handler.add('DELETE', '/test/source.txt', 204)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test/target.txt') == 0
###############################################################################
# Test renaming a directory
def test_vsis3_fake_rename_dir():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source_dir', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix>source_dir/</Prefix>
<Contents>
<Key>source_dir/test.txt</Key>
<LastModified>1970-01-01T00:00:01.000Z</LastModified>
<Size>3</Size>
</Contents>
</ListBucketResult>
""")
handler.add('GET', '/test/target_dir/', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=target_dir%2F', 404)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target_dir/', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '0':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
if request.headers['x-amz-copy-source'] != '/test/source_dir/test.txt':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test/target_dir/test.txt', custom_method=method)
handler.add('DELETE', '/test/source_dir/test.txt', 204)
handler.add('GET', '/test/source_dir/', 404)
handler.add('GET', '/test/?delimiter=%2F&max-keys=100&prefix=source_dir%2F', 404)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source_dir', '/vsis3/test/target_dir') == 0
###############################################################################
# Test that renaming onto an existing directory is not allowed
def test_vsis3_fake_rename_on_existing_dir():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/test/source.txt', 206,
{ 'Content-Length' : '3',
'Content-Range': 'bytes 0-2/3' }, "foo")
handler.add('GET', '/test_target_dir/', 200)
with webserver.install_http_handler(handler):
assert gdal.Rename( '/vsis3/test/source.txt', '/vsis3/test_target_dir') == -1
###############################################################################
# Test Sync() with multithreaded upload and CHUNK_SIZE
def test_vsis3_fake_sync_multithreaded_upload_chunk_size():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
def cbk(pct, _, tab):
assert pct >= tab[0]
tab[0] = pct
return True
gdal.Mkdir('/vsimem/test', 0)
gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n')
tab = [ -1 ]
handler = webserver.SequentialHandler()
handler.add('GET', '/test_bucket/?prefix=test%2F', 200)
handler.add('GET', '/test_bucket/test', 404)
handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200)
handler.add('GET', '/test_bucket/', 200)
handler.add('GET', '/test_bucket/test/', 404)
handler.add('PUT', '/test_bucket/test/', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '1':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"second_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=2&uploadId=my_id', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '186':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
content = request.rfile.read(186).decode('ascii')
if content != """<CompleteMultipartUpload>
<Part>
<PartNumber>1</PartNumber><ETag>"first_etag"</ETag></Part>
<Part>
<PartNumber>2</PartNumber><ETag>"second_etag"</ETag></Part>
</CompleteMultipartUpload>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('POST', '/test_bucket/test/foo?uploadId=my_id', custom_method=method)
with gdaltest.config_option('VSIS3_SIMULATE_THREADING', 'YES'):
with webserver.install_http_handler(handler):
assert gdal.Sync('/vsimem/test',
'/vsis3/test_bucket',
options=['NUM_THREADS=1', 'CHUNK_SIZE=3'],
callback=cbk, callback_data=tab)
assert tab[0] == 1.0
gdal.RmdirRecursive('/vsimem/test')
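###############################################################################
# Test that a failed part upload during Sync() aborts the multipart upload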
def test_vsis3_fake_sync_multithreaded_upload_chunk_size_failure():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
gdal.Mkdir('/vsimem/test', 0)
gdal.FileFromMemBuffer('/vsimem/test/foo', 'foo\n')
handler = webserver.SequentialHandler()
handler.add('GET', '/test_bucket/?prefix=test%2F', 200)
handler.add('GET', '/test_bucket/test', 404)
handler.add('GET', '/test_bucket/?delimiter=%2F&max-keys=100&prefix=test%2F', 200)
handler.add('GET', '/test_bucket/', 200)
handler.add('GET', '/test_bucket/test/', 404)
handler.add('PUT', '/test_bucket/test/', 200)
def method(request):
request.protocol_version = 'HTTP/1.1'
response = '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult><UploadId>my_id</UploadId></InitiateMultipartUploadResult>'
request.send_response(200)
request.send_header('Content-type', 'application/xml')
request.send_header('Content-Length', len(response))
request.end_headers()
request.wfile.write(response.encode('ascii'))
handler.add('POST', '/test_bucket/test/foo?uploads', custom_method=method)
def method(request):
if request.headers['Content-Length'] != '3':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('ETag', '"first_etag"')
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_bucket/test/foo?partNumber=1&uploadId=my_id', 400)
handler.add('DELETE', '/test_bucket/test/foo?uploadId=my_id', 204)
with gdaltest.config_options({'VSIS3_SIMULATE_THREADING': 'YES',
'VSIS3_SYNC_MULTITHREADING': 'NO'}):
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert not gdal.Sync('/vsimem/test',
'/vsis3/test_bucket',
options=['NUM_THREADS=1', 'CHUNK_SIZE=3'])
gdal.RmdirRecursive('/vsimem/test')
###############################################################################
# Test reading/writing metadata
def test_vsis3_metadata():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
# Read HEADERS domain
handler = webserver.SequentialHandler()
handler.add('GET', '/test_metadata/foo.txt', 200, {'foo': 'bar'})
with webserver.install_http_handler(handler):
md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'HEADERS')
assert 'foo' in md and md['foo'] == 'bar'
# Read TAGS domain
handler = webserver.SequentialHandler()
handler.add('GET', '/test_metadata/foo.txt?tagging', 200, {},
"""<Tagging><TagSet><Tag><Key>foo</Key><Value>bar</Value></Tag></TagSet></Tagging>""")
with webserver.install_http_handler(handler):
md = gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'TAGS')
assert 'foo' in md and md['foo'] == 'bar'
# Write HEADERS domain
handler = webserver.SequentialHandler()
def method(request):
if request.headers['foo'] != 'bar':
sys.stderr.write('Did not get expected headers: %s\n' % str(request.headers))
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.end_headers()
handler.add('PUT', '/test_metadata/foo.txt', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'HEADERS')
# Write TAGS domain
handler = webserver.SequentialHandler()
def method(request):
request.wfile.write('HTTP/1.1 100 Continue\r\n\r\n'.encode('ascii'))
content = request.rfile.read(int(request.headers['Content-Length'])).decode('ascii')
if content != """<?xml version="1.0" encoding="UTF-8"?>
<Tagging xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<TagSet>
<Tag>
<Key>foo</Key>
<Value>bar</Value>
</Tag>
</TagSet>
</Tagging>
""":
sys.stderr.write('Did not get expected content: %s\n' % content)
request.send_response(400)
request.send_header('Content-Length', 0)
request.end_headers()
return
request.send_response(200)
request.send_header('Content-Length', 0)
request.end_headers()
handler.add('PUT', '/test_metadata/foo.txt?tagging', custom_method=method)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {'foo': 'bar'}, 'TAGS')
# Write TAGS domain (wiping tags)
handler = webserver.SequentialHandler()
handler.add('DELETE', '/test_metadata/foo.txt?tagging', 204)
with webserver.install_http_handler(handler):
assert gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'TAGS')
# Error case
with gdaltest.error_handler():
assert gdal.GetFileMetadata('/vsis3/test_metadata/foo.txt', 'UNSUPPORTED') == {}
# Error case
with gdaltest.error_handler():
assert not gdal.SetFileMetadata('/vsis3/test_metadata/foo.txt', {}, 'UNSUPPORTED')
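###############################################################################
# Illustrative sketch (not part of the original test suite): typical
# application-side use of the metadata API exercised above. The bucket/object
# path and the header/tag values are hypothetical; 'HEADERS' and 'TAGS' are
# the domains covered by the test.
def _sketch_metadata_roundtrip(path='/vsis3/some_bucket/some_object.txt'):
    from osgeo import gdal  # mirrors the module-level import of this test file
    gdal.SetFileMetadata(path, {'x-amz-storage-class': 'STANDARD_IA'}, 'HEADERS')
    gdal.SetFileMetadata(path, {'project': 'demo'}, 'TAGS')
    headers = gdal.GetFileMetadata(path, 'HEADERS')
    tags = gdal.GetFileMetadata(path, 'TAGS')
    return headers, tags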
###############################################################################
# Test that directory listings are taken into account to avoid useless
# requests
def test_vsis3_no_useless_requests():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/no_useless_requests/?delimiter=%2F', 200,
{'Content-type': 'application/xml'},
"""<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult>
<Prefix></Prefix>
<Contents>
</Contents>
</ListBucketResult>
""")
with webserver.install_http_handler(handler):
assert gdal.VSIFOpenL('/vsis3/no_useless_requests/foo.txt', 'rb') is None
assert gdal.VSIFOpenL('/vsis3/no_useless_requests/bar.txt', 'rb') is None
assert gdal.VSIStatL('/vsis3/no_useless_requests/baz.txt') is None
###############################################################################
# Test w+ access
def test_vsis3_random_write():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.error_handler():
assert gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b') is None
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
assert gdal.VSIFWriteL('foo', 3, 1, f) == 1
assert gdal.VSIFSeekL(f, 0, 0) == 0
assert gdal.VSIFReadL(3, 1, f).decode('ascii') == 'foo'
assert gdal.VSIFEofL(f) == 0
assert gdal.VSIFTellL(f) == 3
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.bin', 200, {}, expected_body=b'foo')
with webserver.install_http_handler(handler):
assert gdal.VSIFCloseL(f) == 0
###############################################################################
# Test w+ access
def test_vsis3_random_write_failure_1():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.bin', 400, {})
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert gdal.VSIFCloseL(f) != 0
###############################################################################
# Test w+ access
def test_vsis3_random_write_failure_2():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
with gdaltest.config_option('VSIS3_CHUNK_SIZE_BYTES', '1'):
f = gdal.VSIFOpenL('/vsis3/random_write/test.bin', 'w+b')
assert f
assert gdal.VSIFWriteL('foo', 3, 1, f) == 1
handler = webserver.SequentialHandler()
handler.add('POST', '/random_write/test.bin?uploads', 400, {})
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
assert gdal.VSIFCloseL(f) != 0
###############################################################################
# Test w+ access
def test_vsis3_random_write_gtiff_create_copy():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('GET', '/random_write/test.tif', 404, {})
handler.add('GET', '/random_write/?delimiter=%2F&max-keys=100&prefix=test.tif%2F', 404, {})
handler.add('GET', '/random_write/?delimiter=%2F', 404, {})
src_ds = gdal.Open('data/byte.tif')
with gdaltest.config_option('CPL_VSIL_USE_TEMP_FILE_FOR_RANDOM_WRITE', 'YES'):
with webserver.install_http_handler(handler):
ds = gdal.GetDriverByName('GTiff').CreateCopy('/vsis3/random_write/test.tif', src_ds)
assert ds is not None
handler = webserver.SequentialHandler()
handler.add('PUT', '/random_write/test.tif', 200, {})
with webserver.install_http_handler(handler):
ds = None
###############################################################################
# Read credentials from simulated ~/.aws/credentials
def test_vsis3_read_credentials_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
###############################################################################
# Read credentials from simulated ~/.aws/config
def test_vsis3_read_config_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config
def test_vsis3_read_credentials_config_file():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config with
# a non default profile
def test_vsis3_read_credentials_config_file_non_default_profile(tmpdir):
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None)
gdal.SetConfigOption('AWS_CONFIG_FILE', None)
gdal.SetConfigOption('AWS_PROFILE', 'myprofile')
os_aws = tmpdir.mkdir(".aws")
gdal.VSICurlClearCache()
os_aws.join('credentials').write("""
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[myprofile]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[default]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
os_aws.join('config').write("""
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[profile myprofile]
region = us-east-1
[default]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
with gdaltest.config_option(
'USERPROFILE' if sys.platform == 'win32' else 'HOME', str(tmpdir)
):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('AWS_PROFILE', '')
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config
def test_vsis3_read_credentials_config_file_inconsistent():
if gdaltest.webserver_port == 0:
pytest.skip()
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '/vsimem/aws_config')
gdal.VSICurlClearCache()
gdal.FileFromMemBuffer('/vsimem/aws_credentials', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID
aws_secret_access_key = AWS_SECRET_ACCESS_KEY
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.FileFromMemBuffer('/vsimem/aws_config', """
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
[default]
aws_access_key_id = AWS_ACCESS_KEY_ID_inconsistent
aws_secret_access_key = AWS_SECRET_ACCESS_KEY_inconsistent
region = us-east-1
[unrelated]
aws_access_key_id = foo
aws_secret_access_key = bar
""")
gdal.ErrorReset()
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
assert gdal.GetLastErrorMsg() != ''
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.Unlink('/vsimem/aws_credentials')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.Unlink('/vsimem/aws_config')
###############################################################################
# Read credentials from simulated EC2 instance
def test_vsis3_read_credentials_ec2_imdsv2():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "3000-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Set a fake URL to check that credentials re-use works
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
handler = webserver.SequentialHandler()
handler.add('GET', '/s3_fake_bucket/bar', 200, {}, 'bar')
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/bar')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'bar'
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
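###############################################################################
# Illustrative sketch (not part of the original test suite): the two-step
# IMDSv2 exchange that the handlers above simulate. The default endpoint URL
# and the helper itself are assumptions for illustration only; GDAL performs
# the equivalent requests internally when fetching EC2 instance credentials.
def _sketch_imdsv2_credentials(root='http://169.254.169.254'):
    import json
    import urllib.request
    # 1. PUT /latest/api/token with a TTL header -> short-lived session token
    req = urllib.request.Request(
        root + '/latest/api/token', method='PUT',
        headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
    token = urllib.request.urlopen(req).read().decode('ascii')

    def _get(path):
        # 2. every metadata GET carries the token back in X-aws-ec2-metadata-token
        req = urllib.request.Request(
            root + path, headers={'X-aws-ec2-metadata-token': token})
        return urllib.request.urlopen(req).read().decode('ascii')

    profile = _get('/latest/meta-data/iam/security-credentials/').strip()
    return json.loads(_get('/latest/meta-data/iam/security-credentials/' + profile))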
###############################################################################
# Read credentials from simulated EC2 instance that only supports IMDSv1
def test_vsis3_read_credentials_ec2_imdsv1():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 403, {},
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
unexpected_headers=['X-aws-ec2-metadata-token'])
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "3000-01-01T00:00:00Z"
}""",
unexpected_headers=['X-aws-ec2-metadata-token'])
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
###############################################################################
# Read credentials from simulated EC2 instance with expiration of the
# cached credentials
def test_vsis3_read_credentials_ec2_expiration():
if gdaltest.webserver_port == 0:
pytest.skip()
if sys.platform not in ('linux', 'linux2', 'win32'):
pytest.skip()
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', '')
gdal.SetConfigOption('AWS_CONFIG_FILE', '')
gdal.SetConfigOption('AWS_SECRET_ACCESS_KEY', '')
gdal.SetConfigOption('AWS_ACCESS_KEY_ID', '')
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d' % gdaltest.webserver_port)
# Disable the hypervisor-related check used to detect whether we are really on EC2
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', 'NO')
gdal.VSICurlClearCache()
handler = webserver.SequentialHandler()
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/', 200, {}, 'myprofile',
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "1970-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken'})
handler.add('PUT', '/latest/api/token', 200, {}, 'mytoken2',
expected_headers={'X-aws-ec2-metadata-token-ttl-seconds': '10'})
handler.add('GET', '/latest/meta-data/iam/security-credentials/myprofile', 200, {},
"""{
"AccessKeyId": "AWS_ACCESS_KEY_ID",
"SecretAccessKey": "AWS_SECRET_ACCESS_KEY",
"Expiration": "1970-01-01T00:00:00Z"
}""",
expected_headers={'X-aws-ec2-metadata-token': 'mytoken2'})
handler.add('GET', '/s3_fake_bucket/resource', custom_method=get_s3_fake_bucket_resource_method)
with webserver.install_http_handler(handler):
f = open_for_read('/vsis3/s3_fake_bucket/resource')
assert f is not None
data = gdal.VSIFReadL(1, 4, f).decode('ascii')
gdal.VSIFCloseL(f)
assert data == 'foo'
# Set a fake URL to demonstrate we try to re-fetch credentials
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL',
'http://localhost:%d/invalid' % gdaltest.webserver_port)
handler = webserver.SequentialHandler()
handler.add('PUT', '/invalid/latest/api/token', 404)
handler.add('GET', '/invalid/latest/meta-data/iam/security-credentials/myprofile', 404)
with webserver.install_http_handler(handler):
with gdaltest.error_handler():
f = open_for_read('/vsis3/s3_fake_bucket/bar')
assert f is None
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', '')
gdal.SetConfigOption('CPL_AWS_AUTODETECT_EC2', None)
###############################################################################
def test_vsis3_stop_webserver():
if gdaltest.webserver_port == 0:
pytest.skip()
# Clearcache needed to close all connections, since the Python server
# can only handle one connection at a time
gdal.VSICurlClearCache()
webserver.server_stop(gdaltest.webserver_process, gdaltest.webserver_port)
###############################################################################
# Nominal cases (require valid credentials)
def test_vsis3_extra_1():
if not gdaltest.built_against_curl():
pytest.skip()
credentials_filename = gdal.GetConfigOption('HOME',
gdal.GetConfigOption('USERPROFILE', '')) + '/.aws/credentials'
# Either a bucket name or bucket/filename
s3_resource = gdal.GetConfigOption('S3_RESOURCE')
if not os.path.exists(credentials_filename):
if gdal.GetConfigOption('AWS_SECRET_ACCESS_KEY') is None:
pytest.skip('Missing AWS_SECRET_ACCESS_KEY')
elif gdal.GetConfigOption('AWS_ACCESS_KEY_ID') is None:
pytest.skip('Missing AWS_ACCESS_KEY_ID')
if s3_resource is None:
pytest.skip('Missing S3_RESOURCE')
if '/' not in s3_resource:
path = '/vsis3/' + s3_resource
statres = gdal.VSIStatL(path)
assert statres is not None and stat.S_ISDIR(statres.mode), \
('%s is not a valid bucket' % path)
readdir = gdal.ReadDir(path)
assert readdir is not None, 'ReadDir() should not return empty list'
for filename in readdir:
if filename != '.':
subpath = path + '/' + filename
assert gdal.VSIStatL(subpath) is not None, \
('Stat(%s) should not return an error' % subpath)
unique_id = 'vsis3_test'
subpath = path + '/' + unique_id
ret = gdal.Mkdir(subpath, 0)
assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
readdir = gdal.ReadDir(path)
assert unique_id in readdir, \
('ReadDir(%s) should contain %s' % (path, unique_id))
ret = gdal.Mkdir(subpath, 0)
assert ret != 0, ('Mkdir(%s) repeated should return an error' % subpath)
ret = gdal.Rmdir(subpath)
assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)
readdir = gdal.ReadDir(path)
assert unique_id not in readdir, \
('ReadDir(%s) should not contain %s' % (path, unique_id))
ret = gdal.Rmdir(subpath)
assert ret != 0, ('Rmdir(%s) repeated should return an error' % subpath)
ret = gdal.Mkdir(subpath, 0)
assert ret >= 0, ('Mkdir(%s) should not return an error' % subpath)
f = gdal.VSIFOpenL(subpath + '/test.txt', 'wb')
assert f is not None
gdal.VSIFWriteL('hello', 1, 5, f)
gdal.VSIFCloseL(f)
ret = gdal.Rmdir(subpath)
assert ret != 0, \
('Rmdir(%s) on non empty directory should return an error' % subpath)
f = gdal.VSIFOpenL(subpath + '/test.txt', 'rb')
assert f is not None
data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
assert data == 'hello'
gdal.VSIFCloseL(f)
assert gdal.Rename(subpath + '/test.txt', subpath + '/test2.txt') == 0
f = gdal.VSIFOpenL(subpath + '/test2.txt', 'rb')
assert f is not None
data = gdal.VSIFReadL(1, 5, f).decode('utf-8')
assert data == 'hello'
gdal.VSIFCloseL(f)
ret = gdal.Unlink(subpath + '/test2.txt')
assert ret >= 0, \
('Unlink(%s) should not return an error' % (subpath + '/test2.txt'))
ret = gdal.Rmdir(subpath)
assert ret >= 0, ('Rmdir(%s) should not return an error' % subpath)
return
f = open_for_read('/vsis3/' + s3_resource)
assert f is not None, ('cannot open %s' % ('/vsis3/' + s3_resource))
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
# Same with /vsis3_streaming/
f = open_for_read('/vsis3_streaming/' + s3_resource)
assert f is not None
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
if False: # pylint: disable=using-constant-test
# we actually try to read at read() time and bSetError = false
# Invalid bucket : "The specified bucket does not exist"
gdal.ErrorReset()
f = open_for_read('/vsis3/not_existing_bucket/foo')
with gdaltest.error_handler():
gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert gdal.VSIGetLastErrorMsg() != ''
# Invalid resource
gdal.ErrorReset()
f = open_for_read('/vsis3_streaming/' + gdal.GetConfigOption('S3_RESOURCE') + '/invalid_resource.baz')
assert f is None, gdal.VSIGetLastErrorMsg()
# Test GetSignedURL()
signed_url = gdal.GetSignedURL('/vsis3/' + s3_resource)
f = open_for_read('/vsicurl_streaming/' + signed_url)
assert f is not None
ret = gdal.VSIFReadL(1, 1, f)
gdal.VSIFCloseL(f)
assert len(ret) == 1
###############################################################################
def test_vsis3_cleanup():
for var in gdaltest.aws_vars:
gdal.SetConfigOption(var, gdaltest.aws_vars[var])
gdal.SetConfigOption('CPL_AWS_CREDENTIALS_FILE', None)
gdal.SetConfigOption('AWS_CONFIG_FILE', None)
gdal.SetConfigOption('CPL_AWS_EC2_API_ROOT_URL', None)
| [per-file metric columns omitted]
| next record -- blob: 8a3e99f0293f853274afed91ad78cb5b4d2be8d4, 8,379 bytes, Python (py)
| path: cadnano25/cadnano/part/xovercmds.py | repo: amylittleyang/OtraCAD | head: 126360719704caf6850d42565fe96be53b66a22d | license: MIT
from cadnano.cnproxy import UndoCommand
from cadnano.strand import Strand
from cadnano import getBatch
import cadnano.preferences as prefs
import random
class CreateXoverCommand(UndoCommand):
"""
Creates a Xover from the 3' end of strand5p to the 5' end of strand3p
this needs to
1. preserve the old oligo of strand3p
2. install the crossover
3. apply the strand5p oligo to the strand3p
"""
def __init__(self, part, strand5p, strand5p_idx, strand3p, strand3p_idx, update_oligo=True):
super(CreateXoverCommand, self).__init__("create xover")
self._part = part
self._strand5p = strand5p
self._strand5p_idx = strand5p_idx
self._strand3p = strand3p
self._strand3p_idx = strand3p_idx
self._old_oligo3p = strand3p.oligo()
self._update_oligo = update_oligo
# end def
def redo(self):
part = self._part
strand5p = self._strand5p
strand5p_idx = self._strand5p_idx
strand3p = self._strand3p
strand3p_idx = self._strand3p_idx
olg5p = strand5p.oligo()
old_olg3p = self._old_oligo3p
# 0. Deselect the involved strands
doc = strand5p.document()
doc.removeStrandFromSelection(strand5p)
doc.removeStrandFromSelection(strand3p)
if self._update_oligo:
# Test for Loopiness
if olg5p == strand3p.oligo():
olg5p.setLoop(True)
else:
# 1. update preserved oligo length
olg5p.incrementLength(old_olg3p.length())
# 2. Remove the old oligo and apply the 5' oligo to the 3' strand
old_olg3p.removeFromPart()
for strand in strand3p.generator3pStrand():
# emits strandHasNewOligoSignal
Strand.setOligo(strand, olg5p)
# 3. install the Xover
strand5p.setConnection3p(strand3p)
strand3p.setConnection5p(strand5p)
#print('strand5p = %s, connection3p = %s'%(strand5p._name, strand3p._name))
ss5 = strand5p.strandSet()
vh5p = ss5.virtualHelix()
st5p = ss5.strandType()
ss3 = strand3p.strandSet()
vh3p = ss3.virtualHelix()
st3p = ss3.strandType()
part.partActiveVirtualHelixChangedSignal.emit(part, vh5p)
# strand5p.strandXover5pChangedSignal.emit(strand5p, strand3p)
# if self._update_oligo and not getBatch():
if self._update_oligo:
strand5p.strandUpdateSignal.emit(strand5p)
strand3p.strandUpdateSignal.emit(strand3p)
# end def
def undo(self):
part = self._part
strand5p = self._strand5p
strand5p_idx = self._strand5p_idx
strand3p = self._strand3p
strand3p_idx = self._strand3p_idx
old_olg3p = self._old_oligo3p
olg5p = strand5p.oligo()
# 0. Deselect the involved strands
doc = strand5p.document()
doc.removeStrandFromSelection(strand5p)
doc.removeStrandFromSelection(strand3p)
# 1. uninstall the Xover
strand5p.setConnection3p(None)
strand3p.setConnection5p(None)
if self._update_oligo:
# Test Loopiness
if old_olg3p.isLoop():
old_olg3p.setLoop(False)
else:
# 2. restore the modified oligo length
olg5p.decrementLength(old_olg3p.length())
# 3. apply the old oligo to strand3p
old_olg3p.addToPart(part)
for strand in strand3p.generator3pStrand():
# emits strandHasNewOligoSignal
Strand.setOligo(strand, old_olg3p)
ss5 = strand5p.strandSet()
vh5p = ss5.virtualHelix()
st5p = ss5.strandType()
ss3 = strand3p.strandSet()
vh3p = ss3.virtualHelix()
st3p = ss3.strandType()
part.partActiveVirtualHelixChangedSignal.emit(part, vh5p)
# strand5p.strandXover5pChangedSignal.emit(strand5p, strand3p)
if self._update_oligo:
strand5p.strandUpdateSignal.emit(strand5p)
strand3p.strandUpdateSignal.emit(strand3p)
# end def
# end class
class RemoveXoverCommand(UndoCommand):
"""
Removes a Xover from the 3' end of strand5p to the 5' end of strand3p
this needs to:
1. preserve the old oligo of strand3p
2. uninstall the crossover
3. update the oligo length
4. apply the new strand3p oligo to the strand3p
"""
def __init__(self, part, strand5p, strand3p):
super(RemoveXoverCommand, self).__init__("remove xover")
self._part = part
self._strand5p = strand5p
self._strand5p_idx = strand5p.idx3Prime()
self._strand3p = strand3p
self._strand3p_idx = strand3p.idx5Prime()
n_o3p = self._new_oligo3p = strand3p.oligo().shallowCopy()
colorList = prefs.STAP_COLORS if strand5p.strandSet().isStaple() \
else prefs.SCAF_COLORS
n_o3p.setColor(random.choice(colorList).name())
n_o3p.setLength(0)
for strand in strand3p.generator3pStrand():
n_o3p.incrementLength(strand.totalLength())
# end for
n_o3p.setStrand5p(strand3p)
self._isLoop = strand3p.oligo().isLoop()
# end def
def redo(self):
part = self._part
strand5p = self._strand5p
strand5p_idx = self._strand5p_idx
strand3p = self._strand3p
strand3p_idx = self._strand3p_idx
new_olg3p = self._new_oligo3p
olg5p = self._strand5p.oligo()
# 0. Deselect the involved strands
doc = strand5p.document()
doc.removeStrandFromSelection(strand5p)
doc.removeStrandFromSelection(strand3p)
# 1. uninstall the Xover
strand5p.setConnection3p(None)
strand3p.setConnection5p(None)
if self._isLoop:
olg5p.setLoop(False)
olg5p.setStrand5p(strand3p)
else:
# 2. restore the modified oligo length
olg5p.decrementLength(new_olg3p.length())
# 3. apply the old oligo to strand3p
new_olg3p.addToPart(part)
for strand in strand3p.generator3pStrand():
# emits strandHasNewOligoSignal
Strand.setOligo(strand, new_olg3p)
ss5 = strand5p.strandSet()
vh5p = ss5.virtualHelix()
st5p = ss5.strandType()
ss3 = strand3p.strandSet()
vh3p = ss3.virtualHelix()
st3p = ss3.strandType()
part.partActiveVirtualHelixChangedSignal.emit(part, vh5p)
# strand5p.strandXover5pChangedSignal.emit(strand5p, strand3p)
strand5p.strandUpdateSignal.emit(strand5p)
strand3p.strandUpdateSignal.emit(strand3p)
# end def
def undo(self):
part = self._part
strand5p = self._strand5p
strand5p_idx = self._strand5p_idx
strand3p = self._strand3p
strand3p_idx = self._strand3p_idx
olg5p = strand5p.oligo()
new_olg3p = self._new_oligo3p
# 0. Deselect the involved strands
doc = strand5p.document()
doc.removeStrandFromSelection(strand5p)
doc.removeStrandFromSelection(strand3p)
if self._isLoop:
olg5p.setLoop(True)
# No need to restore whatever the old Oligo._strand5p was
else:
# 1. update preserved oligo length
olg5p.incrementLength(new_olg3p.length())
# 2. Remove the old oligo and apply the 5' oligo to the 3' strand
new_olg3p.removeFromPart()
for strand in strand3p.generator3pStrand():
# emits strandHasNewOligoSignal
Strand.setOligo(strand, olg5p)
# end else
# 3. install the Xover
strand5p.setConnection3p(strand3p)
strand3p.setConnection5p(strand5p)
ss5 = strand5p.strandSet()
vh5p = ss5.virtualHelix()
st5p = ss5.strandType()
ss3 = strand3p.strandSet()
vh3p = ss3.virtualHelix()
st3p = ss3.strandType()
part.partActiveVirtualHelixChangedSignal.emit(part, vh5p)
# strand5p.strandXover5pChangedSignal.emit(strand5p, strand3p)
strand5p.strandUpdateSignal.emit(strand5p)
strand3p.strandUpdateSignal.emit(strand3p)
# end def
# end class
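# Illustrative sketch (not part of the original module): how these commands are
# typically driven. The part/strand objects are hypothetical stand-ins; in
# cadnano the command would normally be pushed onto a Qt-style undo stack
# rather than invoked directly, but UndoCommand exposes redo()/undo() either
# way:
#
#     cmd = CreateXoverCommand(part, strand5p, idx5p, strand3p, idx3p)
#     cmd.redo()   # installs the crossover and re-threads the 5' oligo
#     cmd.undo()   # removes the crossover and restores the original oligo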
| [per-file metric columns omitted]
| next record -- blob: 8a5251a1a46d1a18949b3dd2b2c26f7e8e32aa06, 7,795 bytes, Python (py)
| path: readgadget/modules/rs_structs.py | repo: danielmarostica/pygadgetreader | head: 977949da7fcb6585f3e0270019d369c6967b317c | license: BSD-3-Clause
| stars: 6 (2020-09-02 .. 2021-09-24) | issues: 1 (2021-09-24 .. 2021-09-25) | forks: 1 (2020-11-18)
import numpy as np
import sys
## ROCKSTAR ##
halostruct1 = np.dtype([('id',np.int64),
('pos',np.float32,(6,)),
('corevel',np.float32,(3,)),
('bulkvel',np.float32,(3,)),
('m',np.float32),
('r',np.float32),
('child_r',np.float32),
('vmax_r',np.float32),
('mgrav',np.float32),
('vmax',np.float32),
('rvmax',np.float32),
('rs',np.float32),
('klypin_rs',np.float32),
('vrms',np.float32),
('J',np.float32,(3,)),
('energy',np.float32),
('spin',np.float32),
('alt_m',np.float32,(4,)),
('Xoff',np.float32),
('Voff',np.float32),
('b_to_a',np.float32),
('c_to_a',np.float32),
('A',np.float32,(3,)),
('b_to_a2',np.float32),
('c_to_a2',np.float32),
('A2',np.float32,(3,)),
('bullock_spin',np.float32),
('kin_to_pot',np.float32),
('m_pe_b',np.float32),
('m_pe_d',np.float32),
('dummy1',np.float32), ## ALIGNMENT
('num_p',np.int64),
('num_child_particles',np.int64),
('p_start',np.int64),
('desc',np.int64),
('flags',np.int64),
('n_core',np.int64),
('dummy2',np.float32), ## ALIGNMENT
('min_pos_err',np.float32),
('min_vel_err',np.float32),
('min_bulkvel_err',np.float32)
])
halostruct2 = np.dtype([('id',np.int64),
('pos',np.float32,(6,)),
('corevel',np.float32,(3,)),
('bulkvel',np.float32,(3,)),
('m',np.float32),
('r',np.float32),
('child_r',np.float32),
('vmax_r',np.float32),
('mgrav',np.float32),
('vmax',np.float32),
('rvmax',np.float32),
('rs',np.float32),
('klypin_rs',np.float32),
('vrms',np.float32),
('J',np.float32,(3,)),
('energy',np.float32),
('spin',np.float32),
('alt_m',np.float32,(4,)),
('Xoff',np.float32),
('Voff',np.float32),
('b_to_a',np.float32),
('c_to_a',np.float32),
('A',np.float32,(3,)),
('b_to_a2',np.float32),
('c_to_a2',np.float32),
('A2',np.float32,(3,)),
('bullock_spin',np.float32),
('kin_to_pot',np.float32),
('m_pe_b',np.float32),
('m_pe_d',np.float32),
('halfmass_radius',np.float32),
#('dummy1',np.float32), ## ALIGNMENT
('num_p',np.int64),
('num_child_particles',np.int64),
('p_start',np.int64),
('desc',np.int64),
('flags',np.int64),
('n_core',np.int64),
('dummy2',np.float32), ## ALIGNMENT
('min_pos_err',np.float32),
('min_vel_err',np.float32),
('min_bulkvel_err',np.float32)
])
## ROCKSTAR-GALAXIES ##
halogalaxystruct1 = np.dtype([('id',np.int64),
('pos',np.float32,(6,)),
('corevel',np.float32,(3,)),
('bulkvel',np.float32,(3,)),
('m',np.float32),
('r',np.float32),
('child_r',np.float32),
('vmax_r',np.float32),
('mgrav',np.float32),
('vmax',np.float32),
('rvmax',np.float32),
('rs',np.float32),
('klypin_rs',np.float32),
('vrms',np.float32),
('J',np.float32,(3,)),
('energy',np.float32),
('spin',np.float32),
('alt_m',np.float32,(4,)),
('Xoff',np.float32),
('Voff',np.float32),
('b_to_a',np.float32),
('c_to_a',np.float32),
('A',np.float32,(3,)),
('b_to_a2',np.float32),
('c_to_a2',np.float32),
('A2',np.float32,(3,)),
('bullock_spin',np.float32),
('kin_to_pot',np.float32),
('m_pe_b',np.float32),
('m_pe_d',np.float32),
('dummy1',np.float32), ## ALIGNMENT
('num_p',np.int64),
('num_child_particles',np.int64),
('p_start',np.int64),
('desc',np.int64),
('flags',np.int64),
('n_core',np.int64),
('dummy2',np.float32), ## ALIGNMENT
('min_pos_err',np.float32),
('min_vel_err',np.float32),
('min_bulkvel_err',np.float32),
('type',np.int32),
('sm',np.float32),
('gas',np.float32),
('bh',np.float32),
('peak_density',np.float32),
('av_density',np.float32),
])
def getRSformat(obj):
    if obj.galaxies == 0:
        if obj.format_revision == 0:
            print('OUTDATED ROCKSTAR, PLEASE UPDATE!')
            sys.exit()
        elif obj.format_revision == 1:
            if obj.debug: print('returning halostruct1')
            return halostruct1
        elif obj.format_revision == 2:
            if obj.debug: print('returning halostruct2')
            return halostruct2
        else:
            print('found HALO_FORMAT_REVISION=%d, if this is >2 email me!' %
                  obj.format_revision)
            sys.exit()
    elif obj.galaxies == 1:
        if obj.format_revision == 0:
            print('OUTDATED ROCKSTAR-GALAXIES, PLEASE UPDATE!')
            sys.exit()
        elif obj.format_revision == 1:
            if obj.debug: print('returning halogalaxystruct1')
            return halogalaxystruct1
        else:
            print('found HALO_FORMAT_REVISION=%d, if this is >1 email me!' %
                  obj.format_revision)
            sys.exit()
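# Illustrative sketch (not part of the original module): once getRSformat()
# has selected the matching dtype, a contiguous block of binary halo records
# can be read in a single call. The file name, halo count and the header
# offset are assumptions for illustration; the real reader derives them from
# the ROCKSTAR binary header.
def _sketch_read_halos(fname, nhalos, halostruct=halostruct1, header_bytes=256):
    with open(fname, 'rb') as f:
        f.seek(header_bytes)  # skip the fixed-size file header
        return np.fromfile(f, dtype=halostruct, count=nhalos)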
| [per-file metric columns omitted]
| next record -- blob: 8a56cef3b3f2ca89cec426bc77ad6809415c305d, 1,327 bytes, Python (bzl)
| path: python/library.bzl | repo: robfig/rules_proto | head: 6a85b0e4c3eeddf8863890ef48f2daab7a524ab7 | license: Apache-2.0
load("//python:compile.bzl", "py_proto_compile", "py_grpc_compile")
load("@grpc_py_deps//:requirements.bzl", "all_requirements")
def py_proto_library(**kwargs):
name = kwargs.get("name")
deps = kwargs.get("deps")
verbose = kwargs.get("verbose")
visibility = kwargs.get("visibility")
name_pb = name + "_pb"
py_proto_compile(
name = name_pb,
deps = deps,
visibility = visibility,
verbose = verbose,
)
native.py_library(
name = name,
srcs = [name_pb],
deps = all_requirements, # fixme don't need grpc here
# This magically adds REPOSITORY_NAME/PACKAGE_NAME/{name_pb} to PYTHONPATH
imports = [name_pb],
visibility = visibility,
)
def py_grpc_library(**kwargs):
name = kwargs.get("name")
deps = kwargs.get("deps")
verbose = kwargs.get("verbose")
visibility = kwargs.get("visibility")
name_pb = name + "_pb"
py_grpc_compile(
name = name_pb,
deps = deps,
visibility = visibility,
verbose = verbose,
)
native.py_library(
name = name,
srcs = [name_pb],
deps = all_requirements,
# This magically adds REPOSITORY_NAME/PACKAGE_NAME/{name_pb} to PYTHONPATH
imports = [name_pb],
visibility = visibility,
)
| [per-file metric columns omitted]
| next record -- blob: 8a5d63158988a4154bd4df2b897b694d5cad31f9, 46,478 bytes, Python (py)
| path: alembic/versions/1d092815507a_add_huawei_2g_managedobjects.py | repo: bodastage/bts-database | head: 96df7915621dd46daf55016eedf5cfc84dd0e3a2 | license: Apache-2.0
| stars: 1 (2019-08-30) | issues: 1 (2018-05-30) | forks: 3 (2018-03-10 .. 2019-02-19)
"""Add Huawei 2G managedobjects
Revision ID: 1d092815507a
Revises: 3fa514f1b7a9
Create Date: 2018-02-13 01:38:59.965000
"""
from alembic import op
import sqlalchemy as sa
import datetime
# revision identifiers, used by Alembic.
revision = '1d092815507a'
down_revision = '3fa514f1b7a9'
branch_labels = None
depends_on = None
def upgrade():
managedobjects = sa.sql.table(
'managedobjects',
sa.Column('pk', sa.Integer, sa.Sequence('seq_managedobjects_pk', ), primary_key=True, nullable=False),
sa.Column('name', sa.String(50), nullable=False),
sa.Column('notes', sa.Text),
sa.Column('label', sa.String(200)),
sa.Column('parent_pk', sa.Integer),
sa.Column('affect_level', sa.Integer),
sa.Column('tech_pk', sa.Integer),
sa.Column('vendor_pk', sa.Integer),
sa.Column('modified_by', sa.Integer),
sa.Column('added_by', sa.Integer),
sa.Column('date_added', sa.TIMESTAMP, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow),
sa.Column('date_modified', sa.TIMESTAMP, default=datetime.datetime.utcnow)
)
op.bulk_insert(managedobjects, [
{'name': 'BTSCABINET', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'AITFOTHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'AITFREV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALGCTRLPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMBLKPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMBLKSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMCAPACITY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMLVL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMML', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMOSCISW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMOSCITHRD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMSCRN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ALMSHLD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'APPCERT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ATESTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BFDPROTOSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BOXRPT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BRD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCAISS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCAITFTMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCDSTPA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCEXSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCFCPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCJBF', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCNSPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPCUTYPE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPSGBPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPSSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPSSTAT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPSTCDSCPMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCPSUMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCSIGTRC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCTESTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSCTMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSSGPPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BSSLS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSABISMUXFLOW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSABISPRIMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSABISTROP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSAISS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSALM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSALMFLASHTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSALMFLASHTW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSALMPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSAPMUBP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSAPPCERT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSAUTODLDACTINFO', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBAKPWR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBBMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBINDLOCGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBRD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBREAKPOINT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSBWPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCELLPATCHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCERTCHKTSK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCERTDEPLOY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCERTMK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCERTREQ', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCHNFALLBACK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCLK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCONNECT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCPRIPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCRC4', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCRLPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCTRLEX', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSCTRLLNK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSDEVIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSDHCPSVRIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSDHEUBP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSDSCPMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSE1T1BER', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSEAMRC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSENVALMPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSEQUIPMENT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSESN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSETHOAM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSETHOAMAH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSETHPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSFALLBACK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSFLEXABISPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSFMUABP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSGTRANSPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSGUPWRSHRFP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSIDLETS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSIKECFG', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSINTRXUSPEC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSIPGUARD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSIPRT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSJBF', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLAPDWS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLLDPGLOBAL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLNKBKATTR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLOCGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLOCKBCCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSLSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSMNTMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSMPGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSMPLNK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSOMLBACKUP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSOMLDETECT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSOMLTS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSOTHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSPATCHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSPINGSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSPLRALM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSPSUFP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRELIALOGSWITCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRET', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRETDEVICEDATA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRETSUBUNIT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRINGATTR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRSV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRXU2LOCGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRXUBP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRXUBRD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSRXUCHAIN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSSHARING', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTEMPLATERSC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTHEFTALM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTMA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTMADEVICEDATA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTMASUBUNIT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTRANS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTRCMPR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTRUSTCERT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSTRXBACKUP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSVLAN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSXFC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'BTSXMUFP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CAB', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CCGN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CELLBIND2BTS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CELLGLDSS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CERTCHKTSK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CERTMK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CERTREQ', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CLK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CLKMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CLKSRC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CONNTYPE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'COPTLNK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CPUTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CRLPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'CSPRECTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DEVIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DEVRSVDPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DSCPMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DSP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DSPLVDSMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DXX', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DXXCONNECT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'DXXTSEXGRELATION', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'E1T1', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'EMSIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ENVALMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ETHIP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ETHPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ETHREDPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ETHSWITCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FACFG', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FANSPEED', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FCCOMMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'filefooter', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FTPCLTPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FTPSCLT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FTPSCLTDPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FTPSRVSPD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'FTPSSRV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G2GNCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G3GARFCN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G3GNCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GAFCALMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GALLCELLBLKSTAT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GBSCREDGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELL2GBA1', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELL3GARFCN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLAMRQUL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLBASICPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0, 'affect_level': 1},
{'name': 'GCELLBTSSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCACCESS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCAD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCAMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCTMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCCUTRANSYS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCHMGAD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCHMGBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCONGACALGO', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLCSFBPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLDYNTURNOFF', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLEGPRSPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLEXTMSRPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLFREQ', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLFREQSCAN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLGPRS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLGSMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHO2GBA2', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOAD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOEDBPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOEMG', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOFAST', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOFDDBA2', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOFITPEN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOINTERRATLDB', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOIUO', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOPANT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOPTP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOTDDBA2', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOUTRANFDD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHOUTRANTDD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLHSRPLCUSRIDFMG', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLIBCAII', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLIDLEAD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLIDLEBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLIDLEFDDBA1', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLIDLETDDBA1', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLLCS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLMAGRP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLMAIOPLAN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLMOCN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLNC2PARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLNCRESELECTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLNONSTANDARDBW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLNWCTRLMSRPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLOPTREV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLOSPMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLOTHBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLOTHEXT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLOTHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPRACH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPRIEUTRANSYS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPRIVATEOPTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSABISPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSBASE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSCHM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSCS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSDIFFSERVICE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSI1', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSOTHERPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSPWPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPSSMALLPKTRESBAL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPWR2', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPWR3', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLPWRBASIC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLRESELECTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLRESELECTUTRANTDD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLRESELUTRANFDD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLRSVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSBC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSERVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSOFT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSON', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSRVCC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLSTANDARDOPTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLTA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLTEMPLATERSC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLTMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLTRANPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLUNDPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLVAMOS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLVAMOSPWR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCELLWLAN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCNCFGALMTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCNNODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCNOPERATOR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCNOPERATORREV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCSCHRCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCSCHRSCOPE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GCSFILE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GDSSPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GEXT2GCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GEXT3GCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GEXTLTECELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GFORCESWITCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GHOSTSTATUS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GKPIALMTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GLOBALROUTESW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GLTENCELL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GMRCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GMRSCOPE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GNODEREDCFGCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GNODEREDUNDANCY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GPSCHRCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GPSCHRSCOPE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GPSKPIALMTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GREDGRPHOSTPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GRSVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRX', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXBASE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXCHAN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXCHANHOP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXDEV', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXFC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXHOP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXIUO', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXRLALM', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'GTRXRSVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G_ADJMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G_ADJNODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'G_IPPATH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'HOSTLOGSPD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IDRQTEST', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'INFBRDRESCFG', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'INTBRDPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IPCHK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IPGUARD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IPLOGICPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IPMUX', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'IPRT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'ITWKPIALMTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'L2L3ROUTEPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'LDR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'LICALMTHD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'LICPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'LODCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'LOGLIMIT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'M3DE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'M3LE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'M3LKS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'M3LNK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'M3RT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MDTLCS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MNTMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MOCNPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MSGSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MSP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'MTP3TMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'N7DPC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'NRIMSCMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'NRISGSNMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'NSE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'NSVLLOCAL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OBJALMSHLD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OBJAUTHSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OMUCOMMSVCSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OMUETH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OMUPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OMUPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OPC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OPLOCK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OPSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OPT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OSPWDPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'OTHSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PACKETFILTERALMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PHBMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PORTFLOWCTRLPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PORTOSCCTRLPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PSPREFABISCONGCTRL', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PSUSRRESBIND', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PTPBVC', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PWDPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PWRALMSW', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'PWRPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'QUEUEMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'RSVRES', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'RULELIBVER', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SAUCENTER', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SCCPTMR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SCTPLNK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SCTPPROF', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SCTPSRVPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SCUPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SGSN', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SGSNNODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SNTPCLTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SNTPSRVINFO', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SRCONPATH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SS7PATCHSWITCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SSLAUTHMODE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SSLCONF', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SSLCS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SUBNET', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SUBRACK', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SUBSESSION_NE', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SYNSWITCH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'SYS', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TCPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TCRSVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TNALMPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TNLOADBALANCEPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TNRSVDPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TNSOFTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRANSPATCHPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRANSPHYLNKPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRANSRSVPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRCLOGSPD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRMFACTOR', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRMLOADTH', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRMMAP', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRUSTCERT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TRXBIND2PHYBRD', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'TZ', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'UMTESTPARA', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'USEREVTRTNPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'USRRESBIND', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'VLANID', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'WEBLOGINPOLICY', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
{'name': 'XPUPORT', 'parent_pk': 0, 'vendor_pk': 2, 'tech_pk': 1, 'modified_by': 0, 'added_by': 0},
])
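# downgrade() below reverses the bulk insert above by deleting all managedobjects rows with vendor_pk=2 and tech_pk=1.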
def downgrade():
op.execute("""DELETE FROM managedobjects WHERE vendor_pk = {0} AND tech_pk = {1}""".format(2, 1))
| 103.055432
| 133
| 0.570162
| 7,093
| 46,478
| 3.446074
| 0.068659
| 0.099906
| 0.116843
| 0.249765
| 0.806202
| 0.802316
| 0.799043
| 0.799043
| 0.799043
| 0.799043
| 0
| 0.056081
| 0.180516
| 46,478
| 450
| 134
| 103.284444
| 0.58567
| 0.003378
| 0
| 0.004587
| 0
| 0
| 0.514111
| 0.00136
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004587
| false
| 0
| 0.006881
| 0
| 0.011468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8a64819227bba93979e4413095e01b50e7c00dec
| 13
|
py
|
Python
|
a2.py
|
Changhong-Jiang/test
|
b907b984cbd9703711f52c9f497cf36b5b4e8752
|
[
"MIT"
] | null | null | null |
a2.py
|
Changhong-Jiang/test
|
b907b984cbd9703711f52c9f497cf36b5b4e8752
|
[
"MIT"
] | 1
|
2020-02-28T08:15:58.000Z
|
2020-02-28T08:16:41.000Z
|
a2.py
|
Changhong-Jiang/test
|
b907b984cbd9703711f52c9f497cf36b5b4e8752
|
[
"MIT"
] | null | null | null |
print('222')
| 6.5
| 12
| 0.615385
| 2
| 13
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.076923
| 13
| 1
| 13
| 13
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8a88b11de563042688caafdaffa71f1207edee67
| 8,082
|
py
|
Python
|
items/migrations/0001_initial.py
|
tony-joseph/livre
|
3a6a851ed58029d5d14edde647b15ed22d65f24b
|
[
"BSD-3-Clause"
] | 1
|
2020-05-06T16:59:47.000Z
|
2020-05-06T16:59:47.000Z
|
items/migrations/0001_initial.py
|
tony-joseph/livre
|
3a6a851ed58029d5d14edde647b15ed22d65f24b
|
[
"BSD-3-Clause"
] | null | null | null |
items/migrations/0001_initial.py
|
tony-joseph/livre
|
3a6a851ed58029d5d14edde647b15ed22d65f24b
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-21 12:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='BookCopy',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('book_status', models.IntegerField(choices=[(1, 'Available'), (2, 'In Circulation'), (3, 'Temporarily Unavailable'), (4, 'Unavailable'), (5, 'Protected'), (6, 'Damaged')])),
('remarks', models.TextField(blank=True, default='')),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='BookDetail',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=1024)),
('author', models.CharField(default='Unknown', max_length=1024)),
('description', models.TextField(blank=True, default='')),
('publisher', models.CharField(blank=True, default='', max_length=512)),
('published_on', models.DateField(blank=True, null=True)),
('pages', models.PositiveIntegerField(blank=True, default=0, null=True)),
('ddc', models.CharField(blank=True, default='', max_length=1024)),
('llcc', models.CharField(blank=True, default='', max_length=1024)),
('isbn', models.CharField(blank=True, default='', max_length=1024)),
('tags', models.CharField(blank=True, max_length=1024, null=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=512)),
('slug', models.SlugField(max_length=128, unique=True)),
('description', models.TextField(blank=True, default='')),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='category_updated_by', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Language',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=512)),
('short_code', models.CharField(db_index=True, max_length=8, unique=True)),
('description', models.TextField(blank=True, default='')),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='language_updated_by', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Periodical',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=1024)),
('description', models.TextField(blank=True, default='')),
('publisher', models.CharField(blank=True, default='', max_length=512)),
('tags', models.CharField(blank=True, max_length=1024, null=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Category')),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('language', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Language')),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='periodical_updated_by', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PeriodicalIssue',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('issue_status', models.IntegerField(choices=[(1, 'Available'), (2, 'In Circulation'), (3, 'Temporarily Unavailable'), (4, 'Unavailable'), (5, 'Protected'), (6, 'Damaged')])),
('published_on', models.DateField(blank=True, null=True)),
('volume', models.PositiveIntegerField(blank=True, null=True)),
('issue', models.PositiveIntegerField(blank=True, null=True)),
('remarks', models.TextField(blank=True, default='')),
('tags', models.CharField(blank=True, max_length=1024, null=True)),
('created_on', models.DateTimeField(auto_now_add=True)),
('updated_on', models.DateTimeField(auto_now=True)),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('periodical', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Periodical')),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='periodical_issue_updated_by', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='bookdetail',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Category'),
),
migrations.AddField(
model_name='bookdetail',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='bookdetail',
name='language',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Language'),
),
migrations.AddField(
model_name='bookdetail',
name='updated_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='book_detail_updated_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='bookcopy',
name='book_detail',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.BookDetail'),
),
migrations.AddField(
model_name='bookcopy',
name='created_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='bookcopy',
name='updated_by',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='book_copy_updated_by', to=settings.AUTH_USER_MODEL),
),
]
| 56.915493
| 191
| 0.614081
| 847
| 8,082
| 5.670602
| 0.136954
| 0.033313
| 0.055382
| 0.087029
| 0.860712
| 0.854466
| 0.804497
| 0.797835
| 0.752446
| 0.752446
| 0
| 0.013031
| 0.240411
| 8,082
| 141
| 192
| 57.319149
| 0.769344
| 0.008043
| 0
| 0.661654
| 1
| 0
| 0.126029
| 0.008735
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030075
| 0
| 0.06015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a9d4177e423a6db85599cff72c82ba14d5a1522
| 883
|
py
|
Python
|
algorithm/python/LeetCode/isValid.py
|
HoneyS2/meaningful
|
78659de1ed74121db4ade211f6565ddc6d117041
|
[
"MIT"
] | null | null | null |
algorithm/python/LeetCode/isValid.py
|
HoneyS2/meaningful
|
78659de1ed74121db4ade211f6565ddc6d117041
|
[
"MIT"
] | null | null | null |
algorithm/python/LeetCode/isValid.py
|
HoneyS2/meaningful
|
78659de1ed74121db4ade211f6565ddc6d117041
|
[
"MIT"
] | null | null | null |
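# Bracket-matching check (LeetCode "Valid Parentheses" style): scan the string and use a stack to pair each closing bracket with the most recent opener.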
s = "([}}])"
stack = []
if len(s) % 2 == 1:
print(False)
exit()
for i in s:
if i == "(":
stack.append("(")
elif i == "[":
stack.append("[")
elif i == "{":
stack.append("{")
elif i == ")":
if len(stack) < 1:
print(False)
exit()
if stack[-1] == "(":
stack.pop()
else:
print(False)
exit()
elif i == "]":
if len(stack) < 1:
print(False)
exit()
if stack[-1] == "[":
stack.pop()
else:
print(False)
exit()
elif i == "}":
if len(stack) < 1:
print(False)
exit()
if stack[-1] == "{":
stack.pop()
else:
print(False)
exit()
if len(stack) == 0:
print(True)
else:
print(False)
| 18.395833
| 28
| 0.347678
| 89
| 883
| 3.449438
| 0.202247
| 0.260586
| 0.319218
| 0.19544
| 0.76873
| 0.76873
| 0.76873
| 0.76873
| 0.76873
| 0.625407
| 0
| 0.01919
| 0.468856
| 883
| 47
| 29
| 18.787234
| 0.635394
| 0
| 0
| 0.581395
| 0
| 0
| 0.020385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.209302
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8abf08c703d4b07df642c217bba0fae7c6cdc10b
| 141
|
py
|
Python
|
hexafuel_oil/hexafuel_oil_app/apps.py
|
zante95/Hexafuel-Oil
|
41dc4c9d855c74d4bb7dd86f3ac3fb1db27b663b
|
[
"MIT"
] | null | null | null |
hexafuel_oil/hexafuel_oil_app/apps.py
|
zante95/Hexafuel-Oil
|
41dc4c9d855c74d4bb7dd86f3ac3fb1db27b663b
|
[
"MIT"
] | null | null | null |
hexafuel_oil/hexafuel_oil_app/apps.py
|
zante95/Hexafuel-Oil
|
41dc4c9d855c74d4bb7dd86f3ac3fb1db27b663b
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig #pragma: no cover
class HexafuelOilAppConfig(AppConfig): #pragma: no cover
name = 'hexafuel_oil_app'
| 23.5
| 56
| 0.77305
| 18
| 141
| 5.944444
| 0.777778
| 0.280374
| 0.317757
| 0.411215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 141
| 5
| 57
| 28.2
| 0.891667
| 0.22695
| 0
| 0
| 0
| 0
| 0.149533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8ac83a9b0ffc4d89a43ceecc29a99652f8c7e2f2
| 5,869
|
py
|
Python
|
rspub/util/test/test_resourcefilter.py
|
EHRI/rspub-core
|
1f6b0c84825037b7df442ae0d258d5d897ff6905
|
[
"Apache-2.0"
] | 1
|
2017-02-01T15:03:29.000Z
|
2017-02-01T15:03:29.000Z
|
rspub/util/test/test_resourcefilter.py
|
EHRI/rspub-core
|
1f6b0c84825037b7df442ae0d258d5d897ff6905
|
[
"Apache-2.0"
] | 3
|
2017-02-15T12:25:22.000Z
|
2017-04-10T13:51:54.000Z
|
rspub/util/test/test_resourcefilter.py
|
EHRI/rspub-core
|
1f6b0c84825037b7df442ae0d258d5d897ff6905
|
[
"Apache-2.0"
] | 3
|
2017-02-15T09:04:39.000Z
|
2021-06-21T09:01:59.000Z
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import platform
import unittest
import rspub.util.resourcefilter as rf
def on_windows():
opsys = platform.system()
return opsys == "Windows"
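# Several predicate tests below have Windows-only variants so that backslash-style paths are also exercised.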
class TestPredicates(unittest.TestCase):
def test_directory_pattern_filter_empty(self):
dpf = rf.directory_pattern_predicate() # should pass all strings
self.assertTrue(dpf(""))
self.assertTrue(dpf("."))
self.assertTrue(dpf("\n"))
self.assertTrue(dpf("foo"))
# rejects non-string input
self.assertFalse(dpf(None))
self.assertFalse(dpf(42))
self.assertFalse(dpf(self))
def test_directory_pattern_filter(self):
dpf = rf.directory_pattern_predicate("abc")
self.assertTrue(dpf("foo/babcd/bar/some.txt"))
self.assertTrue(dpf("/abc/bar/some.txt"))
self.assertTrue(dpf("/foo/bar/abc/some.txt"))
#
self.assertFalse(dpf("/foo/bar/baz/abc.txt"))
# ##
dpf = rf.directory_pattern_predicate("^/abc")
self.assertTrue(dpf("/abc/bar/some.txt"))
#
self.assertFalse(dpf("abc/bar/some.txt"))
# #
dpf = rf.directory_pattern_predicate("abc$")
self.assertTrue(dpf("foo/bar/abc/some.txt"))
#
self.assertFalse(dpf("abc/abc/bar/some.txt"))
self.assertFalse(dpf("abc/abc/bar/abc.abc"))
@unittest.skipUnless(on_windows(), "Only tested on Windows.")
def test_directory_pattern_filter_windows(self):
dpf = rf.directory_pattern_predicate("abc")
self.assertTrue(dpf("foo/babcd/bar/some.txt"))
self.assertTrue(dpf("/abc/bar/some.txt"))
self.assertTrue(dpf("/foo/bar/abc/some.txt"))
self.assertTrue(dpf("foo\\babcd\\bar\\some.txt"))
self.assertTrue(dpf("c:\\abc\\bar\\some.txt"))
self.assertTrue(dpf("c:\\foo\\bar\\abc\\some.txt"))
#
self.assertFalse(dpf("/foo/bar/baz/abc.txt"))
self.assertFalse(dpf("c:\\foo\\bar\\baz\\abc.txt"))
# ##
dpf = rf.directory_pattern_predicate("^/abc")
self.assertTrue(dpf("/abc/bar/some.txt"))
#
self.assertFalse(dpf("abc/bar/some.txt"))
# #
dpf = rf.directory_pattern_predicate("^c:\\abc")
self.assertTrue(dpf("c:\\abc\\bar\\some.txt"))
#
self.assertFalse(dpf("abc\\bar\\some.txt"))
dpf = rf.directory_pattern_predicate("abc$")
self.assertTrue(dpf("foo/bar/abc/some.txt"))
self.assertTrue(dpf("foo\\bar\\abc\\some.txt"))
#
self.assertFalse(dpf("abc/abc/bar/some.txt"))
self.assertFalse(dpf("abc\\abc\\bar\\some.txt"))
self.assertFalse(dpf("abc/abc/bar/abc.abc"))
self.assertFalse(dpf("abc\\abc\\bar\\abc.abc"))
def test_last_modified_filter(self):
file_name = os.path.realpath(__file__)
lmaf = rf.last_modified_after_predicate()
self.assertTrue(lmaf(file_name))
lmaf = rf.last_modified_after_predicate(3000000000)
# valid until 2065-01-24 06:20:00
self.assertFalse(lmaf(file_name))
lmaf = rf.last_modified_after_predicate("2016-08-01")
self.assertTrue(lmaf(file_name))
def test_example(self):
import rspub.util.resourcefilter as rf
dir_ends_with_abc = rf.directory_pattern_predicate("abc$")
assert dir_ends_with_abc("/foo/bar/folder_abc/my_resource.txt")
assert not dir_ends_with_abc("/foo/bar/folder_def/my_resource.txt")
xml_file = rf.filename_pattern_predicate(".xml$")
assert xml_file("my_resource.xml")
assert not xml_file("my_resource.txt")
import rspub.util.gates as lf
xml_files_in_abc = lf.and_(dir_ends_with_abc, xml_file)
assert xml_files_in_abc("/foo/bar/folder_abc/my_resource.xml")
assert not xml_files_in_abc("/foo/bar/folder_abc/my_resource.txt")
assert not xml_files_in_abc("/foo/bar/folder_def/my_resource.xml")
recent = rf.last_modified_after_predicate("2016-08-01")
includes = [xml_files_in_abc]
excludes = [recent]
resource_gate = lf.gate(includes, excludes)
# print(type(resource_gate))
@unittest.skipUnless(on_windows(), "Only tested on Windows.")
def test_example_windows(self):
import rspub.util.resourcefilter as rf
dir_ends_with_abc = rf.directory_pattern_predicate("abc$")
assert dir_ends_with_abc("/foo/bar/folder_abc/my_resource.txt")
assert not dir_ends_with_abc("/foo/bar/folder_def/my_resource.txt")
xml_file = rf.filename_pattern_predicate(".xml$")
assert xml_file("my_resource.xml")
assert not xml_file("my_resource.txt")
import rspub.util.gates as lf
xml_files_in_abc = lf.and_(dir_ends_with_abc, xml_file)
assert xml_files_in_abc("/foo/bar/folder_abc/my_resource.xml")
assert not xml_files_in_abc("/foo/bar/folder_abc/my_resource.txt")
assert not xml_files_in_abc("/foo/bar/folder_def/my_resource.xml")
assert xml_files_in_abc("c:\\foo\\bar\\folder_abc\\my_resource.xml")
assert not xml_files_in_abc("c:\\foo\\bar\\folder_abc\\my_resource.txt")
assert not xml_files_in_abc("c:\\foo\\bar\\folder_def\\my_resource.xml")
recent = rf.last_modified_after_predicate("2016-08-01")
includes = [xml_files_in_abc]
excludes = [recent]
resource_gate = lf.gate(includes, excludes)
# print(type(resource_gate))
@unittest.skipUnless(on_windows(), "Only tested on Windows.")
def test_windows_to_unix(self):
path = os.path.expanduser("~")
dpf = rf.directory_pattern_predicate("^" + path)
self.assertTrue(dpf(os.path.join(path, "bla")))
dpf = rf.directory_pattern_predicate("^C:\\Users")
self.assertTrue(dpf(os.path.join(path, "bla")))
| 35.143713
| 80
| 0.645596
| 798
| 5,869
| 4.52005
| 0.12782
| 0.089271
| 0.098974
| 0.046853
| 0.867203
| 0.823953
| 0.773496
| 0.769615
| 0.739396
| 0.705018
| 0
| 0.011154
| 0.205657
| 5,869
| 166
| 81
| 35.355422
| 0.762548
| 0.030158
| 0
| 0.616822
| 0
| 0
| 0.223498
| 0.132332
| 0
| 0
| 0
| 0
| 0.523364
| 1
| 0.074766
| false
| 0
| 0.074766
| 0
| 0.168224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76e51515f0db0f6532d593373bce97eb6eda37bb
| 12,689
|
py
|
Python
|
Models.py
|
jmj23/Kaggle-Pneumothorax
|
96153af30468c5bcb49875dd374ac44ed1b4e2fb
|
[
"MIT"
] | null | null | null |
Models.py
|
jmj23/Kaggle-Pneumothorax
|
96153af30468c5bcb49875dd374ac44ed1b4e2fb
|
[
"MIT"
] | null | null | null |
Models.py
|
jmj23/Kaggle-Pneumothorax
|
96153af30468c5bcb49875dd374ac44ed1b4e2fb
|
[
"MIT"
] | 2
|
2019-07-12T15:03:41.000Z
|
2019-08-07T21:24:49.000Z
|
import numpy as np
from keras.applications.inception_v3 import InceptionV3
from keras.initializers import RandomNormal
from keras.layers import (BatchNormalization, Conv2D, Conv2DTranspose, Conv3D,
Cropping2D, Dense, Flatten, GlobalAveragePooling2D,
Input, Lambda, MaxPooling2D, Reshape, UpSampling2D,
ZeroPadding2D, ZeroPadding3D, add, concatenate)
from keras.layers.advanced_activations import ELU, LeakyReLU
from keras.models import Model
# Parameterized 2D Block Model
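# Encoder-decoder segmentation network built from repeated multi-scale conv blocks (1x1, 3x3 and stacked 3x3 branches) with skip connections between the contracting and expanding paths.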
def BlockModel2D(input_shape, filt_num=16, numBlocks=3):
"""Creates a Block CED model for segmentation problems
Args:
input shape: a list or tuple of [rows,cols,channels] of input images
filt_num: the number of filters in the first and last layers
This number is linearly increased and then decreased throughout the model
numBlocks: number of processing blocks. The larger the number the deeper the model
output_chan: number of output channels. Set if doing multi-class segmentation
regression: Whether to have a continuous output with linear activation
Returns:
An uninitialized Keras model
Example usage: SegModel = BlockModel2D([256,256,1],filt_num=8)
Notes: Using rows/cols that are powers of 2 is recommended. Otherwise,
the rows/cols must be divisible by 2^numBlocks for skip connections
to match up properly
"""
use_bn = True
# check for input shape compatibility
rows, cols = input_shape[0:2]
assert rows % 2**numBlocks == 0, "Input rows and number of blocks are incompatible"
assert cols % 2**numBlocks == 0, "Input cols and number of blocks are incompatible"
# calculate size reduction
startsize = np.max(input_shape[0:2])
minsize = (startsize-np.sum(2**np.arange(1, numBlocks+1)))/2**numBlocks
assert minsize > 4, "Too small of input for this many blocks. Use fewer blocks or larger input"
# input layer
lay_input = Input(shape=input_shape, name='input_layer')
# contracting blocks
x = lay_input
skip_list = []
for rr in range(1, numBlocks+1):
x1 = Conv2D(filt_num*rr, (1, 1), padding='same',
name='Conv1_{}'.format(rr))(x)
if use_bn:
x1 = BatchNormalization()(x1)
x1 = ELU(name='elu_x1_{}'.format(rr))(x1)
x3 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv3_{}'.format(rr))(x)
if use_bn:
x3 = BatchNormalization()(x3)
x3 = ELU(name='elu_x3_{}'.format(rr))(x3)
x51 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv51_{}'.format(rr))(x)
if use_bn:
x51 = BatchNormalization()(x51)
x51 = ELU(name='elu_x51_{}'.format(rr))(x51)
x52 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv52_{}'.format(rr))(x51)
if use_bn:
x52 = BatchNormalization()(x52)
x52 = ELU(name='elu_x52_{}'.format(rr))(x52)
x = concatenate([x1, x3, x52], name='merge_{}'.format(rr))
x = Conv2D(filt_num*rr, (1, 1), padding='valid',
name='ConvAll_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_all_{}'.format(rr))(x)
x = ZeroPadding2D(padding=(1, 1), name='PrePad_{}'.format(rr))(x)
x = Conv2D(filt_num*rr, (4, 4), padding='valid',
strides=(2, 2), name='DownSample_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_downsample_{}'.format(rr))(x)
x = Conv2D(filt_num*rr, (3, 3), padding='same',
name='ConvClean_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_clean_{}'.format(rr))(x)
skip_list.append(x)
# expanding blocks
expnums = list(range(1, numBlocks+1))
expnums.reverse()
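# Expanding path: walk the block indices in reverse, upsampling and concatenating the saved skip connections.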
for dd in expnums:
if dd < len(skip_list):
x = concatenate([skip_list[dd-1], x],
name='skip_connect_{}'.format(dd))
x1 = Conv2D(filt_num*dd, (1, 1), padding='same',
name='DeConv1_{}'.format(dd))(x)
if use_bn:
x1 = BatchNormalization()(x1)
x1 = ELU(name='elu_Dx1_{}'.format(dd))(x1)
x3 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv3_{}'.format(dd))(x)
if use_bn:
x3 = BatchNormalization()(x3)
x3 = ELU(name='elu_Dx3_{}'.format(dd))(x3)
x51 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv51_{}'.format(dd))(x)
if use_bn:
x51 = BatchNormalization()(x51)
x51 = ELU(name='elu_Dx51_{}'.format(dd))(x51)
x52 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv52_{}'.format(dd))(x51)
if use_bn:
x52 = BatchNormalization()(x52)
x52 = ELU(name='elu_Dx52_{}'.format(dd))(x52)
x = concatenate([x1, x3, x52], name='Dmerge_{}'.format(dd))
x = Conv2D(filt_num*dd, (1, 1), padding='valid',
name='DeConvAll_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dall_{}'.format(dd))(x)
x = UpSampling2D(size=(2, 2), name='UpSample_{}'.format(dd))(x)
x = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConvClean1_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dclean1_{}'.format(dd))(x)
x = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConvClean2_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dclean2_{}'.format(dd))(x)
# classifier
lay_out = Conv2D(1, (1, 1), activation='sigmoid', name='output_layer')(x)
return Model(lay_input, lay_out)
# Parameterized 2D Block Model
def BlockModel_Classifier(input_shape, filt_num=16, numBlocks=3):
"""Creates a Block model for pretraining on classification task
Args:
input shape: a list or tuple of [rows,cols,channels] of input images
filt_num: the number of filters in the first and last layers
This number is linearly increased and then decreased throughout the model
numBlocks: number of processing blocks. The larger the number the deeper the model
output_chan: number of output channels. Set if doing multi-class segmentation
regression: Whether to have a continuous output with linear activation
Returns:
An uninitialized Keras model
Example usage: ClassModel = BlockModel_Classifier([256,256,1],filt_num=8)
Notes: Using rows/cols that are powers of 2 is recommended. Otherwise,
the rows/cols must be divisible by 2^numBlocks for skip connections
to match up properly
"""
use_bn = True
# check for input shape compatibility
rows, cols = input_shape[0:2]
assert rows % 2**numBlocks == 0, "Input rows and number of blocks are incompatible"
assert cols % 2**numBlocks == 0, "Input cols and number of blocks are incompatible"
# calculate size reduction
startsize = np.max(input_shape[0:2])
minsize = (startsize-np.sum(2**np.arange(1, numBlocks+1)))/2**numBlocks
assert minsize > 4, "Too small of input for this many blocks. Use fewer blocks or larger input"
# input layer
lay_input = Input(shape=input_shape, name='input_layer')
# contracting blocks
x = lay_input
skip_list = []
for rr in range(1, numBlocks+1):
x1 = Conv2D(filt_num*rr, (1, 1), padding='same',
name='Conv1_{}'.format(rr))(x)
if use_bn:
x1 = BatchNormalization()(x1)
x1 = ELU(name='elu_x1_{}'.format(rr))(x1)
x3 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv3_{}'.format(rr))(x)
if use_bn:
x3 = BatchNormalization()(x3)
x3 = ELU(name='elu_x3_{}'.format(rr))(x3)
x51 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv51_{}'.format(rr))(x)
if use_bn:
x51 = BatchNormalization()(x51)
x51 = ELU(name='elu_x51_{}'.format(rr))(x51)
x52 = Conv2D(filt_num*rr, (3, 3), padding='same',
name='Conv52_{}'.format(rr))(x51)
if use_bn:
x52 = BatchNormalization()(x52)
x52 = ELU(name='elu_x52_{}'.format(rr))(x52)
x = concatenate([x1, x3, x52], name='merge_{}'.format(rr))
x = Conv2D(filt_num*rr, (1, 1), padding='valid',
name='ConvAll_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_all_{}'.format(rr))(x)
x = ZeroPadding2D(padding=(1, 1), name='PrePad_{}'.format(rr))(x)
x = Conv2D(filt_num*rr, (4, 4), padding='valid',
strides=(2, 2), name='DownSample_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_downsample_{}'.format(rr))(x)
x = Conv2D(filt_num*rr, (3, 3), padding='same',
name='ConvClean_{}'.format(rr))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_skip_{}'.format(rr))(x)
# average pooling
x = GlobalAveragePooling2D()(x)
# classifier
lay_out = Dense(1, activation='sigmoid', name='output_layer')(x)
return Model(lay_input, lay_out)
def ConvertEncoderToCED(model):
# Returns a model with frozen encoder layers
# and complementary, unfrozen decoder layers
# get input layer
# model must be compiled again after using this function
lay_input = model.input
# get skip connection layer outputs
skip_list = [l.output for l in model.layers if 'skip' in l.name]
numBlocks = len(skip_list)
filt_num = int(skip_list[0].shape[-1])
x = model.layers[-3].output
# freeze encoder layers
for layer in model.layers:
layer.trainable = False
use_bn = True
# make expanding blocks
expnums = list(range(1, numBlocks+1))
expnums.reverse()
for dd in expnums:
if dd < len(skip_list):
x = concatenate([skip_list[dd-1], x],
name='skip_connect_{}'.format(dd))
x1 = Conv2D(filt_num*dd, (1, 1), padding='same',
name='DeConv1_{}'.format(dd))(x)
if use_bn:
x1 = BatchNormalization()(x1)
x1 = ELU(name='elu_Dx1_{}'.format(dd))(x1)
x3 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv3_{}'.format(dd))(x)
if use_bn:
x3 = BatchNormalization()(x3)
x3 = ELU(name='elu_Dx3_{}'.format(dd))(x3)
x51 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv51_{}'.format(dd))(x)
if use_bn:
x51 = BatchNormalization()(x51)
x51 = ELU(name='elu_Dx51_{}'.format(dd))(x51)
x52 = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConv52_{}'.format(dd))(x51)
if use_bn:
x52 = BatchNormalization()(x52)
x52 = ELU(name='elu_Dx52_{}'.format(dd))(x52)
x = concatenate([x1, x3, x52], name='Dmerge_{}'.format(dd))
x = Conv2D(filt_num*dd, (1, 1), padding='valid',
name='DeConvAll_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dall_{}'.format(dd))(x)
x = UpSampling2D(size=(2, 2), name='UpSample_{}'.format(dd))(x)
x = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConvClean1_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dclean1_{}'.format(dd))(x)
x = Conv2D(filt_num*dd, (3, 3), padding='same',
name='DeConvClean2_{}'.format(dd))(x)
if use_bn:
x = BatchNormalization()(x)
x = ELU(name='elu_Dclean2_{}'.format(dd))(x)
# classifier
lay_out = Conv2D(1, (1, 1), activation='sigmoid', name='output_layer')(x)
return Model(lay_input, lay_out)
def Inception_model(input_shape=(299, 299, 3)):
incep_model = InceptionV3(
include_top=False, weights=None, input_shape=input_shape, pooling='avg')
input_layer = incep_model.input
incep_output = incep_model.output
# x = Conv2D(16, (3, 3), activation='relu')(incep_output)
# x = Flatten()(x)
x = Dense(1, activation='sigmoid')(incep_output)
return Model(inputs=input_layer, outputs=x)
| 41.877888
| 99
| 0.585625
| 1,664
| 12,689
| 4.335337
| 0.138822
| 0.033962
| 0.050457
| 0.026615
| 0.828805
| 0.821042
| 0.821042
| 0.821042
| 0.821042
| 0.821042
| 0
| 0.043793
| 0.27118
| 12,689
| 302
| 100
| 42.016556
| 0.736267
| 0.173773
| 0
| 0.857143
| 0
| 0
| 0.123259
| 0
| 0
| 0
| 0
| 0
| 0.02765
| 1
| 0.018433
| false
| 0
| 0.02765
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a6bdfe36df3fc3c2674d86fa755f854cc5eacf6
| 133
|
py
|
Python
|
summarizer/test_summarizer.py
|
bmcilw1/text-summary
|
f594fd4f41279a6e11262ac859cfbdad6aaf1703
|
[
"MIT"
] | null | null | null |
summarizer/test_summarizer.py
|
bmcilw1/text-summary
|
f594fd4f41279a6e11262ac859cfbdad6aaf1703
|
[
"MIT"
] | null | null | null |
summarizer/test_summarizer.py
|
bmcilw1/text-summary
|
f594fd4f41279a6e11262ac859cfbdad6aaf1703
|
[
"MIT"
] | null | null | null |
from summarizer.summarizer import summarize
def test_summarize_whenPassedEmptyString_ReturnsEmpty():
assert summarize("") == ""
| 26.6
| 56
| 0.796992
| 12
| 133
| 8.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112782
| 133
| 5
| 57
| 26.6
| 0.872881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
6a64620ee9819bca0e28e6f332c50299811770b5
| 13,981
|
py
|
Python
|
djconnectwise/tests/mocks.py
|
kti-sam/django-connectwise
|
28484faad9435892a46b8ce4a3c957f64c299971
|
[
"MIT"
] | null | null | null |
djconnectwise/tests/mocks.py
|
kti-sam/django-connectwise
|
28484faad9435892a46b8ce4a3c957f64c299971
|
[
"MIT"
] | null | null | null |
djconnectwise/tests/mocks.py
|
kti-sam/django-connectwise
|
28484faad9435892a46b8ce4a3c957f64c299971
|
[
"MIT"
] | null | null | null |
import os
from mock import patch
from datetime import datetime, date, time
import json
import responses
from . import fixtures
from django.utils import timezone
CW_MEMBER_IMAGE_FILENAME = 'AnonymousMember.png'
def create_mock_call(method_name, return_value, side_effect=None):
"""Utility function for mocking the specified function or method"""
_patch = patch(method_name, side_effect=side_effect)
mock_get_call = _patch.start()
if not side_effect:
mock_get_call.return_value = return_value
return mock_get_call, _patch
def company_info_get_company_info_call(return_value):
method_name = 'djconnectwise.api.CompanyInfoManager.get_company_info'
return create_mock_call(method_name, return_value)
def company_api_get_call(return_value):
method_name = 'djconnectwise.api.CompanyAPIClient.get_companies'
return create_mock_call(method_name, return_value)
def company_api_by_id_call(return_value, raised=None):
method_name = 'djconnectwise.api.CompanyAPIClient.by_id'
return create_mock_call(method_name, return_value, side_effect=raised)
def company_api_get_company_statuses_call(return_value, raised=None):
method_name = 'djconnectwise.api.CompanyAPIClient.get_company_statuses'
return create_mock_call(method_name, return_value, side_effect=raised)
def company_api_get_company_types_call(return_value, raised=None):
method_name = 'djconnectwise.api.CompanyAPIClient.get_company_types'
return create_mock_call(method_name, return_value, side_effect=raised)
def projects_api_get_project_statuses_call(return_value, raised=None):
method_name = 'djconnectwise.api.ProjectAPIClient.get_project_statuses'
return create_mock_call(method_name, return_value, side_effect=raised)
def projects_api_get_project_types_call(return_value, raised=None):
method_name = 'djconnectwise.api.ProjectAPIClient.get_project_types'
return create_mock_call(method_name, return_value, side_effect=raised)
def projects_api_get_project_phases_call(return_value, raised=None):
method_name = 'djconnectwise.api.ProjectAPIClient.get_project_phases'
return create_mock_call(method_name, return_value, side_effect=raised)
def project_api_get_projects_call(return_value):
method_name = 'djconnectwise.api.ProjectAPIClient.get_projects'
return create_mock_call(method_name, return_value)
def project_api_get_project_call(return_value, raised=None):
method_name = 'djconnectwise.api.ProjectAPIClient.get_project'
return create_mock_call(method_name, return_value, side_effect=raised)
def _project_api_tickets_call(page=1, page_size=25, conditions=None):
    # Use None instead of a mutable default: a shared default list would keep
    # growing across calls because of the append() below.
    conditions = [] if conditions is None else conditions
    return_value = []
test_date = date(1948, 5, 14)
test_time = time(12, 0, 0, tzinfo=timezone.get_current_timezone())
test_datetime = datetime.combine(test_date, test_time)
conditions.append('lastUpdated>' + timezone.localtime(
value=test_datetime).isoformat()
)
if page == 1:
return_value = [fixtures.API_PROJECT_TICKET]
return return_value
def project_api_tickets_call():
method_name = 'djconnectwise.api.TicketAPIMixin.get_tickets'
mock_call, _patch = create_mock_call(
method_name,
None,
side_effect=_project_api_tickets_call)
return mock_call, _patch
def project_api_tickets_test_command(return_value):
method_name = 'djconnectwise.api.TicketAPIMixin.get_tickets'
mock_call, _patch = create_mock_call(method_name, return_value)
return mock_call, _patch
def sales_api_by_id_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.by_id'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_opportunities_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_opportunities'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_opportunity_statuses_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_opportunity_statuses'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_opportunity_types_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_opportunity_types'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_opportunity_stages_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_opportunity_stages'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_sales_probabilities_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_probabilities'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_schedule_types_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_schedule_types'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_schedule_statuses_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_schedule_statuses'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_schedule_entries_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_schedule_entries'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_schedule_entry_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_schedule_entry'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_calendars_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_calendars'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_holidays_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_holidays'
return create_mock_call(method_name, return_value, side_effect=raised)
def schedule_api_get_holiday_lists_call(return_value, raised=None):
method_name = 'djconnectwise.api.ScheduleAPIClient.get_holiday_lists'
return create_mock_call(method_name, return_value, side_effect=raised)
def time_api_get_time_entries_call(return_value, raised=None):
method_name = 'djconnectwise.api.TimeAPIClient.get_time_entries'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_activities_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_activities'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_activities_statuses_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_activity_statuses'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_activities_types_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_activity_types'
return create_mock_call(method_name, return_value, side_effect=raised)
def sales_api_get_single_activity_call(return_value, raised=None):
method_name = 'djconnectwise.api.SalesAPIClient.get_single_activity'
return create_mock_call(method_name, return_value, side_effect=raised)
def _service_api_tickets_call(page=1, page_size=25, conditions=None):
    # Same fix as above: avoid the mutable default-argument pitfall.
    conditions = [] if conditions is None else conditions
    return_value = []
test_date = date(1948, 5, 14)
test_time = time(12, 0, 0, tzinfo=timezone.get_current_timezone())
test_datetime = datetime.combine(test_date, test_time)
conditions.append('lastUpdated>' + timezone.localtime(
value=test_datetime).isoformat()
)
if page == 1:
return_value = [fixtures.API_SERVICE_TICKET]
return return_value
def service_api_tickets_call():
method_name = 'djconnectwise.api.TicketAPIMixin.get_tickets'
mock_call, _patch = create_mock_call(
method_name,
None,
side_effect=_service_api_tickets_call)
return mock_call, _patch
def _service_api_get_ticket_call(ticket_id):
return fixtures.API_SERVICE_TICKET_MAP.get(ticket_id)
def service_api_get_ticket_call(raised=None):
method_name = 'djconnectwise.api.TicketAPIMixin.get_ticket'
mock_call, _patch = create_mock_call(
method_name,
None,
side_effect=raised if raised else _service_api_get_ticket_call)
return mock_call, _patch
def service_api_get_boards_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_boards'
return create_mock_call(method_name, return_value)
def service_api_update_ticket_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.update_ticket'
return create_mock_call(method_name, return_value)
def service_api_get_statuses_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_statuses'
return create_mock_call(method_name, return_value)
def service_api_get_priorities_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_priorities'
return create_mock_call(method_name, return_value)
def service_api_get_teams_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_teams'
return create_mock_call(method_name, return_value)
def service_api_get_notes_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_notes'
return create_mock_call(method_name, return_value)
def service_api_get_slas_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_slas'
return create_mock_call(method_name, return_value)
def service_api_get_sla_priorities_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_slapriorities'
return create_mock_call(method_name, return_value)
def service_api_get_types_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_types'
return create_mock_call(method_name, return_value)
def service_api_get_subtypes_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_subtypes'
return create_mock_call(method_name, return_value)
def service_api_get_items_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_items'
return create_mock_call(method_name, return_value)
def sales_api_get_opportunity_notes_call(return_value):
method_name = 'djconnectwise.api.SalesAPIClient.get_notes'
return create_mock_call(method_name, return_value)
def service_api_get_locations_call(return_value):
method_name = 'djconnectwise.api.ServiceAPIClient.get_locations'
return create_mock_call(method_name, return_value)
def system_api_get_connectwise_version_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_connectwise_version'
return create_mock_call(method_name, return_value)
def system_api_get_members_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_members'
return create_mock_call(method_name, return_value)
def system_api_get_member_image_by_photo_id_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.' \
+ 'get_member_image_by_photo_id'
return create_mock_call(method_name, return_value)
def system_api_get_member_count_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_members'
return create_mock_call(method_name, return_value)
def system_api_create_callback_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.create_callback'
return create_mock_call(method_name, return_value)
def system_api_delete_callback_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.delete_callback'
return create_mock_call(method_name, return_value)
def system_api_get_callbacks_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_callbacks'
return create_mock_call(method_name, return_value)
def system_api_get_territories_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_territories'
return create_mock_call(method_name, return_value)
def system_api_get_other_call(return_value):
method_name = 'djconnectwise.api.SystemAPIClient.get_mycompanyother'
return create_mock_call(method_name, return_value)
def cw_api_fetch_resource_call(return_value):
method_name = 'djconnectwise.api.ConnectWiseAPIClient.fetch_resource'
return create_mock_call(method_name, return_value)
def get(url, data, headers=None, status=200):
"""Set up requests mock for given URL and JSON-serializable data."""
get_raw(url, json.dumps(data), "application/json", headers, status=status)
def time_api_get_work_types_call(return_value):
method_name = 'djconnectwise.api.TimeAPIClient.get_work_types'
return create_mock_call(method_name, return_value)
def time_api_get_work_roles_call(return_value):
method_name = 'djconnectwise.api.TimeAPIClient.get_work_roles'
return create_mock_call(method_name, return_value)
def finance_api_get_agreements_call(return_value):
method_name = 'djconnectwise.api.FinanceAPIClient.get_agreements'
return create_mock_call(method_name, return_value)
def get_raw(url, data, content_type="application/octet-stream", headers=None,
status=200):
"""Set up requests mock for given URL."""
responses.add(
responses.GET,
url,
body=data,
status=status,
content_type=content_type,
adding_headers=headers,
)
def get_member_avatar():
"""Return the avatar image data in the tests directory."""
cw_member_image_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
CW_MEMBER_IMAGE_FILENAME
)
with open(cw_member_image_path, 'rb') as anonymous_image_file:
return anonymous_image_file.read()
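# Hedged usage sketch (not part of the original module): how one of the helpers
# above might be used inside a test case; stop() restores the patched method.
# The empty return value is an illustrative assumption.
def _example_mock_usage():
    mock_call, _patch = company_api_get_call(return_value=[])
    try:
        # Code under test that calls CompanyAPIClient.get_companies would now
        # receive the stubbed [] instead of hitting the real API.
        pass
    finally:
        _patch.stop()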
| 36.126615
| 78
| 0.800801
| 1,878
| 13,981
| 5.517572
| 0.089989
| 0.126327
| 0.082417
| 0.113878
| 0.848292
| 0.827157
| 0.807856
| 0.78701
| 0.771183
| 0.648716
| 0
| 0.002936
| 0.123096
| 13,981
| 386
| 79
| 36.220207
| 0.842251
| 0.015235
| 0
| 0.359184
| 0
| 0
| 0.213428
| 0.208991
| 0
| 0
| 0
| 0
| 0
| 1
| 0.265306
| false
| 0
| 0.028571
| 0.004082
| 0.55102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
6a6837a4b97157cac91cdd54ef662d5a158d6207
| 22,699
|
py
|
Python
|
tests/test_dynamics.py
|
leasanchez/BiorbdOptim
|
28fac818af031668ecd82bc1929f78303c5d58d2
|
[
"MIT"
] | 34
|
2020-12-14T17:09:41.000Z
|
2022-03-31T17:03:37.000Z
|
tests/test_dynamics.py
|
pariterre/bioptim
|
4064138e7d3fce34e21d488df19941937ce30557
|
[
"MIT"
] | 229
|
2020-09-30T16:53:40.000Z
|
2022-03-29T21:11:46.000Z
|
tests/test_dynamics.py
|
fbailly/bioptim
|
3a5473ee7c39d645d960611596a45b044e8ccf58
|
[
"MIT"
] | 15
|
2020-11-20T12:32:59.000Z
|
2022-01-22T22:59:08.000Z
|
import pytest
import numpy as np
from casadi import MX, SX
import biorbd_casadi as biorbd
from bioptim.dynamics.configure_problem import ConfigureProblem
from bioptim.dynamics.dynamics_functions import DynamicsFunctions
from bioptim.interfaces.biorbd_interface import BiorbdInterface
from bioptim.misc.enums import ControlType
from bioptim.optimization.non_linear_program import NonLinearProgram
from bioptim.optimization.optimization_vector import OptimizationVector
from bioptim.dynamics.configure_problem import DynamicsFcn, Dynamics
from .utils import TestUtils
class OptimalControlProgram:
def __init__(self, nlp):
self.n_phases = 1
self.nlp = [nlp]
self.v = OptimizationVector(self)
@pytest.mark.parametrize("cx", [MX, SX])
@pytest.mark.parametrize("with_external_force", [False, True])
@pytest.mark.parametrize("with_contact", [False, True])
def test_torque_driven(with_contact, with_external_force, cx):
# Prepare the program
nlp = NonLinearProgram()
nlp.model = biorbd.Model(
TestUtils.bioptim_folder() + "/examples/getting_started/models/2segments_4dof_2contacts.bioMod"
)
nlp.ns = 5
nlp.cx = cx
nlp.x_bounds = np.zeros((nlp.model.nbQ() * 3, 1))
nlp.u_bounds = np.zeros((nlp.model.nbQ(), 1))
ocp = OptimalControlProgram(nlp)
nlp.control_type = ControlType.CONSTANT
NonLinearProgram.add(ocp, "dynamics_type", Dynamics(DynamicsFcn.TORQUE_DRIVEN, with_contact=with_contact), False)
np.random.seed(42)
if with_external_force:
external_forces = [np.random.rand(6, nlp.model.nbSegment(), nlp.ns)]
nlp.external_forces = BiorbdInterface.convert_array_to_external_forces(external_forces)[0]
# Prepare the dynamics
ConfigureProblem.initialize(ocp, nlp)
# Test the results
states = np.random.rand(nlp.states.shape, nlp.ns)
controls = np.random.rand(nlp.controls.shape, nlp.ns)
params = np.random.rand(nlp.parameters.shape, nlp.ns)
x_out = np.array(nlp.dynamics_func(states, controls, params))
if with_contact:
contact_out = np.array(nlp.contact_forces_func(states, controls, params))
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.8631034, 0.3251833, 0.1195942, 0.4937956, -7.7700092, -7.5782306, 21.7073786, -16.3059315],
)
np.testing.assert_almost_equal(contact_out[:, 0], [-47.8131136, 111.1726516, -24.4449121])
else:
np.testing.assert_almost_equal(
x_out[:, 0], [0.6118529, 0.785176, 0.6075449, 0.8083973, -0.3214905, -0.1912131, 0.6507164, -0.2359716]
)
np.testing.assert_almost_equal(contact_out[:, 0], [-2.444071, 128.8816865, 2.7245124])
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.86310343, 0.32518332, 0.11959425, 0.4937956, 0.30731739, -9.97912778, 1.15263778, 36.02430956],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.61185289, 0.78517596, 0.60754485, 0.80839735, -0.30241366, -10.38503791, 1.60445173, 35.80238642],
)
@pytest.mark.parametrize("cx", [MX, SX])
@pytest.mark.parametrize("with_external_force", [False, True])
@pytest.mark.parametrize("with_contact", [False, True])
def test_torque_derivative_driven(with_contact, with_external_force, cx):
# Prepare the program
nlp = NonLinearProgram()
nlp.model = biorbd.Model(
TestUtils.bioptim_folder() + "/examples/getting_started/models/2segments_4dof_2contacts.bioMod"
)
nlp.ns = 5
nlp.cx = cx
nlp.x_bounds = np.zeros((nlp.model.nbQ() * 3, 1))
nlp.u_bounds = np.zeros((nlp.model.nbQ(), 1))
ocp = OptimalControlProgram(nlp)
nlp.control_type = ControlType.CONSTANT
NonLinearProgram.add(
ocp, "dynamics_type", Dynamics(DynamicsFcn.TORQUE_DERIVATIVE_DRIVEN, with_contact=with_contact), False
)
np.random.seed(42)
if with_external_force:
external_forces = [np.random.rand(6, nlp.model.nbSegment(), nlp.ns)]
nlp.external_forces = BiorbdInterface.convert_array_to_external_forces(external_forces)[0]
# Prepare the dynamics
ConfigureProblem.initialize(ocp, nlp)
# Test the results
states = np.random.rand(nlp.states.shape, nlp.ns)
controls = np.random.rand(nlp.controls.shape, nlp.ns)
params = np.random.rand(nlp.parameters.shape, nlp.ns)
x_out = np.array(nlp.dynamics_func(states, controls, params))
if with_contact:
contact_out = np.array(nlp.contact_forces_func(states, controls, params))
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.8631034,
0.3251833,
0.1195942,
0.4937956,
-7.7700092,
-7.5782306,
21.7073786,
-16.3059315,
0.8074402,
0.4271078,
0.417411,
0.3232029,
],
)
np.testing.assert_almost_equal(contact_out[:, 0], [-47.8131136, 111.1726516, -24.4449121])
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.61185289,
0.78517596,
0.60754485,
0.80839735,
-0.32149054,
-0.19121314,
0.65071636,
-0.23597164,
0.38867729,
0.54269608,
0.77224477,
0.72900717,
],
)
np.testing.assert_almost_equal(contact_out[:, 0], [-2.444071, 128.8816865, 2.7245124])
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.86310343,
0.32518332,
0.11959425,
0.4937956,
0.30731739,
-9.97912778,
1.15263778,
36.02430956,
0.80744016,
0.42710779,
0.417411,
0.32320293,
],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.61185289,
0.78517596,
0.60754485,
0.80839735,
-0.30241366,
-10.38503791,
1.60445173,
35.80238642,
0.38867729,
0.54269608,
0.77224477,
0.72900717,
],
)
@pytest.mark.parametrize("cx", [MX, SX])
@pytest.mark.parametrize("with_external_force", [False, True])
@pytest.mark.parametrize("with_contact", [False, True])
def test_torque_activation_driven(with_contact, with_external_force, cx):
# Prepare the program
nlp = NonLinearProgram()
nlp.model = biorbd.Model(
TestUtils.bioptim_folder() + "/examples/getting_started/models/2segments_4dof_2contacts.bioMod"
)
nlp.ns = 5
nlp.cx = cx
nlp.x_bounds = np.zeros((nlp.model.nbQ() * 2, 1))
nlp.u_bounds = np.zeros((nlp.model.nbQ(), 1))
ocp = OptimalControlProgram(nlp)
nlp.control_type = ControlType.CONSTANT
NonLinearProgram.add(
ocp, "dynamics_type", Dynamics(DynamicsFcn.TORQUE_ACTIVATIONS_DRIVEN, with_contact=with_contact), False
)
np.random.seed(42)
if with_external_force:
external_forces = [np.random.rand(6, nlp.model.nbSegment(), nlp.ns)]
nlp.external_forces = BiorbdInterface.convert_array_to_external_forces(external_forces)[0]
# Prepare the dynamics
ConfigureProblem.initialize(ocp, nlp)
# Test the results
states = np.random.rand(nlp.states.shape, nlp.ns)
controls = np.random.rand(nlp.controls.shape, nlp.ns)
params = np.random.rand(nlp.parameters.shape, nlp.ns)
x_out = np.array(nlp.dynamics_func(states, controls, params))
if with_contact:
contact_out = np.array(nlp.contact_forces_func(states, controls, params))
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.8631, 0.32518, 0.11959, 0.4938, 19.01887, 18.51503, -53.08574, 58.48719],
decimal=5,
)
np.testing.assert_almost_equal(contact_out[:, 0], [109.8086936, 3790.3932439, -3571.7858574])
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.61185289, 0.78517596, 0.60754485, 0.80839735, 0.78455384, -0.16844256, -1.56184114, 1.97658587],
decimal=5,
)
np.testing.assert_almost_equal(contact_out[:, 0], [-7.88958997, 329.70828173, -263.55516549])
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
8.63103426e-01,
3.25183322e-01,
1.19594246e-01,
4.93795596e-01,
1.73558072e01,
-4.69891264e01,
1.81396922e02,
3.61170139e03,
],
decimal=5,
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
6.11852895e-01,
7.85175961e-01,
6.07544852e-01,
8.08397348e-01,
-2.38262975e01,
-5.82033454e01,
1.27439020e02,
3.66531163e03,
],
decimal=5,
)
@pytest.mark.parametrize("cx", [MX, SX])
@pytest.mark.parametrize("with_external_force", [False, True])
@pytest.mark.parametrize("with_contact", [False, True])
@pytest.mark.parametrize("with_torque", [False, True])
@pytest.mark.parametrize("with_excitations", [False, True])
def test_muscle_driven(with_excitations, with_contact, with_torque, with_external_force, cx):
# Prepare the program
nlp = NonLinearProgram()
nlp.model = biorbd.Model(
TestUtils.bioptim_folder() + "/examples/muscle_driven_ocp/models/arm26_with_contact.bioMod"
)
nlp.ns = 5
nlp.cx = cx
nlp.x_bounds = np.zeros((nlp.model.nbQ() * 2 + nlp.model.nbMuscles(), 1))
nlp.u_bounds = np.zeros((nlp.model.nbMuscles(), 1))
ocp = OptimalControlProgram(nlp)
nlp.control_type = ControlType.CONSTANT
NonLinearProgram.add(
ocp,
"dynamics_type",
Dynamics(
DynamicsFcn.MUSCLE_DRIVEN,
with_torque=with_torque,
with_excitations=with_excitations,
with_contact=with_contact,
),
False,
)
np.random.seed(42)
if with_external_force:
external_forces = [np.random.rand(6, nlp.model.nbSegment(), nlp.ns)]
nlp.external_forces = BiorbdInterface.convert_array_to_external_forces(external_forces)[0]
# Prepare the dynamics
ConfigureProblem.initialize(ocp, nlp)
# Test the results
states = np.random.rand(nlp.states.shape, nlp.ns)
controls = np.random.rand(nlp.controls.shape, nlp.ns)
params = np.random.rand(nlp.parameters.shape, nlp.ns)
x_out = np.array(nlp.dynamics_func(states, controls, params))
if with_contact:  # Warning: this test is a bit bogus, since the model does not have contacts
if with_torque:
if with_excitations:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.6158501,
0.50313626,
0.64241928,
1.07179622,
-33.76217857,
36.21815923,
46.87928022,
-1.80189035,
53.3914525,
48.30056919,
63.69373374,
-28.15700995,
],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
1.83404510e-01,
6.11852895e-01,
7.85175961e-01,
-9.29662878e00,
3.00872062e02,
-9.50354903e02,
8.60630831e00,
3.19433638e00,
2.97405608e01,
-2.02754226e01,
-2.32467778e01,
-4.19135012e01,
],
decimal=6,
)
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[6.15850098e-01, 5.03136259e-01, 6.42419278e-01, -8.06478367e00, 2.42279101e02, -7.72114103e02],
decimal=6,
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[1.83404510e-01, 6.11852895e-01, 7.85175961e-01, -3.80892207e00, 1.20476051e02, -4.33291346e02],
decimal=6,
)
else:
if with_excitations:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.6158501,
0.50313626,
0.64241928,
0.91952705,
-39.04876174,
45.31837288,
55.65557816,
50.47052688,
0.36025589,
58.92377491,
29.70094194,
-15.13534937,
],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
1.83404510e-01,
6.11852895e-01,
7.85175961e-01,
-9.72712350e00,
3.10866170e02,
-9.82725656e02,
-7.72228930e00,
-1.13759732e01,
9.51906209e01,
4.45077128e00,
-5.20261014e00,
-2.80864106e01,
],
decimal=6,
)
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.6158501, 0.50313626, 0.64241928, 0.91952705, -39.04876174, 45.31837288],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[1.83404510e-01, 6.11852895e-01, 7.85175961e-01, -9.72712350e00, 3.10866170e02, -9.82725656e02],
decimal=6,
)
else:
if with_torque:
if with_excitations:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.6158501,
0.50313626,
0.64241928,
1.07179622,
-33.76217857,
36.21815923,
46.87928022,
-1.80189035,
53.3914525,
48.30056919,
63.69373374,
-28.15700995,
],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
1.83404510e-01,
6.11852895e-01,
7.85175961e-01,
-9.29662878e00,
3.00872062e02,
-9.50354903e02,
8.60630831e00,
3.19433638e00,
2.97405608e01,
-2.02754226e01,
-2.32467778e01,
-4.19135012e01,
],
decimal=6,
)
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[6.15850098e-01, 5.03136259e-01, 6.42419278e-01, -8.06478367e00, 2.42279101e02, -7.72114103e02],
decimal=6,
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[1.83404510e-01, 6.11852895e-01, 7.85175961e-01, -3.80892207e00, 1.20476051e02, -4.33291346e02],
decimal=6,
)
else:
if with_excitations:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[
0.6158501,
0.50313626,
0.64241928,
0.91952705,
-39.04876174,
45.31837288,
55.65557816,
50.47052688,
0.36025589,
58.92377491,
29.70094194,
-15.13534937,
],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[
1.83404510e-01,
6.11852895e-01,
7.85175961e-01,
-9.72712350e00,
3.10866170e02,
-9.82725656e02,
-7.72228930e00,
-1.13759732e01,
9.51906209e01,
4.45077128e00,
-5.20261014e00,
-2.80864106e01,
],
decimal=6,
)
else:
if with_external_force:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.6158501, 0.50313626, 0.64241928, 0.91952705, -39.04876174, 45.31837288],
)
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[1.83404510e-01, 6.11852895e-01, 7.85175961e-01, -9.72712350e00, 3.10866170e02, -9.82725656e02],
decimal=6,
)
@pytest.mark.parametrize("with_contact", [False, True])
def test_custom_dynamics(with_contact):
def custom_dynamic(states, controls, parameters, nlp, with_contact=False) -> tuple:
DynamicsFunctions.apply_parameters(parameters, nlp)
q = DynamicsFunctions.get(nlp.states["q"], states)
qdot = DynamicsFunctions.get(nlp.states["qdot"], states)
tau = DynamicsFunctions.get(nlp.controls["tau"], controls)
dq = DynamicsFunctions.compute_qdot(nlp, q, qdot)
ddq = DynamicsFunctions.forward_dynamics(nlp, q, qdot, tau, with_contact)
return dq, ddq
def configure(ocp, nlp, with_contact=None):
ConfigureProblem.configure_q(nlp, True, False)
ConfigureProblem.configure_qdot(nlp, True, False)
ConfigureProblem.configure_tau(nlp, False, True)
ConfigureProblem.configure_dynamics_function(ocp, nlp, custom_dynamic, with_contact=with_contact)
if with_contact:
ConfigureProblem.configure_contact_function(ocp, nlp, DynamicsFunctions.forces_from_torque_driven)
# Prepare the program
nlp = NonLinearProgram()
nlp.model = biorbd.Model(
TestUtils.bioptim_folder() + "/examples/getting_started/models/2segments_4dof_2contacts.bioMod"
)
nlp.ns = 5
nlp.cx = MX
nlp.x_bounds = np.zeros((nlp.model.nbQ() * 3, 1))
nlp.u_bounds = np.zeros((nlp.model.nbQ(), 1))
ocp = OptimalControlProgram(nlp)
nlp.control_type = ControlType.CONSTANT
NonLinearProgram.add(
ocp, "dynamics_type", Dynamics(configure, dynamic_function=custom_dynamic, with_contact=with_contact), False
)
np.random.seed(42)
# Prepare the dynamics
ConfigureProblem.initialize(ocp, nlp)
# Test the results
states = np.random.rand(nlp.states.shape, nlp.ns)
controls = np.random.rand(nlp.controls.shape, nlp.ns)
params = np.random.rand(nlp.parameters.shape, nlp.ns)
x_out = np.array(nlp.dynamics_func(states, controls, params))
if with_contact:
contact_out = np.array(nlp.contact_forces_func(states, controls, params))
np.testing.assert_almost_equal(
x_out[:, 0], [0.6118529, 0.785176, 0.6075449, 0.8083973, -0.3214905, -0.1912131, 0.6507164, -0.2359716]
)
np.testing.assert_almost_equal(contact_out[:, 0], [-2.444071, 128.8816865, 2.7245124])
else:
np.testing.assert_almost_equal(
x_out[:, 0],
[0.61185289, 0.78517596, 0.60754485, 0.80839735, -0.30241366, -10.38503791, 1.60445173, 35.80238642],
)
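# Hedged refactoring sketch (not part of the original tests): the NonLinearProgram
# setup repeated in the tests above could be factored into a helper; the model
# path, ns value and bound shapes simply mirror the torque-driven tests.
def _make_torque_driven_nlp(cx):
    nlp = NonLinearProgram()
    nlp.model = biorbd.Model(
        TestUtils.bioptim_folder() + "/examples/getting_started/models/2segments_4dof_2contacts.bioMod"
    )
    nlp.ns = 5
    nlp.cx = cx
    nlp.x_bounds = np.zeros((nlp.model.nbQ() * 3, 1))
    nlp.u_bounds = np.zeros((nlp.model.nbQ(), 1))
    return nlp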
| 37.958194
| 120
| 0.491784
| 2,187
| 22,699
| 4.945588
| 0.142204
| 0.030788
| 0.051313
| 0.071838
| 0.834504
| 0.823132
| 0.801683
| 0.801683
| 0.792622
| 0.781065
| 0
| 0.217914
| 0.410282
| 22,699
| 597
| 121
| 38.021776
| 0.590094
| 0.016212
| 0
| 0.704887
| 0
| 0
| 0.025096
| 0.014162
| 0
| 0
| 0
| 0
| 0.069549
| 1
| 0.015038
| false
| 0
| 0.022556
| 0
| 0.041353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6a98f5590ec68008681144a3cad2ee8a6d9f0359
| 55
|
py
|
Python
|
model/__init__.py
|
sun1638650145/CRNN
|
485157e5803b9be861a63ebb04f04fccb16ef5f1
|
[
"Apache-2.0"
] | 11
|
2020-09-18T02:35:48.000Z
|
2022-02-26T21:31:55.000Z
|
model/__init__.py
|
sun1638650145/CRNN
|
485157e5803b9be861a63ebb04f04fccb16ef5f1
|
[
"Apache-2.0"
] | null | null | null |
model/__init__.py
|
sun1638650145/CRNN
|
485157e5803b9be861a63ebb04f04fccb16ef5f1
|
[
"Apache-2.0"
] | null | null | null |
from .crnn import CRNN
from .crnn import CRNN_Attention
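# Hedged illustrative addition (not in the original __init__.py): an explicit
# __all__ mirroring the two names re-exported above.
__all__ = ['CRNN', 'CRNN_Attention']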
| 27.5
| 32
| 0.836364
| 9
| 55
| 5
| 0.444444
| 0.355556
| 0.622222
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 55
| 2
| 32
| 27.5
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6a9b47904f23c1124cf4fbc27654a8fe5f3b7493
| 42
|
py
|
Python
|
resources/__init__.py
|
Boryslavq/UHMI_Chalenge
|
4b7df902c0a0901c727a6fb26347dabca1067494
|
[
"MIT"
] | null | null | null |
resources/__init__.py
|
Boryslavq/UHMI_Chalenge
|
4b7df902c0a0901c727a6fb26347dabca1067494
|
[
"MIT"
] | null | null | null |
resources/__init__.py
|
Boryslavq/UHMI_Chalenge
|
4b7df902c0a0901c727a6fb26347dabca1067494
|
[
"MIT"
] | null | null | null |
from . import rest
from . import helpers
| 14
| 22
| 0.738095
| 6
| 42
| 5.166667
| 0.666667
| 0.645161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 42
| 2
| 23
| 21
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6ab1bd9218aece261b575574072df1d919112085
| 1,108
|
py
|
Python
|
lib/galaxy/web/__init__.py
|
rikeshi/galaxy
|
c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a
|
[
"CC-BY-3.0"
] | 4
|
2015-05-12T20:36:41.000Z
|
2017-06-26T15:34:02.000Z
|
lib/galaxy/web/__init__.py
|
rikeshi/galaxy
|
c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a
|
[
"CC-BY-3.0"
] | 52
|
2015-03-16T14:02:14.000Z
|
2021-12-24T09:50:23.000Z
|
lib/galaxy/web/__init__.py
|
rikeshi/galaxy
|
c536a877e4a9b3d12aa0d00fd4d5e705109a0d0a
|
[
"CC-BY-3.0"
] | 1
|
2016-03-21T12:54:06.000Z
|
2016-03-21T12:54:06.000Z
|
"""
The Galaxy web application framework
"""
from .framework import url_for
from .framework.base import httpexceptions
from .framework.decorators import (
do_not_cache,
error,
expose,
expose_api,
expose_api_anonymous,
expose_api_anonymous_and_sessionless,
expose_api_raw,
expose_api_raw_anonymous,
expose_api_raw_anonymous_and_sessionless,
format_return_as_json,
json,
json_pretty,
legacy_expose_api,
legacy_expose_api_anonymous,
legacy_expose_api_raw,
legacy_expose_api_raw_anonymous,
require_admin,
require_login,
)
__all__ = ('FormBuilder', 'do_not_cache', 'error', 'expose', 'expose_api',
'expose_api_anonymous', 'expose_api_anonymous_and_sessionless',
'expose_api_raw', 'expose_api_raw_anonymous',
'expose_api_raw_anonymous_and_sessionless', 'form',
'format_return_as_json', 'httpexceptions', 'json', 'json_pretty',
'legacy_expose_api', 'legacy_expose_api_anonymous',
'legacy_expose_api_raw', 'legacy_expose_api_raw_anonymous',
'require_admin', 'require_login', 'url_for')
| 30.777778
| 74
| 0.737365
| 135
| 1,108
| 5.459259
| 0.251852
| 0.244233
| 0.162822
| 0.170963
| 0.7327
| 0.7327
| 0.7327
| 0.7327
| 0.7327
| 0.7327
| 0
| 0
| 0.173285
| 1,108
| 35
| 75
| 31.657143
| 0.804585
| 0.032491
| 0
| 0
| 0
| 0
| 0.339286
| 0.18797
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aea2be020c7e8aa245e0f3059dcd2d6daefd1b7
| 2,865
|
py
|
Python
|
advent/model/discriminator.py
|
ChristopheGraveline064/ADVENT
|
fc0ecd099862ed68979b2197423f1bb34df09c74
|
[
"Apache-2.0"
] | 1
|
2021-01-17T06:02:10.000Z
|
2021-01-17T06:02:10.000Z
|
advent/model/discriminator.py
|
ChristopheGraveline064/ADVENT
|
fc0ecd099862ed68979b2197423f1bb34df09c74
|
[
"Apache-2.0"
] | 2
|
2021-01-17T06:21:29.000Z
|
2021-01-17T20:19:50.000Z
|
advent/model/discriminator.py
|
ChristopheGraveline064/ADVENT
|
fc0ecd099862ed68979b2197423f1bb34df09c74
|
[
"Apache-2.0"
] | null | null | null |
from torch import nn
def get_fc_discriminator(num_classes, ndf=64):
return nn.Sequential(
nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 8, 1, kernel_size=4, stride=2, padding=1),
)
# def get_fe_discriminator(num_classes, ndf=64): # 256-128-64-32-16
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf, kernel_size=2, stride=2, padding=0),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, 1, kernel_size=2, stride=2, padding=0),
# )
# def get_fe_discriminator(num_classes, ndf=64):
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, 1, kernel_size=1, stride=1, padding=0),
# )
def get_fe_discriminator(num_classes, ndf=64): # H/8,H/8,(1024 -> 256 -> 128 -> 64 -> 1)
return nn.Sequential(
nn.Conv2d(num_classes, ndf * 4, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 2, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, 1, kernel_size=1, stride=1, padding=0),
)
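# Hedged usage sketch (not part of the original module): feeding softmaxed
# segmentation logits through the fully-convolutional discriminator. The class
# count and tensor sizes are illustrative assumptions.
import torch
def _example_discriminator_usage():
    num_classes = 19  # assumed number of segmentation classes
    d_main = get_fc_discriminator(num_classes=num_classes)
    seg_logits = torch.randn(2, num_classes, 64, 64)  # dummy N x C x H x W batch
    scores = d_main(torch.softmax(seg_logits, dim=1))
    # five stride-2 convolutions reduce 64 -> 2, so scores has shape (2, 1, 2, 2)
    assert scores.shape == (2, 1, 2, 2)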
| 49.396552
| 90
| 0.624433
| 450
| 2,865
| 3.86
| 0.086667
| 0.092113
| 0.175014
| 0.221071
| 0.967761
| 0.967761
| 0.967761
| 0.938975
| 0.8981
| 0.8981
| 0
| 0.077502
| 0.211867
| 2,865
| 58
| 91
| 49.396552
| 0.691763
| 0.539267
| 0
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.043478
| 0.086957
| 0.217391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a77c9190291f537620b1ae307203f1368f48062
| 853
|
py
|
Python
|
app/decorators.py
|
GinnyGaga/lanbo
|
d0bd200b93643d3ede69b5fcce72cefd5c167e37
|
[
"MIT"
] | null | null | null |
app/decorators.py
|
GinnyGaga/lanbo
|
d0bd200b93643d3ede69b5fcce72cefd5c167e37
|
[
"MIT"
] | null | null | null |
app/decorators.py
|
GinnyGaga/lanbo
|
d0bd200b93643d3ede69b5fcce72cefd5c167e37
|
[
"MIT"
] | null | null | null |
from functools import wraps
from flask import abort
from flask_login import current_user
from .models import Permission
def permission_required(permission):
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if not current_user.can(permission):
                abort(403)
            return f(*args, **kwargs)
        return decorated_function
    return decorator
def admin_required(f):
    return permission_required(Permission.ADMINISTER)(f)
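# Hedged usage sketch (not part of the original module): how these decorators
# are typically applied to a Flask view function; the route decorator and view
# name are illustrative assumptions.
# @app.route('/admin')
# @admin_required
# def admin_dashboard():
#     return 'Administrators only'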
| 25.088235
| 56
| 0.695193
| 104
| 853
| 5.567308
| 0.288462
| 0.124352
| 0.193437
| 0.107081
| 0.80829
| 0.80829
| 0.80829
| 0.80829
| 0.80829
| 0.80829
| 0
| 0.013062
| 0.192263
| 853
| 33
| 57
| 25.848485
| 0.827286
| 0
| 0
| 0.758621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.137931
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0ac87693a78b8ba6514e5ac5aa8d9530546bb44b
| 39,691
|
py
|
Python
|
uiSetup.py
|
smokedpirate/Encryption-hash-generator
|
47bf3f1f6b6b24ca3e9078fefe46b1e6409d59e5
|
[
"Apache-2.0"
] | 4
|
2020-09-24T16:34:03.000Z
|
2020-10-23T09:52:59.000Z
|
uiSetup.py
|
Atharv-Khatri/Password-Encryption-Generator-Timathon-Submission-
|
3a3db2fa9dc27c8f604d0eb0917e8ffa717f4786
|
[
"Apache-2.0"
] | 1
|
2020-08-02T08:46:06.000Z
|
2020-08-02T08:46:06.000Z
|
uiSetup.py
|
Atharv-Khatri/Password-Encryption-Generator-Timathon-Submission-
|
3a3db2fa9dc27c8f604d0eb0917e8ffa717f4786
|
[
"Apache-2.0"
] | 1
|
2020-08-02T08:33:46.000Z
|
2020-08-02T08:33:46.000Z
|
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5 import QtGui, QtCore
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(577, 341)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
MainWindow.setPalette(palette)
MainWindow.setAutoFillBackground(False)
MainWindow.setStyleSheet("background-color: rgb(84, 84, 84);")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.Algorithms = QtWidgets.QComboBox(self.centralwidget)
self.Algorithms.setGeometry(QtCore.QRect(190, 60, 191, 41))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.Algorithms.setPalette(palette)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.Algorithms.setFont(font)
self.Algorithms.setStyleSheet("QComboBox {\n"
" color: #333;\n"
"\n"
" \n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" \n"
" }\n"
"\n"
"\n"
"QComboBox:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QComboBox:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"\n"
"")
self.Algorithms.setObjectName("Algorithms")
self.Algorithms.addItem("")
self.Algorithms.addItem("")
self.Algorithms.addItem("")
self.Algorithms.addItem("")
self.Algorithms.addItem("")
self.Algorithms.addItem("")
self.Generate = QtWidgets.QPushButton(self.centralwidget)
self.Generate.setGeometry(QtCore.QRect(190, 120, 191, 41))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.Generate.setPalette(palette)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.Generate.setFont(font)
self.Generate.setStyleSheet("QPushButton {\n"
" color: #333;\n"
"\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
self.Generate.setObjectName("Generate")
self.UserInput = QtWidgets.QLineEdit(self.centralwidget)
self.UserInput.setGeometry(QtCore.QRect(190, 20, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.UserInput.setPalette(palette)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.UserInput.setFont(font)
self.UserInput.setObjectName("UserInput")
self.Password = QtWidgets.QLineEdit(self.centralwidget)
self.Password.setGeometry(QtCore.QRect(200, 210, 141, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.Password.setPalette(palette)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.Password.setFont(font)
self.Password.setText("")
self.Password.setEchoMode(QtWidgets.QLineEdit.Password)
self.Password.setReadOnly(True)
self.Password.setObjectName("Password")
self.HideShow = QtWidgets.QPushButton(self.centralwidget)
self.HideShow.setGeometry(QtCore.QRect(350, 210, 31, 31))
self.HideShow.setStyleSheet("QPushButton {\n"
" color: #333;\n"
"\n"
" border-radius: 7px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
self.HideShow.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../../Desktop/EYECLOSE.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.HideShow.setIcon(icon)
self.HideShow.setIconSize(QtCore.QSize(30, 30))
self.HideShow.setObjectName("HideShow")
self.Copy = QtWidgets.QPushButton(self.centralwidget)
self.Copy.setGeometry(QtCore.QRect(190, 250, 201, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
gradient = QtGui.QRadialGradient(0.3, -0.4, 1.35, 0.3, -0.4)
gradient.setSpread(QtGui.QGradient.PadSpread)
gradient.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
gradient.setColorAt(0.0, QtGui.QColor(255, 255, 255))
gradient.setColorAt(1.0, QtGui.QColor(136, 136, 136))
brush = QtGui.QBrush(gradient)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(51, 51, 51, 128))
brush.setStyle(QtCore.Qt.NoBrush)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.Copy.setPalette(palette)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.Copy.setFont(font)
self.Copy.setStyleSheet("QPushButton {\n"
" color: #333;\n"
" \n"
" border-radius: 13px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
self.Copy.setObjectName("Copy")
self.hexify = QtWidgets.QCheckBox(self.centralwidget)
self.hexify.setGeometry(QtCore.QRect(250, 180, 81, 21))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 84, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
self.hexify.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.hexify.setFont(font)
self.hexify.setObjectName("hexify")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 577, 21))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.Algorithms.setCurrentText(_translate("MainWindow", "Select encryption algorithm"))
self.Algorithms.setItemText(0, _translate("MainWindow", "Select encryption algorithm"))
self.Algorithms.setItemText(1, _translate("MainWindow", "sha256"))
self.Algorithms.setItemText(2, _translate("MainWindow", "md5"))
self.Algorithms.setItemText(3, _translate("MainWindow", "sha224"))
self.Algorithms.setItemText(4, _translate("MainWindow", "sha1"))
self.Algorithms.setItemText(5, _translate("MainWindow", "sha512"))
self.Generate.setText(_translate("MainWindow", "GENERATE"))
self.Copy.setText(_translate("MainWindow", "COPY TO CLIPBOARD"))
self.hexify.setText(_translate("MainWindow", "Hexify?"))
self.HideShow.setIcon(QtGui.QIcon("Assets//EYECLOSE.png"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
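
The generated Ui_MainWindow above only builds the widgets: an algorithm combo box (its options are hashlib digest names, despite the "encryption" wording of the label), an input field, a read-only output field, a "Hexify?" checkbox, and Generate/Copy buttons. The pyuic output carries no hashing logic, so what follows is a minimal sketch of how those widgets could be wired up, assuming only Python's standard hashlib; connect_signals() is a hypothetical helper added here for illustration and is not part of the source file.

# Hedged sketch (not from the source file): wiring the generated widgets to hashlib.
# connect_signals() is a hypothetical helper; only the widget names (Algorithms,
# UserInput, Password, hexify, Generate, Copy) come from Ui_MainWindow above.
import hashlib

def connect_signals(ui, app):
    def generate():
        algo = ui.Algorithms.currentText()
        if algo not in hashlib.algorithms_available:
            return  # the "Select encryption algorithm" placeholder is still selected
        digest = hashlib.new(algo, ui.UserInput.text().encode("utf-8"))
        # The "Hexify?" checkbox plausibly toggles hexadecimal output; the unchecked
        # behaviour of the original app is unknown, so repr() is only a placeholder.
        ui.Password.setText(digest.hexdigest() if ui.hexify.isChecked()
                            else repr(digest.digest()))

    def copy_to_clipboard():
        app.clipboard().setText(ui.Password.text())

    ui.Generate.clicked.connect(generate)
    ui.Copy.clicked.connect(copy_to_clipboard)

# With the __main__ block above, this would be called right after ui.setupUi(MainWindow),
# e.g. connect_signals(ui, app).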
| 54.222678
| 105
| 0.659192
| 4,719
| 39,691
| 5.539521
| 0.038356
| 0.131785
| 0.078956
| 0.138174
| 0.901725
| 0.89285
| 0.891511
| 0.890211
| 0.884664
| 0.879194
| 0
| 0.06199
| 0.206243
| 39,691
| 731
| 106
| 54.296854
| 0.767751
| 0
| 0
| 0.868785
| 0
| 0.016575
| 0.07503
| 0.000667
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002762
| false
| 0.01105
| 0.004144
| 0
| 0.008287
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0af8f9b563483812450b36d24892bee1c8265e62
| 388
|
py
|
Python
|
terrascript/resource/sematext.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/resource/sematext.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/resource/sematext.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/resource/sematext.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.resource.sematext
#
# instead of
#
# >>> import terrascript.resource.sematext.sematext
#
# This is only available for 'official' and 'partner' providers.
from terrascript.resource.sematext.sematext import *
| 25.866667
| 73
| 0.75
| 49
| 388
| 5.938776
| 0.693878
| 0.261168
| 0.371134
| 0.226804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035608
| 0.131443
| 388
| 14
| 74
| 27.714286
| 0.827893
| 0.796392
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7c12ff613b7b049edec918f0aa7806f03a342762
| 9,197
|
py
|
Python
|
First_course/test5_base.py
|
laetrid/learning
|
b28312c34db2118fb7d5691834b8f7e628117642
|
[
"Apache-2.0"
] | null | null | null |
First_course/test5_base.py
|
laetrid/learning
|
b28312c34db2118fb7d5691834b8f7e628117642
|
[
"Apache-2.0"
] | null | null | null |
First_course/test5_base.py
|
laetrid/learning
|
b28312c34db2118fb7d5691834b8f7e628117642
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
sw1_show_cdp_neighbors = '''
SW1>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater, P - Phone
Device ID Local Intrfce Holdtme Capability Platform Port ID
R1 Fas 0/11 153 R S I 881 Fas 1
R2 Fas 0/12 123 R S I 881 Fas 1
R3 Fas 0/13 129 R S I 881 Fas 1
R4 Fas 0/14 173 R S I 881 Fas 1
R5 Fas 0/15 144 R S I 881 Fas 1
'''
sw1_show_cdp_neighbors_detail = '''
SW1> show cdp neighbors detail
--------------------------
Device ID: R1
Entry address(es):
IP address: 10.1.1.1
Platform: Cisco 881, Capabilities: Router Switch IGMP
Interface: FastEthernet0/11, Port ID (outgoing port): FastEthernet1
Holdtime: 153 sec
Version :
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2010 by Cisco Systems, Inc.
Compiled Fri 29-Oct-10 00:02 by prod_rel_team
advertisement version: 2
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
Management address(es):
--------------------------
Device ID: R2
Entry address(es):
IP address: 10.1.1.2
Platform: Cisco 881, Capabilities: Router Switch IGMP
Interface: FastEthernet0/12, Port ID (outgoing port): FastEthernet1
Holdtime: 123 sec
Version :
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2010 by Cisco Systems, Inc.
Compiled Fri 29-Oct-10 00:02 by prod_rel_team
advertisement version: 2
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
Management address(es):
--------------------------
Device ID: R3
Entry address(es):
IP address: 10.1.1.3
Platform: Cisco 881, Capabilities: Router Switch IGMP
Interface: FastEthernet0/13, Port ID (outgoing port): FastEthernet1
Holdtime: 129 sec
Version :
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2010 by Cisco Systems, Inc.
Compiled Fri 29-Oct-10 00:02 by prod_rel_team
advertisement version: 2
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
Management address(es):
--------------------------
Device ID: R4
Entry address(es):
IP address: 10.1.1.4
Platform: Cisco 881, Capabilities: Router Switch IGMP
Interface: FastEthernet0/14, Port ID (outgoing port): FastEthernet1
Holdtime: 173 sec
Version :
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2010 by Cisco Systems, Inc.
Compiled Fri 29-Oct-10 00:02 by prod_rel_team
advertisement version: 2
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
Management address(es):
--------------------------
Device ID: R5
Entry address(es):
IP address: 10.1.1.5
Platform: Cisco 881, Capabilities: Router Switch IGMP
Interface: FastEthernet0/15, Port ID (outgoing port): FastEthernet1
Holdtime: 144 sec
Version :
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2010 by Cisco Systems, Inc.
Compiled Fri 29-Oct-10 00:02 by prod_rel_team
advertisement version: 2
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
Management address(es):
'''
r1_show_cdp_neighbors = '''
R1>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Intrfce Holdtme Capability Platform Port ID
SW1 Fas 1 150 S I WS-C2950- Fas 0/11
'''
r1_show_cdp_neighbors_detail = '''
R1>show cdp neighbors detail
-------------------------
Device ID: SW1
Entry address(es):
IP address: 10.1.1.22
Platform: cisco WS-C2950-24, Capabilities: Switch IGMP
Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/11
Holdtime : 145 sec
Version :
Cisco Internetwork Operating System Software
IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Fri 28-Jul-06 15:16 by weiliu
advertisement version: 2
Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
'''
r2_show_cdp_neighbors = '''
R2>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Intrfce Holdtme Capability Platform Port ID
SW1 Fas 1 150 S I WS-C2950- Fas 0/12
'''
r2_show_cdp_neighbors_detail = '''
R2>show cdp neighbors detail
-------------------------
Device ID: SW1
Entry address(es):
IP address: 10.1.1.22
Platform: cisco WS-C2950-24, Capabilities: Switch IGMP
Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/12
Holdtime : 145 sec
Version :
Cisco Internetwork Operating System Software
IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Fri 28-Jul-06 15:16 by weiliu
advertisement version: 2
Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
'''
r3_show_cdp_neighbors = '''
R3>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Intrfce Holdtme Capability Platform Port ID
SW1 Fas 1 150 S I WS-C2950- Fas 0/13
'''
r3_show_cdp_neighbors_detail = '''
R3>show cdp neighbors detail
-------------------------
Device ID: SW1
Entry address(es):
IP address: 10.1.1.22
Platform: cisco WS-C2950-24, Capabilities: Switch IGMP
Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/13
Holdtime : 145 sec
Version :
Cisco Internetwork Operating System Software
IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Fri 28-Jul-06 15:16 by weiliu
advertisement version: 2
Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
'''
r4_show_cdp_neighbors = '''
R4>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Intrfce Holdtme Capability Platform Port ID
SW1 Fas 1 150 S I WS-C2950- Fas 0/14
'''
r4_show_cdp_neighbors_detail = '''
R4>show cdp neighbors detail
-------------------------
Device ID: SW1
Entry address(es):
IP address: 10.1.1.22
Platform: cisco WS-C2950-24, Capabilities: Switch IGMP
Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/14
Holdtime : 145 sec
Version :
Cisco Internetwork Operating System Software
IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Fri 28-Jul-06 15:16 by weiliu
advertisement version: 2
Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
'''
r5_show_cdp_neighbors = '''
R5>show cdp neighbors
Capability Codes: R - Router, T - Trans Bridge, B - Source Route Bridge
S - Switch, H - Host, I - IGMP, r - Repeater
Device ID Local Intrfce Holdtme Capability Platform Port ID
SW1 Fas 1 150 S I WS-C2950- Fas 0/15
'''
r5_show_cdp_neighbors_detail = '''
R5>show cdp neighbors detail
-------------------------
Device ID: SW1
Entry address(es):
IP address: 10.1.1.22
Platform: cisco WS-C2950-24, Capabilities: Switch IGMP
Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/15
Holdtime : 145 sec
Version :
Cisco Internetwork Operating System Software
IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)
Copyright (c) 1986-2006 by cisco Systems, Inc.
Compiled Fri 28-Jul-06 15:16 by weiliu
advertisement version: 2
Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000
VTP Management Domain: ''
Native VLAN: 1
Duplex: full
'''
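
The module above is pure fixture data: canned `show cdp neighbors` and `show cdp neighbors detail` outputs for one switch and five routers. As a sketch of how such fixtures are typically consumed, the snippet below pulls the neighbor table apart with a regular expression; the pattern and helper name are illustrative assumptions and are not taken from the course's actual test code.

# Illustrative only: extract (device, local interface, holdtime, remote port) from
# the sw1_show_cdp_neighbors fixture defined above. The regex is an assumption
# about the table layout, not part of the original test suite.
import re

CDP_ROW = re.compile(
    r"^(?P<device>\S+)\s+(?P<local>\S+ \S+)\s+(?P<holdtime>\d+)\s+.+\s(?P<remote>\S+ \S+)\s*$",
    re.MULTILINE,
)

def parse_cdp_neighbors(output):
    """Return (device_id, local_interface, holdtime, remote_port) tuples."""
    return [
        (m["device"], m["local"], int(m["holdtime"]), m["remote"])
        for m in CDP_ROW.finditer(output)
    ]

# parse_cdp_neighbors(sw1_show_cdp_neighbors) would yield entries such as
# ("R1", "Fas 0/11", 153, "Fas 1"), one per row of the neighbor table.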
| 37.084677
| 127
| 0.677938
| 1,287
| 9,197
| 4.804196
| 0.108003
| 0.027171
| 0.062106
| 0.042698
| 0.958273
| 0.932719
| 0.888242
| 0.888242
| 0.866408
| 0.804787
| 0
| 0.126139
| 0.212134
| 9,197
| 247
| 128
| 37.234818
| 0.72716
| 0.002175
| 0
| 0.74359
| 0
| 0.042735
| 0.953248
| 0.074106
| 0
| 0
| 0.007629
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7c7d98835e8aa5d863003dad874d15530ea2ef72
| 7,799
|
py
|
Python
|
myenv/lib/python3.5/site-packages/tests/handlers/logging/logging_tests.py
|
rupeshparab/techscan
|
ce2558602ddad31873d7129f25b1cc61895b9939
|
[
"MIT"
] | 1
|
2019-11-01T11:45:22.000Z
|
2019-11-01T11:45:22.000Z
|
myenv/lib/python3.5/site-packages/tests/handlers/logging/logging_tests.py
|
rupeshparab/techscan
|
ce2558602ddad31873d7129f25b1cc61895b9939
|
[
"MIT"
] | 3
|
2020-02-11T23:03:45.000Z
|
2021-06-10T18:05:11.000Z
|
myenv/lib/python3.5/site-packages/tests/handlers/logging/logging_tests.py
|
rupeshparab/techscan
|
ce2558602ddad31873d7129f25b1cc61895b9939
|
[
"MIT"
] | 1
|
2019-11-01T11:38:54.000Z
|
2019-11-01T11:38:54.000Z
|
import logging
from opbeat.handlers.logging import OpbeatHandler
from opbeat.utils.stacks import iter_stack_frames
from tests.helpers import get_tempstoreclient
from tests.utils.compat import TestCase
class LoggingIntegrationTest(TestCase):
def setUp(self):
self.client = get_tempstoreclient(include_paths=['tests', 'opbeat'])
self.handler = OpbeatHandler(self.client)
self.logger = logging.getLogger(__name__)
self.logger.handlers = []
self.logger.addHandler(self.handler)
def test_logger_basic(self):
self.logger.error('This is a test error')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['logger'], __name__)
self.assertEquals(event['level'], "error")
self.assertEquals(event['message'], 'This is a test error')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test error')
self.assertEquals(msg['params'], ())
def test_logger_warning(self):
self.logger.warning('This is a test warning')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['logger'], __name__)
self.assertEquals(event['level'], "warning")
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test warning')
self.assertEquals(msg['params'], ())
def test_logger_extra_data(self):
self.logger.info('This is a test info with a url', extra=dict(
data=dict(
url='http://example.com',
),
))
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['extra']['url'], 'http://example.com')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test info with a url')
self.assertEquals(msg['params'], ())
def test_logger_exc_info(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
self.logger.info('This is a test info with an exception', exc_info=True)
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
# self.assertEquals(event['message'], 'This is a test info with an exception')
self.assertTrue('stacktrace' in event)
self.assertTrue('exception' in event)
exc = event['exception']
self.assertEquals(exc['type'], 'ValueError')
self.assertEquals(exc['value'], 'This is a test ValueError')
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test info with an exception')
self.assertEquals(msg['params'], ())
def test_message_params(self):
self.logger.info('This is a test of %s', 'args')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
# self.assertEquals(event['message'], 'This is a test of args')
# print event.keys()
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of %s')
self.assertEquals(msg['params'], ('args',))
def test_record_stack(self):
self.logger.info('This is a test of stacks', extra={'stack': True})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertTrue('stacktrace' in event)
frames = event['stacktrace']['frames']
self.assertNotEquals(len(frames), 1)
frame = frames[0]
self.assertEquals(frame['module'], __name__)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of stacks')
self.assertEquals(msg['params'], ())
self.assertEquals(event['culprit'], 'tests.handlers.logging.logging_tests.test_record_stack')
self.assertEquals(event['message'], 'This is a test of stacks')
def test_no_record_stack(self):
self.logger.info('This is a test of no stacks', extra={'stack': False})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event.get('culprit'), None)
self.assertEquals(event['message'], 'This is a test of no stacks')
self.assertFalse('stacktrace' in event)
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of no stacks')
self.assertEquals(msg['params'], ())
def test_explicit_stack(self):
self.logger.info('This is a test of stacks', extra={'stack': iter_stack_frames()})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertTrue('culprit' in event, event)
self.assertEquals(event['culprit'], 'tests.handlers.logging.logging_tests.test_explicit_stack')
self.assertTrue('message' in event, event)
self.assertEquals(event['message'], 'This is a test of stacks')
self.assertFalse('exception' in event)
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test of stacks')
self.assertEquals(msg['params'], ())
self.assertTrue('stacktrace' in event)
def test_extra_culprit(self):
self.logger.info('This is a test of stacks', extra={'culprit': 'foo.bar'})
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['culprit'], 'foo.bar')
def test_logger_exception(self):
try:
raise ValueError('This is a test ValueError')
except ValueError:
self.logger.exception('This is a test with an exception')
self.assertEquals(len(self.client.events), 1)
event = self.client.events.pop(0)
self.assertEquals(event['message'], 'This is a test with an exception')
self.assertTrue('stacktrace' in event)
self.assertTrue('exception' in event)
exc = event['exception']
self.assertEquals(exc['type'], 'ValueError')
self.assertEquals(exc['value'], 'This is a test ValueError')
self.assertTrue('param_message' in event)
msg = event['param_message']
self.assertEquals(msg['message'], 'This is a test with an exception')
self.assertEquals(msg['params'], ())
class LoggingHandlerTest(TestCase):
def test_client_arg(self):
client = get_tempstoreclient(include_paths=['tests'])
handler = OpbeatHandler(client)
self.assertEquals(handler.client, client)
def test_client_kwarg(self):
client = get_tempstoreclient(include_paths=['tests'])
handler = OpbeatHandler(client=client)
self.assertEquals(handler.client, client)
def test_invalid_first_arg_type(self):
self.assertRaises(ValueError, OpbeatHandler, object)
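
The tests above drive OpbeatHandler through an in-memory test client (get_tempstoreclient). For context, here is a hedged sketch of how the handler is attached in application code, assuming the constructor arguments documented for the (long-deprecated) opbeat client; the credential strings are placeholders.

# Hedged usage sketch, not part of the test module above. Credentials are
# placeholders; Client(organization_id=..., app_id=..., secret_token=...) is
# assumed from opbeat's documented API.
import logging

from opbeat import Client
from opbeat.handlers.logging import OpbeatHandler

client = Client(
    organization_id="<organization-id>",
    app_id="<app-id>",
    secret_token="<secret-token>",
)

logger = logging.getLogger("myapp")
logger.addHandler(OpbeatHandler(client))

# With default logger levels, records at WARNING and above reach the handler;
# exc_info=True or extra={"stack": True} attach the exception/stack data that the
# tests above assert on.
logger.error("Something went wrong", extra={"stack": True})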
| 43.569832
| 103
| 0.647391
| 952
| 7,799
| 5.216387
| 0.09979
| 0.164317
| 0.042288
| 0.066452
| 0.809505
| 0.793798
| 0.781514
| 0.731575
| 0.700967
| 0.659283
| 0
| 0.003624
| 0.221567
| 7,799
| 178
| 104
| 43.814607
| 0.814363
| 0.020131
| 0
| 0.562092
| 0
| 0
| 0.216418
| 0.014402
| 0
| 0
| 0
| 0
| 0.522876
| 1
| 0.091503
| false
| 0
| 0.03268
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c9109fd0312f441ea7db6be13582d7563d361c0
| 196
|
py
|
Python
|
frappe/patches/v13_0/remove_web_view.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 3,755
|
2015-01-06T07:47:43.000Z
|
2022-03-31T20:54:23.000Z
|
frappe/patches/v13_0/remove_web_view.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 7,369
|
2015-01-01T19:59:41.000Z
|
2022-03-31T23:02:05.000Z
|
frappe/patches/v13_0/remove_web_view.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 2,685
|
2015-01-07T17:51:03.000Z
|
2022-03-31T23:16:24.000Z
|
import frappe
def execute():
frappe.delete_doc_if_exists("DocType", "Web View")
frappe.delete_doc_if_exists("DocType", "Web View Component")
frappe.delete_doc_if_exists("DocType", "CSS Class")
| 32.666667
| 61
| 0.77551
| 29
| 196
| 4.931034
| 0.482759
| 0.251748
| 0.314685
| 0.356643
| 0.727273
| 0.727273
| 0.517483
| 0.517483
| 0
| 0
| 0
| 0
| 0.086735
| 196
| 6
| 62
| 32.666667
| 0.798883
| 0
| 0
| 0
| 0
| 0
| 0.284264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c938029fd9d5d4852f7e0ef36d2f9a92b855733
| 2,962
|
py
|
Python
|
tests/assemblers/test_ensemble.py
|
yarix/m2cgen
|
f1aa01e4c70a6d1a8893e27bfbe3c36fcb1e8546
|
[
"MIT"
] | 1
|
2021-05-28T06:59:21.000Z
|
2021-05-28T06:59:21.000Z
|
tests/assemblers/test_ensemble.py
|
yarix/m2cgen
|
f1aa01e4c70a6d1a8893e27bfbe3c36fcb1e8546
|
[
"MIT"
] | null | null | null |
tests/assemblers/test_ensemble.py
|
yarix/m2cgen
|
f1aa01e4c70a6d1a8893e27bfbe3c36fcb1e8546
|
[
"MIT"
] | null | null | null |
from sklearn import ensemble
from m2cgen import assemblers, ast
from tests import utils
def test_single_condition():
estimator = ensemble.RandomForestRegressor(n_estimators=2, random_state=1)
estimator.fit([[1], [2]], [1, 2])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
expected = ast.BinNumExpr(
ast.BinNumExpr(
ast.NumVal(1.0),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.NumVal(1.0),
ast.NumVal(2.0)),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
def test_two_conditions():
estimator = ensemble.RandomForestRegressor(n_estimators=2, random_state=13)
estimator.fit([[1], [2], [3]], [1, 2, 3])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
expected = ast.BinNumExpr(
ast.BinNumExpr(
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.NumVal(1.0),
ast.NumVal(2.0)),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(2.5),
ast.CompOpType.LTE),
ast.NumVal(2.0),
ast.NumVal(3.0)),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
def test_multi_class():
estimator = ensemble.RandomForestClassifier(
n_estimators=2, random_state=13)
estimator.fit([[1], [2], [3]], [1, -1, 1])
assembler = assemblers.RandomForestModelAssembler(estimator)
actual = assembler.assemble()
expected = ast.BinVectorNumExpr(
ast.BinVectorExpr(
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(1.5),
ast.CompOpType.LTE),
ast.VectorVal([
ast.NumVal(0.0),
ast.NumVal(1.0)]),
ast.VectorVal([
ast.NumVal(1.0),
ast.NumVal(0.0)])),
ast.IfExpr(
ast.CompExpr(
ast.FeatureRef(0),
ast.NumVal(2.5),
ast.CompOpType.LTE),
ast.VectorVal([
ast.NumVal(1.0),
ast.NumVal(0.0)]),
ast.VectorVal([
ast.NumVal(0.0),
ast.NumVal(1.0)])),
ast.BinNumOpType.ADD),
ast.NumVal(0.5),
ast.BinNumOpType.MUL)
assert utils.cmp_exprs(actual, expected)
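
These tests pin down the intermediate AST that RandomForestModelAssembler emits for tiny fitted forests. For context, a hedged sketch of the library's higher-level entry point, which goes straight from a fitted estimator to generated source; export_to_python is part of m2cgen's public API, though the exact text it produces varies by version.

# Illustrative sketch: the end-to-end path the assembler AST above feeds into.
# The generated code differs between m2cgen versions, so its exact text is not shown.
from sklearn import ensemble
import m2cgen as m2c

estimator = ensemble.RandomForestRegressor(n_estimators=2, random_state=1)
estimator.fit([[1], [2]], [1, 2])

# Translate the fitted model into dependency-free Python source.
print(m2c.export_to_python(estimator))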
| 29.326733
| 79
| 0.502701
| 296
| 2,962
| 4.97973
| 0.175676
| 0.140434
| 0.081411
| 0.052239
| 0.852103
| 0.843962
| 0.839891
| 0.839891
| 0.772727
| 0.713026
| 0
| 0.041237
| 0.377785
| 2,962
| 100
| 80
| 29.62
| 0.758546
| 0
| 0
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.037037
| false
| 0
| 0.037037
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cba9b9fb8b398f82ae1a8d924fec2ad7e1b9ddf
| 2,437
|
py
|
Python
|
matdgl/layers/partitionpaddinglayer.py
|
huzongxiang/CrystalNetwork
|
a434f76fa4347d42b3c905852ce265cd0bcefca3
|
[
"BSD-2-Clause"
] | 6
|
2022-03-30T13:47:03.000Z
|
2022-03-31T09:27:46.000Z
|
matdgl/layers/partitionpaddinglayer.py
|
huzongxiang/CrystalNetwork
|
a434f76fa4347d42b3c905852ce265cd0bcefca3
|
[
"BSD-2-Clause"
] | null | null | null |
matdgl/layers/partitionpaddinglayer.py
|
huzongxiang/CrystalNetwork
|
a434f76fa4347d42b3c905852ce265cd0bcefca3
|
[
"BSD-2-Clause"
] | 2
|
2022-03-30T20:53:11.000Z
|
2022-03-31T22:20:05.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 13 14:47:13 2021
@author: huzongxiang
"""
import tensorflow as tf
from tensorflow.keras import layers
class PartitionPadding(layers.Layer):
def __init__(self, batch_size, **kwargs):
super().__init__(**kwargs)
self.batch_size = batch_size
def call(self, inputs):
features, graph_indices = inputs
# Obtain subgraphs
features = tf.dynamic_partition(
features, graph_indices, self.batch_size
)
# Pad and stack subgraphs
num_features = [tf.shape(f)[0] for f in features]
max_num = tf.reduce_max(num_features)
features_padded = tf.stack(
[
tf.pad(f, [(0, max_num - n), (0, 0)])
for f, n in zip(features, num_features)
],
axis=0,
)
# Remove empty subgraphs (usually for last batch)
nonempty_examples = tf.where(tf.reduce_sum(features_padded, (1, 2)) != 0)
nonempty_examples = tf.squeeze(nonempty_examples, axis=-1)
features_batch = tf.gather(features_padded, nonempty_examples, axis=0)
return features_batch
def get_config(self):
config = super().get_config()
config.update({"batch": self.batch_size})
return config
class PartitionPaddingPair(layers.Layer):
def __init__(self, batch_size, **kwargs):
super().__init__(**kwargs)
self.batch_size = batch_size
def call(self, inputs):
features, graph_indices = inputs
# Obtain subgraphs
features = tf.dynamic_partition(
features, graph_indices, self.batch_size
)
# Pad and stack subgraphs
num_features = [tf.shape(f)[0] for f in features]
max_num = tf.reduce_max(num_features)
features_padded = tf.stack(
[
tf.pad(f, [(0, max_num - n), (0, 0)])
for f, n in zip(features, num_features)
],
axis=0,
)
# Remove empty subgraphs (usually for last batch)
nonempty_examples = tf.unique(graph_indices)[0]
features_batch = tf.gather(features_padded, nonempty_examples, axis=0)
return features_batch
def get_config(self):
config = super().get_config()
config.update({"batch_size": self.batch_size})
return config
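
Both layers take a flat node-feature matrix plus a per-node graph index and return a zero-padded batch, one slice per graph. A small usage sketch with toy shapes follows; the tensor values are made up purely for illustration.

# Hedged usage sketch (not from the module above): 7 nodes spread over 3 graphs.
import tensorflow as tf

features = tf.random.uniform((7, 4))                # 7 nodes, 4 features each
graph_indices = tf.constant([0, 0, 0, 1, 1, 2, 2])  # nodes 0-2 belong to graph 0, etc.

layer = PartitionPadding(batch_size=3)
batched = layer([features, graph_indices])
print(batched.shape)  # (3, 3, 4): 3 graphs, each padded to the largest graph (3 nodes)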
| 27.077778
| 81
| 0.585556
| 287
| 2,437
| 4.74216
| 0.250871
| 0.072741
| 0.076414
| 0.026451
| 0.814107
| 0.77737
| 0.77737
| 0.77737
| 0.77737
| 0.77737
| 0
| 0.017889
| 0.311859
| 2,437
| 90
| 82
| 27.077778
| 0.793679
| 0.105868
| 0
| 0.716981
| 0
| 0
| 0.006928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113208
| false
| 0
| 0.037736
| 0
| 0.264151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ccbe673fd6019f10368e191ac41278443f5c053
| 9,554
|
py
|
Python
|
python/paddle/fluid/tests/unittests/ir/inference/test_trt_reduce_mean_op.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 8
|
2016-08-15T07:02:27.000Z
|
2016-08-24T09:34:00.000Z
|
python/paddle/fluid/tests/unittests/ir/inference/test_trt_reduce_mean_op.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 1
|
2021-11-01T06:28:16.000Z
|
2021-11-01T06:28:16.000Z
|
python/paddle/fluid/tests/unittests/ir/inference/test_trt_reduce_mean_op.py
|
zmxdream/Paddle
|
04f042a5d507ad98f7f2cfc3cbc44b06d7a7f45c
|
[
"Apache-2.0"
] | 5
|
2021-12-10T11:20:06.000Z
|
2022-02-18T05:18:12.000Z
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from inference_pass_test import InferencePassTest
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.core import PassVersionChecker
from paddle.fluid.core import AnalysisConfig
class TRTReduceMeanTest(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 3, -1, -1], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(
data, dim=[2, -1], keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([3, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanTest.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Float32, False, False)
self.fetch_list = [out]
self.dynamic_shape_params = TRTReduceMeanTest.DynamicShapeParam({
'data': [1, 3, 16, 16]
}, {'data': [3, 3, 56, 56]}, {'data': [3, 3, 56, 56]}, False)
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanAllNoBatchTest(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 3, -1, -1], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(data, keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([3, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanAllNoBatchTest.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Float32, False, False)
self.fetch_list = [out]
self.dynamic_shape_params = TRTReduceMeanAllNoBatchTest.DynamicShapeParam(
{
'data': [1, 3, 16, 16]
}, {'data': [3, 3, 56, 56]}, {'data': [3, 3, 56, 56]}, False)
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanTestFP16(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 3, -1, -1], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(
data, dim=[2, -1], keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([3, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanTestFP16.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Half, False, False)
self.fetch_list = [out]
self.dynamic_shape_params = TRTReduceMeanTestFP16.DynamicShapeParam({
'data': [1, 3, 16, 16]
}, {'data': [3, 3, 56, 56]}, {'data': [3, 3, 56, 56]}, False)
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanAllTest(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[-1, 3, 56, 56], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(data, keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([3, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanAllTest.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Float32, False, False)
self.fetch_list = [out]
self.dynamic_shape_params = TRTReduceMeanAllTest.DynamicShapeParam({
'data': [1, 3, 56, 56]
}, {'data': [3, 3, 56, 56]}, {'data': [3, 3, 56, 56]}, False)
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanTestStatic(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[3, 3, 56, 56], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(
data, dim=[2, -1], keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([3, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanTestStatic.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Float32, False, False)
self.fetch_list = [out]
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanStaticAllTest(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[4, 3, 56, 56], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(data, keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([4, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanStaticAllTest.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Float32, False, False)
self.fetch_list = [out]
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanStaticFP16(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[4, 3, 56, 56], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(data, keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([4, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanStaticFP16.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Half, False, False)
self.fetch_list = [out]
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
class TRTReduceMeanFP16Static(InferencePassTest):
def setUp(self):
with fluid.program_guard(self.main_program, self.startup_program):
data = fluid.data(
name="data", shape=[4, 3, 56, 56], dtype="float32")
reduce_mean = fluid.layers.reduce_mean(data, keep_dim=True)
out = fluid.layers.batch_norm(reduce_mean, is_test=True)
self.feeds = {
"data": np.random.random([4, 3, 56, 56]).astype("float32"),
}
self.enable_trt = True
self.trt_parameters = TRTReduceMeanFP16Static.TensorRTParam(
1 << 30, 32, 1, AnalysisConfig.Precision.Half, True, False)
self.fetch_list = [out]
def test_check_output(self):
if core.is_compiled_with_cuda():
use_gpu = True
self.check_output_with_option(use_gpu, flatten=True)
self.assertTrue(
PassVersionChecker.IsCompatible('tensorrt_subgraph_pass'))
if __name__ == "__main__":
unittest.main()
| 40.483051
| 82
| 0.626125
| 1,132
| 9,554
| 5.094523
| 0.130742
| 0.044391
| 0.019074
| 0.014566
| 0.80059
| 0.79192
| 0.79192
| 0.79192
| 0.783076
| 0.783076
| 0
| 0.039537
| 0.25874
| 9,554
| 235
| 83
| 40.655319
| 0.774781
| 0.061022
| 0
| 0.768817
| 0
| 0
| 0.045551
| 0.019649
| 0
| 0
| 0
| 0
| 0.043011
| 1
| 0.086022
| false
| 0.096774
| 0.043011
| 0
| 0.172043
| 0.005376
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7ccec8a64f1094a2aaa4d1c42f4858ca203734a3
| 122
|
py
|
Python
|
packages/gtmapi/lmsrvcore/api/interfaces/__init__.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | 60
|
2018-09-26T15:46:00.000Z
|
2021-10-10T02:37:14.000Z
|
packages/gtmapi/lmsrvcore/api/interfaces/__init__.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | 1,706
|
2018-09-26T16:11:22.000Z
|
2021-08-20T13:37:59.000Z
|
packages/gtmapi/lmsrvcore/api/interfaces/__init__.py
|
jjwatts/gigantum-client
|
88ce0475fb6880322bdd06d987c494e29064f278
|
[
"MIT"
] | 11
|
2019-03-14T13:23:51.000Z
|
2022-01-25T01:29:16.000Z
|
from lmsrvcore.api.interfaces.user import User
from lmsrvcore.api.interfaces.git import GitCommit, GitRef, GitRepository
| 30.5
| 73
| 0.844262
| 16
| 122
| 6.4375
| 0.625
| 0.252427
| 0.31068
| 0.504854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090164
| 122
| 3
| 74
| 40.666667
| 0.927928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7ce35b3e99fb727d7c2e93f64173851978f39e8c
| 30,955
|
py
|
Python
|
saleor/checkout/tests/test_base_calculations.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/checkout/tests/test_base_calculations.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | 76
|
2021-11-01T04:53:42.000Z
|
2022-03-28T04:51:25.000Z
|
saleor/checkout/tests/test_base_calculations.py
|
nestfiy/saleor
|
6fce3bc5c0ca72ac28db99553e6d2b49249c6dac
|
[
"CC-BY-4.0"
] | null | null | null |
from decimal import Decimal
from prices import Money, TaxedMoney
from ...discount import DiscountValueType, VoucherType
from ...discount.utils import get_product_discount_on_sale
from ..base_calculations import (
base_checkout_total,
base_tax_rate,
calculate_base_line_total_price,
calculate_base_line_unit_price,
)
from ..fetch import fetch_checkout_lines
def test_calculate_base_line_unit_price(checkout_with_single_item):
# given
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_custom_price(checkout_with_single_item):
# given
line = checkout_with_single_item.lines.first()
price_override = Decimal("12.22")
line.price_override = price_override
line.save(update_fields=["price_override"])
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
currency = checkout_line_info.channel_listing.currency
expected_price = Money(price_override, currency)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_variant_on_sale(
checkout_with_single_item, discount_info, category
):
# given
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
expected_undiscounted_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
expected_price = sale_discount(expected_undiscounted_price)
assert prices_data.undiscounted_price == expected_undiscounted_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_variant_on_sale_custom_price(
checkout_with_single_item, discount_info, category
):
# given
line = checkout_with_single_item.lines.first()
price_override = Decimal("20.00")
line.price_override = price_override
line.save(update_fields=["price_override"])
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
currency = checkout_line_info.channel_listing.currency
expected_undiscounted_price = Money(price_override, currency)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
expected_price = sale_discount(expected_undiscounted_price)
assert prices_data.undiscounted_price == expected_undiscounted_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_fixed_voucher(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price - voucher_amount
def test_calculate_base_line_unit_price_with_fixed_voucher_custom_prices(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
price_override = Decimal("20.00")
checkout_line.price_override = price_override
checkout_line.save(update_fields=["price_override"])
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
currency = checkout_line_info.channel_listing.currency
expected_price = Money(price_override, currency)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price - voucher_amount
def test_calculate_base_line_unit_price_with_percentage_voucher(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_voucher_amount = Money(Decimal("1"), checkout_with_single_item.currency)
expected_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price - expected_voucher_amount
def test_calculate_base_line_unit_price_with_percentage_voucher_custom_prices(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
price_override = Decimal("20.00")
checkout_line.price_override = price_override
checkout_line.save(update_fields=["price_override"])
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
currency = checkout_line_info.channel_listing.currency
expected_price = Money(price_override, currency)
expected_voucher_amount = Money(
price_override * voucher_percent_value / 100, checkout_with_single_item.currency
)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price - expected_voucher_amount
def test_calculate_base_line_unit_price_with_discounts_apply_once_per_order(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.apply_once_per_order = True
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
# An apply-once-per-order voucher is applied when the line total is calculated, so the unit price stays undiscounted.
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_discounts_once_per_order_custom_prices(
checkout_with_single_item, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
price_override = Decimal("20.00")
checkout_line.price_override = price_override
checkout_line.save(update_fields=["price_override"])
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.apply_once_per_order = True
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
currency = checkout_line_info.channel_listing.currency
expected_price = Money(price_override, currency)
assert prices_data.undiscounted_price == expected_price
assert prices_data.price_with_sale == expected_price
# An apply-once-per-order voucher is applied when the line total is calculated, so the unit price stays undiscounted.
assert prices_data.price_with_discounts == expected_price
def test_calculate_base_line_unit_price_with_variant_on_sale_and_voucher(
checkout_with_single_item, discount_info, category, voucher, channel_USD
):
# given
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_unit_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
expected_undiscounted_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
sale_discount_amount = sale_discount(expected_undiscounted_price)
expected_price = expected_undiscounted_price - sale_discount_amount
assert prices_data.undiscounted_price == expected_undiscounted_price
assert prices_data.price_with_sale == expected_price
assert prices_data.price_with_discounts == expected_price - voucher_amount
def test_calculate_base_line_total_price(checkout_with_single_item):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_price * quantity
assert prices_data.price_with_sale == expected_price * quantity
assert prices_data.price_with_discounts == expected_price * quantity
def test_calculate_base_line_total_price_with_variant_on_sale(
checkout_with_single_item, discount_info, category
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert not checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
expected_undiscounted_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
sale_discount_amount = sale_discount(expected_undiscounted_unit_price)
expected_price = expected_undiscounted_unit_price - sale_discount_amount
assert prices_data.undiscounted_price == expected_undiscounted_unit_price * quantity
assert prices_data.price_with_sale == expected_price * quantity
assert prices_data.price_with_discounts == expected_price * quantity
def test_calculate_base_line_total_price_with_fixed_voucher(
checkout_with_single_item, voucher, channel_USD
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_unit_price * quantity
assert prices_data.price_with_sale == expected_unit_price * quantity
assert (
prices_data.price_with_discounts
== (expected_unit_price - voucher_amount) * quantity
)
def test_calculate_base_line_total_price_with_percentage_voucher(
checkout_with_single_item, voucher, channel_USD
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_voucher_amount = Money(Decimal("1"), checkout_with_single_item.currency)
expected_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_unit_price * quantity
assert prices_data.price_with_sale == expected_unit_price * quantity
assert (
prices_data.price_with_discounts
== (expected_unit_price - expected_voucher_amount) * quantity
)
def test_calculate_base_line_total_price_with_discounts_apply_once_per_order(
checkout_with_single_item, voucher, channel_USD
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.apply_once_per_order = True
voucher.discount_value_type = DiscountValueType.PERCENTAGE
voucher.save()
voucher_percent_value = Decimal(10)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount_value = voucher_percent_value
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[]
)
# then
expected_voucher_amount = Money(Decimal("1"), checkout_with_single_item.currency)
expected_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
assert prices_data.undiscounted_price == expected_unit_price * quantity
assert prices_data.price_with_sale == expected_unit_price * quantity
# An apply-once-per-order voucher is subtracted once from the line total, not once per unit.
assert (
prices_data.price_with_discounts
== (expected_unit_price * quantity) - expected_voucher_amount
)
def test_calculate_base_line_total_price_with_variant_on_sale_and_voucher(
checkout_with_single_item, discount_info, category, voucher, channel_USD
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
expected_undiscounted_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
sale_discount_amount = sale_discount(expected_undiscounted_unit_price)
expected_unit_price = expected_undiscounted_unit_price - sale_discount_amount
assert prices_data.undiscounted_price == expected_undiscounted_unit_price * quantity
assert prices_data.price_with_sale == expected_unit_price * quantity
assert (
prices_data.price_with_discounts
== (expected_unit_price - voucher_amount) * quantity
)
def test_calculate_base_line_total_price_with_variant_on_sale_and_voucher_applied_once(
checkout_with_single_item, discount_info, category, voucher, channel_USD
):
# given
quantity = 3
checkout_line = checkout_with_single_item.lines.first()
checkout_line.quantity = quantity
checkout_line.save()
checkout_line = checkout_with_single_item.lines.first()
voucher.products.add(checkout_line.variant.product)
voucher.type = VoucherType.SPECIFIC_PRODUCT
voucher.apply_once_per_order = True
voucher.save()
voucher_amount = Money(Decimal(3), checkout_with_single_item.currency)
voucher_channel_listing = voucher.channel_listings.get(channel=channel_USD)
voucher_channel_listing.discount = voucher_amount
voucher_channel_listing.save()
checkout_with_single_item.voucher_code = voucher.code
checkout_lines_info, _ = fetch_checkout_lines(checkout_with_single_item)
checkout_line_info = checkout_lines_info[0]
assert checkout_line_info.voucher
variant = checkout_line_info.variant
# set category on sale
variant.product.category = category
variant.product.save()
checkout_line_info.product = variant.product
# when
prices_data = calculate_base_line_total_price(
checkout_line_info, checkout_with_single_item.channel, discounts=[discount_info]
)
# then
expected_undiscounted_unit_price = variant.get_price(
product=checkout_line_info.product,
collections=checkout_line_info.collections,
channel=checkout_with_single_item.channel,
channel_listing=checkout_line_info.channel_listing,
discounts=[],
)
product_collections = set(pc.id for pc in checkout_line_info.collections)
_, sale_discount = get_product_discount_on_sale(
product=checkout_line_info.product,
product_collections=product_collections,
discount=discount_info,
channel=checkout_with_single_item.channel,
variant_id=variant.id,
)
sale_discount_amount = sale_discount(expected_undiscounted_unit_price)
expected_unit_price = expected_undiscounted_unit_price - sale_discount_amount
assert prices_data.undiscounted_price == expected_undiscounted_unit_price * quantity
assert prices_data.price_with_sale == expected_unit_price * quantity
assert (
prices_data.price_with_discounts
== (expected_unit_price * quantity) - voucher_amount
)
def test_base_tax_rate_net_price_zero():
price = TaxedMoney(net=Money(0, "USD"), gross=Money(3, "USD"))
assert base_tax_rate(price) == Decimal("0.0")
def test_base_tax_rate_gross_price_zero():
price = TaxedMoney(net=Money(3, "USD"), gross=Money(0, "USD"))
assert base_tax_rate(price) == Decimal("0.0")
def test_base_checkout_total():
# given
currency = "USD"
taxed_money = TaxedMoney(net=Money(10, currency), gross=Money(10, currency))
subtotal = taxed_money
shipping_price = taxed_money
discount = Money(5, currency)
# when
total = base_checkout_total(subtotal, shipping_price, discount, currency)
expected = subtotal + shipping_price - discount
# then
assert total == expected
def test_base_checkout_total_high_discount():
# given
currency = "USD"
zero_taxed_money = TaxedMoney(net=Money(0, currency), gross=Money(0, currency))
subtotal = TaxedMoney(net=Money(10, currency), gross=Money(12, currency))
shipping_price = zero_taxed_money
discount = Money(20, currency)
# when
total = base_checkout_total(subtotal, shipping_price, discount, currency)
# then
assert total == zero_taxed_money
| 37.475787
| 88
| 0.772767
| 3,791
| 30,955
| 5.839093
| 0.025851
| 0.098121
| 0.093965
| 0.115287
| 0.970184
| 0.958077
| 0.951211
| 0.946287
| 0.944028
| 0.940143
| 0
| 0.003468
| 0.161525
| 30,955
| 825
| 89
| 37.521212
| 0.849393
| 0.020288
| 0
| 0.838449
| 0
| 0
| 0.004032
| 0
| 0
| 0
| 0
| 0
| 0.122779
| 1
| 0.035541
| false
| 0
| 0.009693
| 0
| 0.045234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7cfe95c6759feee2397de3f952b5fd6bdfa39ca2
| 137
|
py
|
Python
|
st3/package_util/compat/typing.py
|
Thom1729/package_util
|
3ddec00d8ab4a52f0f5ce3fe8b09247c1518547f
|
[
"MIT"
] | 18
|
2020-02-20T11:56:43.000Z
|
2021-12-30T19:00:50.000Z
|
st3/package_util/compat/typing.py
|
Thom1729/package_util
|
3ddec00d8ab4a52f0f5ce3fe8b09247c1518547f
|
[
"MIT"
] | 31
|
2020-02-21T13:38:12.000Z
|
2021-12-15T22:18:37.000Z
|
st3/package_util/compat/typing.py
|
Thom1729/package_util
|
3ddec00d8ab4a52f0f5ce3fe8b09247c1518547f
|
[
"MIT"
] | 3
|
2020-02-21T09:31:27.000Z
|
2021-10-01T20:56:16.000Z
|
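# Use the standard-library typing module when available; otherwise fall back to the package's bundled typing_stubs.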
try:
from typing import * # noqa: F401, F403
except ImportError:
from .typing_stubs import * # type: ignore # noqa: F401, F403
| 27.4
| 66
| 0.671533
| 18
| 137
| 5.055556
| 0.666667
| 0.21978
| 0.263736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 0.233577
| 137
| 4
| 67
| 34.25
| 0.752381
| 0.343066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6b0d16f74ff1faebf0826e751ccbc24a085729d3
| 31,951
|
py
|
Python
|
classes.py
|
jared-jorgenson/mini_game
|
ac73987ac4c32c0e9f521d7bcf8d4d9ee4ded85a
|
[
"MIT"
] | null | null | null |
classes.py
|
jared-jorgenson/mini_game
|
ac73987ac4c32c0e9f521d7bcf8d4d9ee4ded85a
|
[
"MIT"
] | null | null | null |
classes.py
|
jared-jorgenson/mini_game
|
ac73987ac4c32c0e9f521d7bcf8d4d9ee4ded85a
|
[
"MIT"
] | null | null | null |
import pygame
class Player(pygame.sprite.Sprite):
death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'),
pygame.image.load('Images/death3.png'),
pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'),
pygame.image.load('Images/death6.png'),
pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'),
pygame.image.load('Images/death9.png'),
pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'),
pygame.image.load('Images/death12.png'),
pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'),
pygame.image.load('Images/death15.png'),
pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'),
pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'),
pygame.image.load('Images/death20.png')]
p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'),
pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')]
p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'),
pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')]
p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'),
pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')]
p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'),
pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')]
p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'),
pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')]
p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'),
pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')]
p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'),
pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')]
p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'),
pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')]
p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'),
pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')]
p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'),
pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')]
p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'),
pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')]
p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'),
pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')]
p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'),
pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')]
p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'),
pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')]
p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'),
pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')]
p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'),
pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')]
# Constructor function
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([24, 28])
self.image.fill((0,0,0))
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.front = True
self.back = False
self.left = False
self.right = False
self.number = number
self.change_x = 0
self.change_y = 0
self.walkCount = 0
self.walls = None
self.alive = True
self.canmove = True
self.deathCount = 0
self.gotomenu=False
self.speed=3
self.superspeed=False
self.superspeedcount=0
self.shield=False
self.shieldcount=0
self.megabombs=False
self.megabombcount = 0
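# Adjust the velocity by (x, y) and manage the super-speed power-up: movement speed is raised to 6 while it is active and reset to 3 once its 150-tick timer runs out.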
def changespeed(self, x, y):
self.change_x += x
self.change_y += y
if self.superspeed and self.change_x==0 and self.change_y==0:
self.speed=6
if self.superspeedcount>=150:
self.superspeed = False
self.speed=3
self.superspeedcount=0
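# Move the player along each axis in turn, update the facing direction, and push the sprite back out of any wall it collides with.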
def update(self):
if self.canmove:
self.rect.x += self.change_x
if self.change_x <0:
self.left=True
self.right=False
self.front=False
self.back=False
elif self.change_x >0:
self.left=False
self.right=True
self.front=False
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_x > 0:
self.rect.right = block.rect.left
else:
self.rect.left = block.rect.right
self.rect.y += self.change_y
if self.change_y <0:
self.left=False
self.right=False
self.front=False
self.back=True
elif self.change_y >0:
self.left=False
self.right=False
self.front=True
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_y > 0:
self.rect.bottom = block.rect.top
else:
self.rect.top = block.rect.bottom
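# Draw the HUD mega-bomb counter and the player sprite: a directional 7-frame walk cycle (advanced every 3 ticks, with or without the shield overlay) while alive, and the 20-frame death animation otherwise.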
def draw(self, screen):
if self.number == 1:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.number == 2:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
if self.alive == False and self.deathCount < 200:
screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y))
self.deathCount += 1
if self.deathCount >= 200:
self.rect.x = 1000
self.gotomenu=True
def reset(self,x,y):
self.gotomenu = False
self.alive = True
self.deathCount = 0
self.rect.x = x
self.rect.y = y
self.canmove = True
self.front = True
self.change_x=0
self.change_y=0
self.superspeed=False
self.speed=3
self.shield=False
self.megabombs=False
self.megabombcount=0
class Wall(pygame.sprite.Sprite):
def __init__(self, x, y, width, height):
super().__init__()
self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
class powerup(pygame.sprite.Sprite):
superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'),
pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'),
pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')]
shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'),
pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'),
pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')]
megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'),
pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'),
pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')]
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.number = number
self.spawntimer=0
self.respawntimer=0
self.exists=True
self.animationcount=0
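# Draw the power-up's 6-frame idle animation once its spawn timer exceeds 50 ticks; number selects super-speed (1), shield (2) or mega-bomb (anything else).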
def draw(self, screen):
if self.number==1:
if self.exists and self.spawntimer>50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
elif self.number==2:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
else:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
def reset(self):
self.spawntimer=0
self.respawntimer=0
self.exists=True
class bomb(pygame.sprite.Sprite):
def __init__(self, x, y, width, height, bomb_count, bomb_type):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.width = width
self.height = height
self.bomb_count = bomb_count
self.bomb_type = bomb_type
self.walls = None
self.leftcheck = self.rect.x - 32
self.rightcheck = self.rect.x + self.width
self.upcheck = self.rect.y - 32
self.downcheck = self.rect.y + self.height
self.expleft = True
self.doubleexpleft = True
self.expright = True
self.doubleexpright = True
self.expup = True
self.doubleexpup = True
self.expdown = True
self.doubleexpdown = True
self.expboxlist = []
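# Draw the bomb fuse (three stages of 30 ticks each), then the cross-shaped explosion; walls block each arm, and mega bombs (bomb_type 1) reach one tile further in every direction.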
def draw(self, screen):
if self.bomb_count < 30:
if self.bomb_type==0:
screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 60:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 90:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 120:
if self.bomb_type==0:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck,self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.rightcheck,self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rect.x,self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x,self.downcheck):
self.expdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32))
elif self.bomb_type==1:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck, self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.leftcheck-32, self.rect.y):
self.doubleexpleft = False
if i.rect.collidepoint(self.rightcheck, self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rightcheck+32, self.rect.y):
self.doubleexpright = False
if i.rect.collidepoint(self.rect.x, self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x, self.upcheck-32):
self.doubleexpup = False
if i.rect.collidepoint(self.rect.x, self.downcheck):
self.expdown = False
if i.rect.collidepoint(self.rect.x, self.downcheck+32):
self.doubleexpdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32))
if self.doubleexpleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32))
if self.doubleexpright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32))
if self.doubleexpup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32))
if self.doubleexpdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16, 32, 32))
| 59.833333
| 121
| 0.52111
| 3,460
| 31,951
| 4.784971
| 0.067919
| 0.132882
| 0.181203
| 0.239732
| 0.864279
| 0.828038
| 0.75459
| 0.74396
| 0.737376
| 0.728437
| 0
| 0.034365
| 0.351538
| 31,951
| 534
| 122
| 59.833333
| 0.764709
| 0.000626
| 0
| 0.574181
| 0
| 0
| 0.131159
| 0.060165
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021195
| false
| 0
| 0.001927
| 0
| 0.069364
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6b221317ab066084e6a6681c2759fb8660e93351
| 11,665
|
py
|
Python
|
openGaussBase/testcase/SQL/DCL/Alter_Default_Privileges/Opengauss_Function_Alter_Default_Privileges_Case0016.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/SQL/DCL/Alter_Default_Privileges/Opengauss_Function_Alter_Default_Privileges_Case0016.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/SQL/DCL/Alter_Default_Privileges/Opengauss_Function_Alter_Default_Privileges_Case0016.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : Functional test
Case Name : The initial user and a sysadmin user alter their own default privileges
Description :
1. The initial user alters its own privileges: ALTER reports no error but has no effect; the queried privileges remain unchanged
1.1. The initial user alters its own privileges
1.2. Clean up the environment. Expected: cleanup succeeds
2. A sysadmin user alters its own privileges: ALTER reports no error but has no effect; the queried privileges remain unchanged
2.1. Connect as an administrator and create the sysadmin user default016_01. Expected: creation succeeds
2.2. Connect as default016_01 and run the ALTER tests
2.3. Clean up. Expected: cleanup succeeds
Note: the ALTER tests above cover privileges on tables (including views), types, and functions
Expect :
1. The initial user alters its own privileges: ALTER reports no error but has no effect; the queried privileges remain unchanged
1.1. The initial user alters its own privileges
1.2. Clean up the environment. Expected: cleanup succeeds
2. A sysadmin user alters its own privileges: ALTER reports no error but has no effect; the queried privileges remain unchanged
2.1. Connect as an administrator and create the sysadmin user default016_01. Expected: creation succeeds
2.2. Connect as default016_01 and run the ALTER tests
2.3. Clean up. Expected: cleanup succeeds
Note: the ALTER tests above cover privileges on tables (including views), types, and functions
History :
"""
import sys
import unittest
from yat.test import macro
from yat.test import Node
sys.path.append(sys.path[0] + "/../")
from testcase.utils.Logger import Logger
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
logger = Logger()
commonsh = CommonSH('dbuser')
class Privategrant(unittest.TestCase):
def setUp(self):
logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016 started--------')
self.userNode = Node('dbuser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.Constant = Constant()
# Username of the initial (installation) user
self.username = self.userNode.ssh_user
# Password of the initial user
self.password = macro.GAUSSDB_INIT_USER_PASSWD
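# Both scenarios follow the same pattern: issue ALTER DEFAULT PRIVILEGES GRANT/REVOKE statements for the user's own role on tables, functions and types, create sample objects, and assert the statements finish without SQL errors.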
def test_common_user_permission(self):
logger.info('--------1. The initial user alters its own privileges--------')
logger.info('--------1.1. The initial user alters its own privileges--------')
sql_cmd = (f'''
drop schema if exists schema_016 cascade;
create schema schema_016;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on tables to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on functions to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT EXECUTE on functions to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 GRANT ALL PRIVILEGES on TYPES to {self.username} WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role {self.username} GRANT USAGE on TYPES to {self.username} WITH GRANT OPTION ;
drop schema if exists schema_016 cascade;
create schema schema_016;
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id int unique);
select * from test_alter_default_016;
drop function if exists test_default_016(int) cascade;
create or replace function test_default_016(a int) return int
as
b int:= a;
begin
for i in 1..a loop
b:=b+1;
end loop;
return b;
end;
select test_default_016(16);
drop type if exists type016;
CREATE TYPE type016 AS (c_int int,c_text text);
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id type016);
select * from test_alter_default_016;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on tables from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from {self.username} CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on functions from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke EXECUTE on functions from {self.username} CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role {self.username} in schema schema_016 revoke ALL on TYPES from {self.username} CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role {self.username} revoke USAGE on TYPES from {self.username} CASCADE CONSTRAINTS;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg)
logger.info('--------1.2. Clean up the environment--------')
sql_cmd = ('''
drop table if exists test_alter_default_016 cascade;
drop type if exists type016 cascade;
drop function if exists test_default_016(int) cascade;
drop schema if exists schema_016 cascade;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U {self.username} -W {self.password} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
logger.info('--------2. The sysadmin user alters its own privileges--------')
logger.info('--------2.1. Connect as an administrator and create the sysadmin user default016_01--------')
sql_cmd = commonsh.execut_db_sql(f'''
drop owned by default016_01 cascade;
drop user if exists default016_01;
create user default016_01 password '{macro.COMMON_PASSWD}';
grant all privileges to default016_01;
''')
logger.info(sql_cmd)
self.assertIn(self.Constant.CREATE_ROLE_SUCCESS_MSG, sql_cmd)
logger.info('--------2.2. Connect as default016_01 and run the alter tests--------')
sql_cmd = (f'''
drop schema if exists schema_016 cascade;
create schema schema_016;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on tables to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT select,insert,update,truncate,references,TRIGGER,DELETE on tables to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on functions to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT EXECUTE on functions to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 GRANT ALL PRIVILEGES on TYPES to default016_01 WITH GRANT OPTION ;
ALTER DEFAULT PRIVILEGES for role default016_01 GRANT USAGE on TYPES to default016_01 WITH GRANT OPTION ;
drop schema if exists schema_016 cascade;
create schema schema_016;
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id int unique);
select * from test_alter_default_016;
drop function if exists test_default_016(int) cascade;
create or replace function test_default_016(a int) return int
as
b int:= a;
begin
for i in 1..a loop
b:=b+1;
end loop;
return b;
end;
select test_default_016(16);
drop type if exists type016;
CREATE TYPE type016 AS (c_int int,c_text text);
drop table if exists test_alter_default_016 cascade;
create table test_alter_default_016(id type016);
select * from test_alter_default_016;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on tables from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke select,insert,update,truncate,references,TRIGGER,DELETE on tables from default016_01 CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on functions from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke EXECUTE on functions from default016_01 CASCADE CONSTRAINTS;
ALTER DEFAULT PRIVILEGES for role default016_01 in schema schema_016 revoke ALL on TYPES from default016_01 CASCADE CONSTRAINTS ;
ALTER DEFAULT PRIVILEGES for role default016_01 revoke USAGE on TYPES from default016_01 CASCADE CONSTRAINTS;
''')
excute_cmd = f'''
source {self.DB_ENV_PATH};
gsql -d {self.userNode.db_name} -p {self.userNode.db_port} -U default016_01 -W {macro.COMMON_PASSWD} -c "{sql_cmd}"
'''
logger.info(excute_cmd)
msg = self.userNode.sh(excute_cmd).result()
logger.info(msg)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], msg)
self.assertIn(self.Constant.ALTER_DEFAULT_PRIVILEGES, msg)
logger.info('--------2.3. Cleanup--------')
sql_cmd = commonsh.execut_db_sql(f'''
drop owned by default016_01 cascade;
drop user if exists default016_01;
''')
logger.info(sql_cmd)
self.assertNotIn(self.Constant.SQL_WRONG_MSG[1], sql_cmd)
def tearDown(self):
logger.info('----------------------------------Clean up the environment----------------------------------')
sql_cmd = commonsh.execut_db_sql('''
drop owned by default016_01 cascade;
drop user if exists default016_01;
''')
logger.info(sql_cmd)
logger.info('--------Opengauss_Function_Alter_Default_Privileges_Case0016 finished--------')
| 55.547619
| 184
| 0.598028
| 1,361
| 11,665
| 4.981631
| 0.14842
| 0.072566
| 0.090855
| 0.088496
| 0.811799
| 0.79646
| 0.792773
| 0.761799
| 0.749853
| 0.735546
| 0
| 0.05161
| 0.323961
| 11,665
| 210
| 185
| 55.547619
| 0.808141
| 0.044921
| 0
| 0.603896
| 0
| 0.11039
| 0.800669
| 0.118052
| 0
| 0
| 0
| 0
| 0.045455
| 1
| 0.019481
| false
| 0.032468
| 0.045455
| 0
| 0.084416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b2cac513cb8e6260352dc24ccb57b041a317ef9
| 8,858
|
py
|
Python
|
tests/test_networks.py
|
UCY-LINC-LAB/5G-Slicer
|
41e75a6709bc779cb4f3e08484b9ada3911646ed
|
[
"Apache-2.0"
] | null | null | null |
tests/test_networks.py
|
UCY-LINC-LAB/5G-Slicer
|
41e75a6709bc779cb4f3e08484b9ada3911646ed
|
[
"Apache-2.0"
] | null | null | null |
tests/test_networks.py
|
UCY-LINC-LAB/5G-Slicer
|
41e75a6709bc779cb4f3e08484b9ada3911646ed
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from networks.QoS import QoS
from networks.connections.mathematical_connections import FunctionalDegradation
from networks.slicing import SliceConceptualGraph
from utils.location import Location
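# Unit tests for SliceConceptualGraph: one suite per wireless degradation model (linear and log2).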
class TestBaseStationLinear(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "LinearDegradation"
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
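# Degradation parameters: QoS is expected to move from best_qos at the RU (distance 0) towards worst_qos at the configured radius, as exercised in test_qos_from_distance.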
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
self.network.set_RU(lat, lon)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
def test_set_basestation(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_has_to_pass_through_backhaul(self):
self.network.set_RU(10, 10)
self.network.set_RU(20, 20)
self.network.add_node('source1', 10, 10)
self.network.add_node('destination1', 10, 10)
self.network.add_node('destination2', 20, 20)
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
lat, lon = 33, 40
self.network.set_RU(lat, lon)
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
class TestBaseLog2Degradation(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "Log2Degradation"
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node(name, lat, lon)
self.network.set_RU(33, 40, 0)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
def test_set_basestation(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
| 47.368984
| 112
| 0.634229
| 1,064
| 8,858
| 5.102444
| 0.079887
| 0.1317
| 0.090993
| 0.124516
| 0.929821
| 0.926506
| 0.92448
| 0.92448
| 0.921901
| 0.899061
| 0
| 0.039182
| 0.210544
| 8,858
| 186
| 113
| 47.623656
| 0.737166
| 0
| 0
| 0.902597
| 0
| 0
| 0.108715
| 0
| 0
| 0
| 0
| 0
| 0.253247
| 1
| 0.175325
| false
| 0.006494
| 0.032468
| 0
| 0.220779
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b3dd632291d2f985432a2f2e2e3bd67cb5c5d46
| 19,209
|
py
|
Python
|
sdk/python/pulumi_azure/desktopvirtualization/workspace.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/desktopvirtualization/workspace.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/desktopvirtualization/workspace.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['WorkspaceArgs', 'Workspace']
@pulumi.input_type
class WorkspaceArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Workspace resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
:param pulumi.Input[str] description: A description for the Virtual Desktop Workspace.
:param pulumi.Input[str] friendly_name: A friendly name for the Virtual Desktop Workspace.
:param pulumi.Input[str] location: The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if description is not None:
pulumi.set(__self__, "description", description)
if friendly_name is not None:
pulumi.set(__self__, "friendly_name", friendly_name)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[pulumi.Input[str]]:
"""
A friendly name for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "friendly_name")
@friendly_name.setter
def friendly_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "friendly_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _WorkspaceState:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Workspace resources.
:param pulumi.Input[str] description: A description for the Virtual Desktop Workspace.
:param pulumi.Input[str] friendly_name: A friendly name for the Virtual Desktop Workspace.
:param pulumi.Input[str] location: The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if friendly_name is not None:
pulumi.set(__self__, "friendly_name", friendly_name)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[pulumi.Input[str]]:
"""
A friendly name for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "friendly_name")
@friendly_name.setter
def friendly_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "friendly_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class Workspace(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages a Virtual Desktop Workspace.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example", location="West Europe")
workspace = azure.desktopvirtualization.Workspace("workspace",
location=example.location,
resource_group_name=example.name,
friendly_name="FriendlyName",
description="A description of my workspace")
```
## Import
Virtual Desktop Workspaces can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:desktopvirtualization/workspace:Workspace example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/myGroup1/providers/Microsoft.DesktopVirtualization/workspaces/myworkspace
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A description for the Virtual Desktop Workspace.
:param pulumi.Input[str] friendly_name: A friendly name for the Virtual Desktop Workspace.
:param pulumi.Input[str] location: The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WorkspaceArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Virtual Desktop Workspace.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example", location="West Europe")
workspace = azure.desktopvirtualization.Workspace("workspace",
location=example.location,
resource_group_name=example.name,
friendly_name="FriendlyName",
description="A description of my workspace")
```
## Import
Virtual Desktop Workspaces can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:desktopvirtualization/workspace:Workspace example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/myGroup1/providers/Microsoft.DesktopVirtualization/workspaces/myworkspace
```
:param str resource_name: The name of the resource.
:param WorkspaceArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WorkspaceArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WorkspaceArgs.__new__(WorkspaceArgs)
__props__.__dict__["description"] = description
__props__.__dict__["friendly_name"] = friendly_name
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
super(Workspace, __self__).__init__(
'azure:desktopvirtualization/workspace:Workspace',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Workspace':
"""
Get an existing Workspace resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A description for the Virtual Desktop Workspace.
:param pulumi.Input[str] friendly_name: A friendly name for the Virtual Desktop Workspace.
:param pulumi.Input[str] location: The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _WorkspaceState.__new__(_WorkspaceState)
__props__.__dict__["description"] = description
__props__.__dict__["friendly_name"] = friendly_name
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
return Workspace(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A description for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> pulumi.Output[Optional[str]]:
"""
A friendly name for the Virtual Desktop Workspace.
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The location/region where the Virtual Desktop Workspace is located. Changing the location/region forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Virtual Desktop Workspace. Changing the name
forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which to
create the Virtual Desktop Workspace. Changing the resource group name forces
a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
| 42.781737
| 221
| 0.645895
| 2,249
| 19,209
| 5.342374
| 0.0747
| 0.086975
| 0.093217
| 0.076904
| 0.862588
| 0.848273
| 0.834374
| 0.823887
| 0.819642
| 0.812901
| 0
| 0.004706
| 0.258837
| 19,209
| 448
| 222
| 42.877232
| 0.839222
| 0.37269
| 0
| 0.767241
| 1
| 0
| 0.08311
| 0.006279
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159483
| false
| 0.00431
| 0.021552
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
864a8f3a33d90a7cbe60473e931b29c2b862bbbb
| 13,719
|
py
|
Python
|
contact/views.py
|
Dimstella/blockchain-contact-tracing-app-hospitals
|
e0b2bf2b3b8c06e58032faed99900d1c7b7d300d
|
[
"MIT"
] | null | null | null |
contact/views.py
|
Dimstella/blockchain-contact-tracing-app-hospitals
|
e0b2bf2b3b8c06e58032faed99900d1c7b7d300d
|
[
"MIT"
] | null | null | null |
contact/views.py
|
Dimstella/blockchain-contact-tracing-app-hospitals
|
e0b2bf2b3b8c06e58032faed99900d1c7b7d300d
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.template import loader
from django.http import HttpResponse
from django.shortcuts import render, redirect
from .models import Patient
from django.contrib import messages
import pandas as pd
from django.contrib.auth.decorators import login_required
from web3 import Web3
import datetime
import hashlib
import json
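# Django views for the contact-tracing app: patient records are saved to the local database and mirrored to a Contact_tracing smart contract on a local Ganache node via web3.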
def encrypt_string(hash_string):
sha_signature = \
hashlib.sha256(hash_string.encode()).hexdigest()
return sha_signature
@login_required()
def index(request):
context = {}
template = loader.get_template('index.html')
return HttpResponse(template.render(context, request))
@login_required()
def patient_form(request):
dt = pd.read_csv('contact/static/info/countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
return render(request, 'patient_form.html', {'countries': countries})
@login_required()
def add_patients(request):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
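# The contract ABI and address below are hard-coded for a locally deployed Contact_tracing contract on Ganache; the same values are repeated in the other views that touch the chain.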
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
print(contract)
if request.method == 'GET':
precord = Patient()
precord.name = request.GET.get("name")
precord.surname = request.GET.get("surname")
precord.address = request.GET.get("address")
precord.email = request.GET.get("email")
precord.city = request.GET.get("city")
precord.region = request.GET.get("region")
precord.postal = request.GET.get("postal")
precord.country = request.GET.get("country")
precord.phone = request.GET.get("phone")
precord.status = request.GET.get("status")
precord.notes = request.GET.get("notes", None)
precord.created_at = request.GET.get("bdate")
precord.user = request.user
name_surname = precord.name+"_"+precord.surname
precord.hashing = encrypt_string(name_surname)
country = precord.country
postal = precord.postal
status = int(precord.status)
result = contract.functions.addPatient(status, postal, str(precord.user), encrypt_string(name_surname), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
num = contract.functions.getPatientsCount().call()
print(num)
precord.save()
messages.success(request, 'Record Saved')
return render(request, 'index.html')
else:
return render(request, 'index.html')
@login_required()
def patients_list(request):
patients = Patient.objects.all()
return render(request, 'patient_list.html', {'patients':patients})
@login_required
def delete_patient(request, uid):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
patient = Patient.objects.get(uid=uid)
patient.status = request.GET.get("status")
patient.notes = request.GET.get("notes", None)
patient.created_at = request.GET.get("bdate")
hashing = patient.hashing
country = patient.country
postal = patient.postal
status = int(patient.status)
result = contract.functions.addPatient(status, postal, 'deleted', str(hashing), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
patient.delete()
return redirect("http://127.0.0.1:8000/patients-list")
@login_required
def update_patient_status(request, uid):
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
if request.method == 'GET':
patient = Patient.objects.get(uid=uid)
patient.status = request.GET.get("status")
patient.notes = request.GET.get("notes", None)
patient.created_at = request.GET.get("bdate")
hashing = patient.hashing
print(patient)
country = patient.country
postal = patient.postal
status = int(patient.status)
result = contract.functions.addPatient(status, postal, str(patient.user), str(patient.hashing), country).transact({'from':'0xF49D6960fb886B6ACD1Aca017f6306134a795457'})
patient.save()
return redirect("http://127.0.0.1:8000/patients-list")
return redirect("http://127.0.0.1:8000/patients-list")
@login_required
def edit(request, uid):
patient = Patient.objects.get(uid=uid)
return render(request,'edit.html', {'patient':patient})
def users(request):
dt = pd.read_csv('countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
return render(request,'users.html', {'countries':countries})
def search_results(request):
dt = pd.read_csv('countries.txt', sep='\n')
countries = []
df = dt.to_dict()
for k, country in df.items():
for k,v in country.items():
countries.append(v)
ganache_url = 'http://127.0.0.1:7545'
web3 = Web3(Web3.HTTPProvider(ganache_url))
abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"enum Contact_tracing.Statuses","name":"_status","type":"uint8"},{"internalType":"string","name":"_postal","type":"string"},{"internalType":"string","name":"_hospitalName","type":"string"},{"internalType":"string","name":"_hashing","type":"string"},{"internalType":"string","name":"_country","type":"string"}],"name":"addPatient","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getPatientsCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"gettPatient","outputs":[{"components":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"internalType":"struct Contact_tracing.Patient","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"name":"patients","outputs":[{"internalType":"enum Contact_tracing.Statuses","name":"status","type":"uint8"},{"internalType":"string","name":"postal","type":"string"},{"internalType":"string","name":"hospitalName","type":"string"},{"internalType":"string","name":"hashing","type":"string"},{"internalType":"string","name":"country","type":"string"}],"stateMutability":"view","type":"function"}]')
address = "0xa84580e93474b942b48B16CAEeaA1920962CBd90"
contract = web3.eth.contract(address = address, abi = abi)
no_patients = contract.functions.getPatientsCount().call()
lpatient = []
lhash = []
message = 'No infected people'
if request.method == 'GET':
region = request.GET.get("city")
postal = request.GET.get("postal")
country = request.GET.get("country")
counter = 0
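# Walk every on-chain patient record and count distinct patient hashes with status 0 (treated here as infected) that match the requested postal code and country, skipping entries whose hospital name marks them as deleted.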
for i in range(0, no_patients):
patients = contract.functions.gettPatient(i).call()
lpatient = list(patients)
print(lpatient)
if lpatient[0] == 0 and lpatient[1] == postal and lpatient[4] == country:
if lpatient[3] not in lhash and lpatient[2] != 'deleted':
lhash.append(lpatient[3])
counter = counter + 1
message = 'Infected people in your area are'
return render(request,'infected.html', {'countries':countries, 'infected_people': counter, 'message': message})
| 56.45679
| 1,732
| 0.648954
| 1,427
| 13,719
| 6.179397
| 0.111423
| 0.061238
| 0.119755
| 0.114312
| 0.779088
| 0.742005
| 0.715468
| 0.715468
| 0.709004
| 0.709004
| 0
| 0.033514
| 0.127852
| 13,719
| 242
| 1,733
| 56.690083
| 0.703468
| 0
| 0
| 0.453947
| 0
| 0.026316
| 0.575096
| 0.529757
| 0
| 0
| 0.021817
| 0
| 0
| 1
| 0.065789
| false
| 0
| 0.078947
| 0
| 0.223684
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
864d2029d6faf1f316dacf9ce08c2da5fefb2cbf
| 9,735
|
py
|
Python
|
scripts/policy/sdn_single_vm_multiple_policy_topology.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 5
|
2020-09-29T00:36:57.000Z
|
2022-02-16T06:51:32.000Z
|
scripts/policy/sdn_single_vm_multiple_policy_topology.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 27
|
2019-11-02T02:18:34.000Z
|
2022-02-24T18:49:08.000Z
|
scripts/policy/sdn_single_vm_multiple_policy_topology.py
|
atsgen/tf-test
|
2748fcd81491450c75dadc71849d2a1c11061029
|
[
"Apache-2.0"
] | 20
|
2019-11-28T16:02:25.000Z
|
2022-01-06T05:56:58.000Z
|
'''*******AUTO-GENERATED TOPOLOGY*********'''
from __future__ import print_function
from builtins import range
from builtins import object
from tcutils.util import get_random_name,get_random_cidr
class sdn_single_vm_multiple_policy_config(object):
def __init__(self, domain='default-domain', project='admin', username=None, password=None):
#
# Domain and project defaults: Do not change until support for
# non-default is tested!
self.domain = domain
self.project = project
self.username = username
self.password = password
#
# Define VN's in the project:
self.vnet_list = [get_random_name('vnet0')]
#
# Define network info for each VN:
if self.project == 'vCenter':
# For vcenter, only one subnet per VN is supported
self.vn_nets = {self.vnet_list[0]: [get_random_cidr(af='v4')]}
else:
self.vn_nets = {self.vnet_list[0]: ['10.1.1.0/24', '11.1.1.0/24']}
#
# Define network policies
self.policy_list = list()
for i in range(10):
self.policy_list.append(get_random_name('policy%d'%i))
self.vn_policy = {self.vnet_list[0]: self.policy_list}
#
# Define VM's
# VM distribution on available compute nodes is handled by nova
# scheduler or contrail vm naming scheme
self.vn_of_vm = {get_random_name('vmc0'): self.vnet_list[0]}
#
# Define network policy rules
self.rules = {}
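# Ten policies on vnet0, each a set of deny rules for a single protocol (udp, icmp or tcp) across source ports 0-3, with vnet0 as both source and destination.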
self.rules[self.policy_list[0]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[1]] = [{'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'icmp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[2]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[3]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[4]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[5]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[6]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[7]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[8]] = [{'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'udp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
self.rules[self.policy_list[9]] = [{'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [0, 0]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [1, 1]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [2, 2]}, {'direction': '>', 'protocol': 'tcp', 'dest_network': self.vnet_list[0], 'source_network': self.vnet_list[0], 'dst_ports': 'any', 'simple_action': 'deny', 'src_ports': [3, 3]}]
# end __init__
if __name__ == '__main__':
print("Currently topology limited to one domain/project..")
print("Based on need, can be extended to cover config for multiple domain/projects")
print()
my_topo = sdn_single_vm_multiple_policy_config(
domain='default-domain', project='admin')
x = my_topo.__dict__
# print "keys only:"
# for key, value in x.iteritems(): print key
# print
# print "keys & values:"
# for key, value in x.iteritems(): print key, "-->", value
import topo_helper
topo_h = topo_helper.topology_helper(my_topo)
#vmc_list= topo_h.get_vmc_list()
policy_vn = topo_h.get_policy_vn()
#
| 120.185185
| 754
| 0.638007
| 1,357
| 9,735
| 4.308032
| 0.101695
| 0.116319
| 0.174478
| 0.186794
| 0.81235
| 0.79781
| 0.787205
| 0.779336
| 0.768731
| 0.768731
| 0
| 0.022816
| 0.131073
| 9,735
| 80
| 755
| 121.6875
| 0.668282
| 0.060709
| 0
| 0
| 1
| 0
| 0.397477
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0.04878
| 0.121951
| 0
| 0.170732
| 0.097561
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
867b8e99cfbed437050f3b3f92884ccf95f2bf33
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_talon/na_talon_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_talon/na_talon_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_talon/na_talon_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
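# One empty Ratings subclass per champion for NA Talon jungle matchups; the rating data itself is presumably attached elsewhere.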
class NA_Talon_Jng_Aatrox(Ratings):
pass
class NA_Talon_Jng_Ahri(Ratings):
pass
class NA_Talon_Jng_Akali(Ratings):
pass
class NA_Talon_Jng_Alistar(Ratings):
pass
class NA_Talon_Jng_Amumu(Ratings):
pass
class NA_Talon_Jng_Anivia(Ratings):
pass
class NA_Talon_Jng_Annie(Ratings):
pass
class NA_Talon_Jng_Ashe(Ratings):
pass
class NA_Talon_Jng_AurelionSol(Ratings):
pass
class NA_Talon_Jng_Azir(Ratings):
pass
class NA_Talon_Jng_Bard(Ratings):
pass
class NA_Talon_Jng_Blitzcrank(Ratings):
pass
class NA_Talon_Jng_Brand(Ratings):
pass
class NA_Talon_Jng_Braum(Ratings):
pass
class NA_Talon_Jng_Caitlyn(Ratings):
pass
class NA_Talon_Jng_Camille(Ratings):
pass
class NA_Talon_Jng_Cassiopeia(Ratings):
pass
class NA_Talon_Jng_Chogath(Ratings):
pass
class NA_Talon_Jng_Corki(Ratings):
pass
class NA_Talon_Jng_Darius(Ratings):
pass
class NA_Talon_Jng_Diana(Ratings):
pass
class NA_Talon_Jng_Draven(Ratings):
pass
class NA_Talon_Jng_DrMundo(Ratings):
pass
class NA_Talon_Jng_Ekko(Ratings):
pass
class NA_Talon_Jng_Elise(Ratings):
pass
class NA_Talon_Jng_Evelynn(Ratings):
pass
class NA_Talon_Jng_Ezreal(Ratings):
pass
class NA_Talon_Jng_Fiddlesticks(Ratings):
pass
class NA_Talon_Jng_Fiora(Ratings):
pass
class NA_Talon_Jng_Fizz(Ratings):
pass
class NA_Talon_Jng_Galio(Ratings):
pass
class NA_Talon_Jng_Gangplank(Ratings):
pass
class NA_Talon_Jng_Garen(Ratings):
pass
class NA_Talon_Jng_Gnar(Ratings):
pass
class NA_Talon_Jng_Gragas(Ratings):
pass
class NA_Talon_Jng_Graves(Ratings):
pass
class NA_Talon_Jng_Hecarim(Ratings):
pass
class NA_Talon_Jng_Heimerdinger(Ratings):
pass
class NA_Talon_Jng_Illaoi(Ratings):
pass
class NA_Talon_Jng_Irelia(Ratings):
pass
class NA_Talon_Jng_Ivern(Ratings):
pass
class NA_Talon_Jng_Janna(Ratings):
pass
class NA_Talon_Jng_JarvanIV(Ratings):
pass
class NA_Talon_Jng_Jax(Ratings):
pass
class NA_Talon_Jng_Jayce(Ratings):
pass
class NA_Talon_Jng_Jhin(Ratings):
pass
class NA_Talon_Jng_Jinx(Ratings):
pass
class NA_Talon_Jng_Kalista(Ratings):
pass
class NA_Talon_Jng_Karma(Ratings):
pass
class NA_Talon_Jng_Karthus(Ratings):
pass
class NA_Talon_Jng_Kassadin(Ratings):
pass
class NA_Talon_Jng_Katarina(Ratings):
pass
class NA_Talon_Jng_Kayle(Ratings):
pass
class NA_Talon_Jng_Kayn(Ratings):
pass
class NA_Talon_Jng_Kennen(Ratings):
pass
class NA_Talon_Jng_Khazix(Ratings):
pass
class NA_Talon_Jng_Kindred(Ratings):
pass
class NA_Talon_Jng_Kled(Ratings):
pass
class NA_Talon_Jng_KogMaw(Ratings):
pass
class NA_Talon_Jng_Leblanc(Ratings):
pass
class NA_Talon_Jng_LeeSin(Ratings):
pass
class NA_Talon_Jng_Leona(Ratings):
pass
class NA_Talon_Jng_Lissandra(Ratings):
pass
class NA_Talon_Jng_Lucian(Ratings):
pass
class NA_Talon_Jng_Lulu(Ratings):
pass
class NA_Talon_Jng_Lux(Ratings):
pass
class NA_Talon_Jng_Malphite(Ratings):
pass
class NA_Talon_Jng_Malzahar(Ratings):
pass
class NA_Talon_Jng_Maokai(Ratings):
pass
class NA_Talon_Jng_MasterYi(Ratings):
pass
class NA_Talon_Jng_MissFortune(Ratings):
pass
class NA_Talon_Jng_MonkeyKing(Ratings):
pass
class NA_Talon_Jng_Mordekaiser(Ratings):
pass
class NA_Talon_Jng_Morgana(Ratings):
pass
class NA_Talon_Jng_Nami(Ratings):
pass
class NA_Talon_Jng_Nasus(Ratings):
pass
class NA_Talon_Jng_Nautilus(Ratings):
pass
class NA_Talon_Jng_Nidalee(Ratings):
pass
class NA_Talon_Jng_Nocturne(Ratings):
pass
class NA_Talon_Jng_Nunu(Ratings):
pass
class NA_Talon_Jng_Olaf(Ratings):
pass
class NA_Talon_Jng_Orianna(Ratings):
pass
class NA_Talon_Jng_Ornn(Ratings):
pass
class NA_Talon_Jng_Pantheon(Ratings):
pass
class NA_Talon_Jng_Poppy(Ratings):
pass
class NA_Talon_Jng_Quinn(Ratings):
pass
class NA_Talon_Jng_Rakan(Ratings):
pass
class NA_Talon_Jng_Rammus(Ratings):
pass
class NA_Talon_Jng_RekSai(Ratings):
pass
class NA_Talon_Jng_Renekton(Ratings):
pass
class NA_Talon_Jng_Rengar(Ratings):
pass
class NA_Talon_Jng_Riven(Ratings):
pass
class NA_Talon_Jng_Rumble(Ratings):
pass
class NA_Talon_Jng_Ryze(Ratings):
pass
class NA_Talon_Jng_Sejuani(Ratings):
pass
class NA_Talon_Jng_Shaco(Ratings):
pass
class NA_Talon_Jng_Shen(Ratings):
pass
class NA_Talon_Jng_Shyvana(Ratings):
pass
class NA_Talon_Jng_Singed(Ratings):
pass
class NA_Talon_Jng_Sion(Ratings):
pass
class NA_Talon_Jng_Sivir(Ratings):
pass
class NA_Talon_Jng_Skarner(Ratings):
pass
class NA_Talon_Jng_Sona(Ratings):
pass
class NA_Talon_Jng_Soraka(Ratings):
pass
class NA_Talon_Jng_Swain(Ratings):
pass
class NA_Talon_Jng_Syndra(Ratings):
pass
class NA_Talon_Jng_TahmKench(Ratings):
pass
class NA_Talon_Jng_Taliyah(Ratings):
pass
class NA_Talon_Jng_Talon(Ratings):
pass
class NA_Talon_Jng_Taric(Ratings):
pass
class NA_Talon_Jng_Teemo(Ratings):
pass
class NA_Talon_Jng_Thresh(Ratings):
pass
class NA_Talon_Jng_Tristana(Ratings):
pass
class NA_Talon_Jng_Trundle(Ratings):
pass
class NA_Talon_Jng_Tryndamere(Ratings):
pass
class NA_Talon_Jng_TwistedFate(Ratings):
pass
class NA_Talon_Jng_Twitch(Ratings):
pass
class NA_Talon_Jng_Udyr(Ratings):
pass
class NA_Talon_Jng_Urgot(Ratings):
pass
class NA_Talon_Jng_Varus(Ratings):
pass
class NA_Talon_Jng_Vayne(Ratings):
pass
class NA_Talon_Jng_Veigar(Ratings):
pass
class NA_Talon_Jng_Velkoz(Ratings):
pass
class NA_Talon_Jng_Vi(Ratings):
pass
class NA_Talon_Jng_Viktor(Ratings):
pass
class NA_Talon_Jng_Vladimir(Ratings):
pass
class NA_Talon_Jng_Volibear(Ratings):
pass
class NA_Talon_Jng_Warwick(Ratings):
pass
class NA_Talon_Jng_Xayah(Ratings):
pass
class NA_Talon_Jng_Xerath(Ratings):
pass
class NA_Talon_Jng_XinZhao(Ratings):
pass
class NA_Talon_Jng_Yasuo(Ratings):
pass
class NA_Talon_Jng_Yorick(Ratings):
pass
class NA_Talon_Jng_Zac(Ratings):
pass
class NA_Talon_Jng_Zed(Ratings):
pass
class NA_Talon_Jng_Ziggs(Ratings):
pass
class NA_Talon_Jng_Zilean(Ratings):
pass
class NA_Talon_Jng_Zyra(Ratings):
pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
86878499d4795f1de654ad30bc93467d3e84cd3c
| 261
|
py
|
Python
|
novice/python-unit-testing/answers/test_rectangle2.py
|
Southampton-RSG/2019-03-13-southampton-swc
|
1f07d82c1bd1f237a19fa7a17bb4765e0364dc88
|
[
"CC-BY-4.0"
] | 1
|
2021-06-20T11:51:37.000Z
|
2021-06-20T11:51:37.000Z
|
novice/python-unit-testing/answers/test_rectangle2.py
|
Southampton-RSG/2019-03-13-southampton-swc
|
1f07d82c1bd1f237a19fa7a17bb4765e0364dc88
|
[
"CC-BY-4.0"
] | 1
|
2019-09-30T21:15:32.000Z
|
2019-09-30T21:15:32.000Z
|
novice/python-unit-testing/answers/test_rectangle2.py
|
Southampton-RSG/2019-03-13-southampton-swc
|
1f07d82c1bd1f237a19fa7a17bb4765e0364dc88
|
[
"CC-BY-4.0"
] | null | null | null |
from rectangle2 import rectangle_area
def test_unit_square():
assert rectangle_area([0, 0, 1, 1]) == 1.0
def test_large_square():
assert rectangle_area([1, 1, 4, 4]) == 9.0
def test_actual_rectangle():
assert rectangle_area([0, 1, 4, 7]) == 24.0
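# Run with `pytest test_rectangle2.py` (rectangle2.py must be importable).
# A minimal rectangle_area consistent with these tests, assuming the argument
# is an [x0, y0, x1, y1] coordinate list, would be:
#     def rectangle_area(coords):
#         x0, y0, x1, y1 = coords
#         return (x1 - x0) * (y1 - y0)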
| 23.727273
| 47
| 0.67433
| 44
| 261
| 3.772727
| 0.409091
| 0.313253
| 0.343373
| 0.301205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.176245
| 261
| 10
| 48
| 26.1
| 0.67907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.428571
| true
| 0
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
869a0bbbdd3ad540b61675d429b25b1caee7f14d
| 38,519
|
py
|
Python
|
tiled-lutnet/training-software/MNIST-CIFAR-SVHN/models/MNIST/scripts/lutnet_init.py
|
awai54st/LUTNet
|
81b044f31d1131bee1a7fae41fc4d2fb102ea73a
|
[
"BSD-2-Clause"
] | 38
|
2019-10-28T10:06:33.000Z
|
2022-02-21T21:38:39.000Z
|
tiled-lutnet/training-software/MNIST-CIFAR-SVHN/models/MNIST/scripts/lutnet_init.py
|
awai54st/LUTNet
|
81b044f31d1131bee1a7fae41fc4d2fb102ea73a
|
[
"BSD-2-Clause"
] | null | null | null |
tiled-lutnet/training-software/MNIST-CIFAR-SVHN/models/MNIST/scripts/lutnet_init.py
|
awai54st/LUTNet
|
81b044f31d1131bee1a7fae41fc4d2fb102ea73a
|
[
"BSD-2-Clause"
] | 13
|
2019-10-28T10:17:48.000Z
|
2021-08-10T21:37:11.000Z
|
import h5py
import numpy as np
np.set_printoptions(threshold=np.inf)  # print arrays in full; np.nan is no longer accepted as a threshold by newer numpy
from shutil import copyfile
copyfile("dummy_lutnet.h5", "pretrained_bin.h5")  # create pretrained_bin.h5 using the data structure from dummy_lutnet.h5
bl = h5py.File("baseline_pruned.h5", 'r')
#dummy = h5py.File("dummy.h5", 'r')
pretrained = h5py.File("pretrained_bin.h5", 'r+')
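# Overall flow: for every binary dense layer, copy the baseline weights, gamma
# and pruning mask (plus, for layers 2-5, the residual-sign means) from
# baseline_pruned.h5 into pretrained_bin.h5, regenerate the random input maps,
# and seed the 32 per-LUT constants (see the "connect1 only" blocks); the
# batch-normalisation statistics are copied over at the end.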
# dense layer 1
bl_w1 = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"]
zero_fill = np.zeros(np.shape(np.array(bl_w1)))
pret_w1 = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"]
pret_w1[...] = np.array(bl_w1)
p_gamma[...] = np.array(bl_gamma)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
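# report (nonzero pruning-mask entries, total entries) for this layer,
# i.e. presumably how many weights survive pruning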
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 2
bl_w1 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
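# init_mask (above) flags positions whose randomly-mapped partner is zero in the
# baseline mask; OR-ing in the un-permuted recovery mask can only enlarge the
# set of nonzero entries, so no baseline entry is dropped by the re-mapping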
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
# integer division (//) keeps the tiling counts and index arithmetic integral under Python 3 as well
rand_map_0_expand = np.tile(rand_map_0, [weight_shape[0] // tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1, [weight_shape[0] // tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2, [weight_shape[0] // tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
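# the 32 per-LUT constants alternate between +1 and -1 fills (polarity reversed
# from c17 onward); presumably this makes only the first LUT input connection
# effective at initialisation, matching the baseline binary weights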
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 3
bl_w1 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0, [weight_shape[0] // tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1, [weight_shape[0] // tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2, [weight_shape[0] // tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 4
bl_w1 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0, [weight_shape[0] // tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1, [weight_shape[0] // tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2, [weight_shape[0] // tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 5
bl_w1 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0, [weight_shape[0] // tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1, [weight_shape[0] // tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2, [weight_shape[0] // tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
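# layers 2-5 above repeat the same copy-and-seed pattern; a minimal sketch of a
# loop-based alternative (hypothetical, not how the original script is written):
#     for idx in range(2, 6):
#         name = "binary_dense_%d" % idx
#         layer_bl = bl["model_weights"][name][name]
#         layer_pret = pretrained["model_weights"][name][name]
#         # ... copy weights, gamma, mask and seed c1..c32 as above ...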
# bn 1
bl_beta = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 2
bl_beta = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 3
bl_beta = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 4
bl_beta = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 5
bl_beta = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
pretrained.close()
bl.close()
| 47.731103
| 192
| 0.749708
| 6,429
| 38,519
| 4.030642
| 0.024265
| 0.157913
| 0.162158
| 0.165091
| 0.988307
| 0.987188
| 0.987188
| 0.984255
| 0.982904
| 0.97152
| 0
| 0.064803
| 0.066565
| 38,519
| 806
| 193
| 47.790323
| 0.655903
| 0.013785
| 0
| 0.646889
| 0
| 0
| 0.345126
| 0.044254
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004342
| 0
| 0.004342
| 0.008683
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86a2581b9feb29958228bc644f88e652dbe7a0fa
| 38
|
py
|
Python
|
Ex029 Aula 11-Cores no Terminal.py
|
andersontmachado/ExerciciosPython
|
ebd93eb4127dadedee8b719ccc4bc20fc151d0ad
|
[
"MIT"
] | 1
|
2020-04-30T14:47:15.000Z
|
2020-04-30T14:47:15.000Z
|
Ex029 Aula 11-Cores no Terminal.py
|
andersontmachado/exerciciospython
|
ebd93eb4127dadedee8b719ccc4bc20fc151d0ad
|
[
"MIT"
] | null | null | null |
Ex029 Aula 11-Cores no Terminal.py
|
andersontmachado/exerciciospython
|
ebd93eb4127dadedee8b719ccc4bc20fc151d0ad
|
[
"MIT"
] | null | null | null |
print('\033[7;30mOla mundo\033[m!!!')
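# ANSI escape codes: "\033[7;30m" selects reverse video with a black foreground,
# "\033[m" resets the style; "Ola mundo" is Portuguese for "Hello world".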
| 19
| 37
| 0.631579
| 7
| 38
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.052632
| 38
| 1
| 38
| 38
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
86a72a80401e7713121d2f9ca2a2d2dc62069b97
| 16,684
|
py
|
Python
|
PaddleCV/tracking/pytracking/features/deep.py
|
weiwei1115/models
|
e2c96c5f64b1dc8f0d5d9aa121300b87150e11e3
|
[
"Apache-2.0"
] | 2
|
2021-05-15T07:35:04.000Z
|
2021-07-15T07:01:13.000Z
|
PaddleCV/tracking/pytracking/features/deep.py
|
weiwei1115/models
|
e2c96c5f64b1dc8f0d5d9aa121300b87150e11e3
|
[
"Apache-2.0"
] | null | null | null |
PaddleCV/tracking/pytracking/features/deep.py
|
weiwei1115/models
|
e2c96c5f64b1dc8f0d5d9aa121300b87150e11e3
|
[
"Apache-2.0"
] | 4
|
2021-08-11T08:25:10.000Z
|
2021-10-16T07:41:59.000Z
|
import os
import numpy as np
from paddle import fluid
from ltr.models.bbreg.atom import atom_resnet50, atom_resnet18
from ltr.models.siamese.siam import siamfc_alexnet
from ltr.models.siam.siam import SiamRPN_AlexNet, SiamMask_ResNet50_sharp, SiamMask_ResNet50_base
from pytracking.admin.environment import env_settings
from pytracking.features.featurebase import MultiFeatureBase
from pytracking.libs import TensorList
from pytracking.libs.paddle_utils import n2p
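# Feature extractors for the PaddlePaddle (fluid dygraph) port of pytracking:
# each class wraps a backbone (ATOM ResNet18/50, SiamFC AlexNet, SiamRPN,
# SiamMask), loads its weights from env_settings().network_path (or an absolute
# net_path) and returns backbone features as TensorList objects from extract().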
class ResNet18(MultiFeatureBase):
"""ResNet18 feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
output_layers=('block2', ),
net_path='atom_iou',
use_gpu=True,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.output_layers = list(output_layers)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path,
self.net_path)
self.net = atom_resnet18(
backbone_pretrained=False,
backbone_is_test=True,
iounet_is_test=True)
state_dictsm, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dictsm)
self.net.train()
self.iou_predictor = self.net.bb_regressor
self.layer_stride = {
'conv0': 2,
'conv1': 2,
'block0': 4,
'block1': 8,
'block2': 16,
'block3': 32,
'classification': 16,
'fc': None
}
self.layer_dim = {
'conv0': 64,
'conv1': 64,
'block0': 64,
'block1': 128,
'block2': 256,
'block3': 512,
'classification': 256,
'fc': None
}
self.iounet_feature_layers = self.net.bb_regressor_layer
if isinstance(self.pool_stride, int) and self.pool_stride == 1:
self.pool_stride = [1] * len(self.output_layers)
self.feature_layers = sorted(
list(set(self.output_layers + self.iounet_feature_layers)))
self.mean = np.reshape([0.485, 0.456, 0.406], [1, -1, 1, 1])
self.std = np.reshape([0.229, 0.224, 0.225], [1, -1, 1, 1])
def free_memory(self):
if hasattr(self, 'net'):
del self.net
if hasattr(self, 'iou_predictor'):
del self.iou_predictor
if hasattr(self, 'iounet_backbone_features'):
del self.iounet_backbone_features
if hasattr(self, 'iounet_features'):
del self.iounet_features
def dim(self):
return TensorList([self.layer_dim[l] for l in self.output_layers])
def stride(self):
return TensorList([
s * self.layer_stride[l]
for l, s in zip(self.output_layers, self.pool_stride)
])
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = im / 255. # don't use im /= 255. since we don't want to alter the input
im -= self.mean
im /= self.std
im = n2p(im)
output_features = self.net.extract_features(im, self.feature_layers)
# Store the raw resnet features which are input to iounet
iounet_backbone_features = TensorList([
output_features[layer] for layer in self.iounet_feature_layers
])
self.iounet_backbone_features = iounet_backbone_features.numpy()
# Store the processed features from iounet, just before pooling
self.iounet_features = TensorList([
f.numpy()
for f in self.iou_predictor.get_iou_feat(
iounet_backbone_features)
])
output = TensorList([
output_features[layer].numpy() for layer in self.output_layers
])
return output
class ResNet50(MultiFeatureBase):
"""ResNet50 feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
output_layers=('block2', ),
net_path='atom_iou',
use_gpu=True,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.output_layers = list(output_layers)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path,
self.net_path)
self.net = atom_resnet50(
backbone_pretrained=False,
backbone_is_test=True,
iounet_is_test=True)
state_dictsm, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dictsm)
self.net.train()
self.iou_predictor = self.net.bb_regressor
self.layer_stride = {
'conv0': 2,
'conv1': 2,
'block0': 4,
'block1': 8,
'block2': 16,
'block3': 32,
'classification': 16,
'fc': None
}
self.layer_dim = {
'conv0': 64,
'conv1': 64,
'block0': 256,
'block1': 512,
'block2': 1024,
'block3': 2048,
'classification': 256,
'fc': None
}
self.iounet_feature_layers = self.net.bb_regressor_layer
if isinstance(self.pool_stride, int) and self.pool_stride == 1:
self.pool_stride = [1] * len(self.output_layers)
self.feature_layers = sorted(
list(set(self.output_layers + self.iounet_feature_layers)))
self.mean = np.reshape([0.485, 0.456, 0.406], [1, -1, 1, 1])
self.std = np.reshape([0.229, 0.224, 0.225], [1, -1, 1, 1])
def free_memory(self):
if hasattr(self, 'net'):
del self.net
if hasattr(self, 'iou_predictor'):
del self.iou_predictor
if hasattr(self, 'iounet_backbone_features'):
del self.iounet_backbone_features
if hasattr(self, 'iounet_features'):
del self.iounet_features
def dim(self):
return TensorList([self.layer_dim[l] for l in self.output_layers])
def stride(self):
return TensorList([
s * self.layer_stride[l]
for l, s in zip(self.output_layers, self.pool_stride)
])
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = im / 255. # don't use im /= 255. since we don't want to alter the input
im -= self.mean
im /= self.std
im = n2p(im)
output_features = self.net.extract_features(im, self.feature_layers)
# Store the raw resnet features which are input to iounet
iounet_backbone_features = TensorList([
output_features[layer] for layer in self.iounet_feature_layers
])
self.iounet_backbone_features = iounet_backbone_features.numpy()
# Store the processed features from iounet, just before pooling
self.iounet_features = TensorList([
f.numpy()
for f in self.iou_predictor.get_iou_feat(
iounet_backbone_features)
])
output = TensorList([
output_features[layer].numpy() for layer in self.output_layers
])
return output
class SFCAlexnet(MultiFeatureBase):
"""Alexnet feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
output_layers=('conv5', ),
net_path='estimator',
use_gpu=True,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.output_layers = list(output_layers)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path,
self.net_path)
self.net = siamfc_alexnet(
backbone_pretrained=False,
backbone_is_test=True,
estimator_is_test=True)
state_dictsm, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dictsm)
self.net.train()
self.target_estimator = self.net.target_estimator
self.layer_stride = {'conv5': 8}
self.layer_dim = {'conv5': 256}
self.estimator_feature_layers = self.net.target_estimator_layer
if isinstance(self.pool_stride, int) and self.pool_stride == 1:
self.pool_stride = [1] * len(self.output_layers)
self.feature_layers = sorted(
list(set(self.output_layers + self.estimator_feature_layers)))
self.mean = np.reshape([0., 0., 0.], [1, -1, 1, 1])
self.std = np.reshape([1 / 255., 1 / 255., 1 / 255.], [1, -1, 1, 1])
def free_memory(self):
if hasattr(self, 'net'):
del self.net
if hasattr(self, 'target_estimator'):
del self.target_estimator
if hasattr(self, 'estimator_backbone_features'):
del self.estimator_backbone_features
def dim(self):
return TensorList([self.layer_dim[l] for l in self.output_layers])
def stride(self):
return TensorList([
s * self.layer_stride[l]
for l, s in zip(self.output_layers, self.pool_stride)
])
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = im / 255. # don't use im /= 255. since we don't want to alter the input
im -= self.mean
im /= self.std
im = n2p(im)
output_features = self.net.extract_features(im, self.feature_layers)
# Store the raw backbone features which are input to estimator
estimator_backbone_features = TensorList([
output_features[layer]
for layer in self.estimator_feature_layers
])
self.estimator_backbone_features = estimator_backbone_features.numpy(
)
output = TensorList([
output_features[layer].numpy() for layer in self.output_layers
])
return output
class SRPNAlexNet(MultiFeatureBase):
"""Alexnet feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
net_path='estimator',
use_gpu=True,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path, self.net_path)
self.net = SiamRPN_AlexNet(backbone_pretrained=False, is_test=True)
state_dict, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dict)
self.net.eval()
def free_memory(self):
if hasattr(self, 'net'):
del self.net
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = n2p(im)
output_features = self.net.extract_backbone_features(im)
# Store the raw backbone features which are input to estimator
output = TensorList([layer.numpy() for layer in output_features])
return output
class SMaskResNet50_base(MultiFeatureBase):
"""Resnet50-dilated feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
net_path='estimator',
use_gpu=True,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path, self.net_path)
self.net = SiamMask_ResNet50_base(backbone_pretrained=False, is_test=True)
state_dict, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dict)
self.net.eval()
def free_memory(self):
if hasattr(self, 'net'):
del self.net
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = n2p(im)
output_features = self.net.extract_backbone_features(im)
# Store the raw backbone features which are input to estimator
output = TensorList([layer.numpy() for layer in output_features])
return output
class SMaskResNet50_sharp(MultiFeatureBase):
"""Resnet50-dilated feature.
args:
output_layers: List of layers to output.
net_path: Relative or absolute net path (default should be fine).
use_gpu: Use GPU or CPU.
"""
def __init__(self,
net_path='estimator',
use_gpu=True,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.use_gpu = use_gpu
self.net_path = net_path
def initialize(self):
with fluid.dygraph.guard():
if os.path.isabs(self.net_path):
net_path_full = self.net_path
else:
net_path_full = os.path.join(env_settings().network_path, self.net_path)
self.net = SiamMask_ResNet50_sharp(backbone_pretrained=False, is_test=True)
state_dict, _ = fluid.load_dygraph(net_path_full)
self.net.load_dict(state_dict)
self.net.eval()
def free_memory(self):
if hasattr(self, 'net'):
del self.net
def extract(self, im: np.ndarray, debug_save_name=None):
with fluid.dygraph.guard():
if debug_save_name is not None:
np.savez(debug_save_name, im)
im = n2p(im)
output_features = self.net.extract_backbone_features(im)
# Store the raw backbone features which are input to estimator
output = TensorList([layer.numpy() for layer in output_features])
return output
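# Minimal usage sketch (assuming a valid weights file can be resolved from
# net_path and that the remaining MultiFeatureBase kwargs keep their defaults):
#     feat = ResNet18(output_layers=('block2',), net_path='atom_iou')
#     feat.initialize()
#     features = feat.extract(im)   # im: np.ndarray image, normalised internally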
| 33.705051
| 98
| 0.552805
| 1,934
| 16,684
| 4.539814
| 0.089452
| 0.055011
| 0.033827
| 0.016401
| 0.899544
| 0.890774
| 0.890774
| 0.882574
| 0.880296
| 0.880296
| 0
| 0.022499
| 0.35531
| 16,684
| 494
| 99
| 33.773279
| 0.79379
| 0.100815
| 0
| 0.850144
| 0
| 0
| 0.03096
| 0.00523
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086455
| false
| 0
| 0.028818
| 0.017291
| 0.167147
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 86b35f885b38c215bfc2684f695ba3ae9b742e9a | 9,347 | py | Python | pandapower/test/opf/test_costs_pwl.py | mathildebadoual/pandapower | 9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc | ["BSD-3-Clause"] | 1 | 2020-10-19T06:39:15.000Z | 2020-10-19T06:39:15.000Z | pandapower/test/opf/test_costs_pwl.py | miek770/pandapower | de004efc1b7432a633792af4f551f7635a02db47 | ["BSD-3-Clause"] | null | null | null | pandapower/test/opf/test_costs_pwl.py | miek770/pandapower | de004efc1b7432a633792af4f551f7635a02db47 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import numpy as np
import pytest
from pandapower.optimal_powerflow import OPFNotConverged
import pandapower as pp
try:
import pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
logger.setLevel("DEBUG")
def test_cost_piecewise_linear_gen():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-150, -100], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_gen.p_kw.values / 1.5 < 1e-3
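# Why "/ 1.5": the cost points [[-150, -100], [-75, -50], [0, 0]] are (p_kw, cost)
# pairs on one straight line of slope 2/3, so the optimal cost equals p_kw / 1.5.
# Quick sanity check of that slope (illustrative only, not an original assertion):
_pwl_points = np.array([[-150, -100], [-75, -50], [0, 0]], dtype=float)
_pwl_slopes = np.diff(_pwl_points[:, 1]) / np.diff(_pwl_points[:, 0])
assert np.allclose(_pwl_slopes, 2.0 / 3.0)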
def test_cost_piecewise_linear_eg():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10)
pp.create_ext_grid(net, 0, max_p_kw=0, min_p_kw=-50)
pp.create_gen(net, 1, p_kw=-10, max_p_kw=0, min_p_kw=-50, controllable=True)
# pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "ext_grid", np.array([[-50, -500], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - - net.res_ext_grid.p_kw.values * 10 < 1e-3
# check and assert result
def test_get_costs():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-150, -300], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost == 2 * net.res_gen.p_kw.values
# check and assert result
def test_cost_piecewise_linear_sgen():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-150, -100], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
def test_cost_piecewise_linear_load():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_load(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=50, max_q_kvar=0,
min_q_kvar=0)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "load", np.array([[0, 0], [75, 50], [150, 100]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert abs(net.res_cost - net.res_load.p_kw.values / 1.5) < 1e-3
def test_cost_piecewise_linear_sgen_uneven_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-150, -200], [-75, -50], [0, 0]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
def test_cost_piecewise_linear_load_uneven_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_load(net, 1, p_kw=100, controllable=True, max_p_kw=150, min_p_kw=50, max_q_kvar=0,
min_q_kvar=0)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "load", np.array([[0, 0], [75, 51], [150, 101]]))
# run OPF
with pytest.raises(OPFNotConverged):
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert abs(net.res_cost - net.res_load.p_kw.values / 1.5) < 1e-3
def test_cost_piecewise_linear_sgen_very_unsteady_slopes():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.5
vm_min = 0.5
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-1000, controllable=True, max_p_kw=0, min_p_kw=-1500,
max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array([[-1500, 2],[-750,1 ], [0,2]]))
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# assert net.res_cost - net.res_sgen.p_kw.values / 1.5 < 1e-3
if __name__ == "__main__":
# test_cost_piecewise_linear_sgen_very_unsteady_slopes()
pytest.main(["test_costs_pwl.py", "-s"])
| 37.09127
| 99
| 0.644378
| 1,603
| 9,347
| 3.436057
| 0.091703
| 0.091503
| 0.034858
| 0.040668
| 0.904139
| 0.896514
| 0.876725
| 0.871097
| 0.855301
| 0.855301
| 0
| 0.079405
| 0.230662
| 9,347
| 251
| 100
| 37.239044
| 0.686553
| 0.135552
| 0
| 0.732877
| 0
| 0
| 0.026355
| 0
| 0
| 0
| 0
| 0
| 0.10274
| 1
| 0.054795
| false
| 0
| 0.047945
| 0
| 0.10274
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 86c0f5e44bffc70a506881987c3f56e4e3ef7cdd | 30,797 | py | Python | tests/contrib/flask/test_request.py | thieman/dd-trace-py | 1e87c9bdf7769032982349c4ccc0e1c2e6866a16 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null | tests/contrib/flask/test_request.py | thieman/dd-trace-py | 1e87c9bdf7769032982349c4ccc0e1c2e6866a16 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null | tests/contrib/flask/test_request.py | thieman/dd-trace-py | 1e87c9bdf7769032982349c4ccc0e1c2e6866a16 | ["Apache-2.0", "BSD-3-Clause"] | 1 | 2021-02-11T10:20:14.000Z | 2021-02-11T10:20:14.000Z |
# -*- coding: utf-8 -*-
from ddtrace.compat import PY2
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.contrib.flask.patch import flask_version
from ddtrace.ext import http
from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID
from flask import abort
from . import BaseFlaskTestCase
from ...utils import assert_span_http_status_code
base_exception_name = 'builtins.Exception'
if PY2:
base_exception_name = 'exceptions.Exception'
class FlaskRequestTestCase(BaseFlaskTestCase):
def test_request(self):
"""
When making a request
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
assert http.QUERY_STRING not in req_span.meta
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_query_string_trace(self):
"""Make sure when making a request that we create the expected spans and capture the query string."""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_http_config('flask', dict(trace_query_string=True)):
self.client.get('/?foo=bar&baz=biz')
spans = self.get_spans()
# Request tags
assert spans[0].get_tag(http.QUERY_STRING) == 'foo=bar&baz=biz'
def test_analytics_global_on_integration_default(self):
"""
When making a request
When an integration trace search is not set and the sample rate is not set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 1.0,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_on_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_default(self):
"""
When making a request
When an integration trace search is not set and sample rate is set and globally trace search is disabled
We expect the root span to not include tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is disabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_distributed_tracing(self):
"""
When making a request
When distributed tracing headers are present
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
# Default: distributed tracing enabled
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# Explicitly enable distributed tracing
with self.override_config('flask', dict(distributed_tracing_enabled=True)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# With distributed tracing disabled
with self.override_config('flask', dict(distributed_tracing_enabled=False)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertNotEqual(span.trace_id, 678910)
self.assertIsNone(span.parent_id)
def test_request_query_string(self):
"""
When making a request
When the request contains a query string
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/', query_string=dict(hello='flask'))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
# Note: contains no query string
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
# Note: contains no query string
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
# Note: contains no query string
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
# Note: contains no query string
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_unicode(self):
"""
When making a request
When the url contains unicode
We create the expected spans
"""
@self.app.route(u'/üŋïĉóđē')
def unicode():
return 'üŋïĉóđē', 200
res = self.client.get(u'/üŋïĉóđē')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'\xc3\xbc\xc5\x8b\xc3\xaf\xc4\x89\xc3\xb3\xc4\x91\xc4\x93')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.unicode',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, u'GET /üŋïĉóđē')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'unicode')
self.assertEqual(req_span.get_tag('flask.url_rule'), u'/üŋïĉóđē')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), u'http://localhost/üŋïĉóđē')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.unicode')
self.assertEqual(handler_span.resource, u'/üŋïĉóđē')
self.assertEqual(req_span.error, 0)
def test_request_404(self):
"""
When making a request
When the requested endpoint was not found
We create the expected spans
"""
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET 404')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_abort_404(self):
"""
When making a request
When the requested endpoint calls `abort(404)`
We create the expected spans
"""
@self.app.route('/not-found')
def not_found():
abort(404)
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.not_found',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /not-found')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'not_found')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/not-found')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.not_found')
self.assertEqual(handler_span.resource, '/not-found')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_500(self):
"""
When making a request
When the requested endpoint raises an exception
We create the expected spans
"""
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
def test_request_501(self):
"""
When making a request
When the requested endpoint calls `abort(501)`
We create the expected spans
"""
@self.app.route('/501')
def fivehundredone():
abort(501)
res = self.client.get('/501')
self.assertEqual(res.status_code, 501)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundredone',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /501')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/501')
assert_span_http_status_code(req_span, 501)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundredone')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/501')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundredone')
self.assertEqual(handler_span.resource, '/501')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 0)
def test_request_error_handler(self):
"""
When making a request
When the requested endpoint raises an exception
We create the expected spans
"""
@self.app.errorhandler(500)
def error_handler(e):
return 'Whoops', 500
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
self.assertEqual(res.data, b'Whoops')
spans = self.get_spans()
if flask_version >= (0, 12, 0):
self.assertEqual(len(spans), 11)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
else:
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
| 39.892487
| 117
| 0.618437
| 3,558
| 30,797
| 5.149522
| 0.058179
| 0.153913
| 0.071717
| 0.087654
| 0.925718
| 0.908634
| 0.8969
| 0.891169
| 0.877906
| 0.861587
| 0
| 0.016762
| 0.273566
| 30,797
| 771
| 118
| 39.944228
| 0.802208
| 0.103517
| 0
| 0.797794
| 0
| 0.001838
| 0.194158
| 0.11455
| 0
| 0
| 0
| 0
| 0.455882
| 1
| 0.051471
| false
| 0
| 0.014706
| 0.018382
| 0.086397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 | 86cf3a0a9a0e45685f04435a33dcecfd088782c9 | 12,924 | py | Python | melodic/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | Dieptranivsr/Ros_Diep | d790e75e6f5da916701b11a2fdf3e03b6a47086b | ["MIT"] | null | null | null | melodic/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | Dieptranivsr/Ros_Diep | d790e75e6f5da916701b11a2fdf3e03b6a47086b | ["MIT"] | 1 | 2021-07-08T10:26:06.000Z | 2021-07-08T10:31:11.000Z | melodic/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | Dieptranivsr/Ros_Diep | d790e75e6f5da916701b11a2fdf3e03b6a47086b | ["MIT"] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from gazebo_msgs/GetLinkPropertiesRequest.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetLinkPropertiesRequest(genpy.Message):
_md5sum = "7d82d60381f1b66a30f2157f60884345"
_type = "gazebo_msgs/GetLinkPropertiesRequest"
_has_header = False # flag to mark the presence of a Header object
_full_text = """string link_name # name of link
# link names are prefixed by model name, e.g. pr2::base_link
"""
__slots__ = ['link_name']
_slot_types = ['string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
link_name
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetLinkPropertiesRequest, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.link_name is None:
self.link_name = ''
else:
self.link_name = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.link_name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.link_name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.link_name = str[start:end].decode('utf-8', 'rosmsg')
else:
self.link_name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
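# --- Illustrative sketch (added for exposition; not emitted by genpy) ---
# The (de)serializers above use ROS's length-prefixed string wire format:
# a little-endian uint32 byte count followed by the raw UTF-8 payload.
def _example_link_name_round_trip(link_name='pr2::base_link'):
    data = link_name.encode('utf-8')
    # pack: '<I' writes the 4-byte length, '%ss' the payload bytes
    wire = struct.Struct('<I%ss' % len(data)).pack(len(data), data)
    # unpack: read the length, then slice out exactly that many bytes
    (length,) = struct.Struct('<I').unpack(wire[:4])
    return wire[4:4 + length].decode('utf-8')  # equals link_name again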
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from gazebo_msgs/GetLinkPropertiesResponse.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geometry_msgs.msg
class GetLinkPropertiesResponse(genpy.Message):
_md5sum = "a8619f92d17cfcc3958c0fd13299443d"
_type = "gazebo_msgs/GetLinkPropertiesResponse"
_has_header = False # flag to mark the presence of a Header object
_full_text = """geometry_msgs/Pose com # center of mass location in link frame
# and orientation of the moment of inertias
# relative to the link frame
bool gravity_mode # set gravity mode on/off
float64 mass # linear mass of link
float64 ixx # moment of inertia
float64 ixy # moment of inertia
float64 ixz # moment of inertia
float64 iyy # moment of inertia
float64 iyz # moment of inertia
float64 izz # moment of inertia
bool success # return true if get info is successful
string status_message # comments if available
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
__slots__ = ['com','gravity_mode','mass','ixx','ixy','ixz','iyy','iyz','izz','success','status_message']
_slot_types = ['geometry_msgs/Pose','bool','float64','float64','float64','float64','float64','float64','float64','bool','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
com,gravity_mode,mass,ixx,ixy,ixz,iyy,iyz,izz,success,status_message
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetLinkPropertiesResponse, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.com is None:
self.com = geometry_msgs.msg.Pose()
if self.gravity_mode is None:
self.gravity_mode = False
if self.mass is None:
self.mass = 0.
if self.ixx is None:
self.ixx = 0.
if self.ixy is None:
self.ixy = 0.
if self.ixz is None:
self.ixz = 0.
if self.iyy is None:
self.iyy = 0.
if self.iyz is None:
self.iyz = 0.
if self.izz is None:
self.izz = 0.
if self.success is None:
self.success = False
if self.status_message is None:
self.status_message = ''
else:
self.com = geometry_msgs.msg.Pose()
self.gravity_mode = False
self.mass = 0.
self.ixx = 0.
self.ixy = 0.
self.ixz = 0.
self.iyy = 0.
self.iyz = 0.
self.izz = 0.
self.success = False
self.status_message = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_7dB7dB().pack(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.com is None:
self.com = geometry_msgs.msg.Pose()
end = 0
_x = self
start = end
end += 114
(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success,) = _get_struct_7dB7dB().unpack(str[start:end])
self.gravity_mode = bool(self.gravity_mode)
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_7dB7dB().pack(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success))
_x = self.status_message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.com is None:
self.com = geometry_msgs.msg.Pose()
end = 0
_x = self
start = end
end += 114
(_x.com.position.x, _x.com.position.y, _x.com.position.z, _x.com.orientation.x, _x.com.orientation.y, _x.com.orientation.z, _x.com.orientation.w, _x.gravity_mode, _x.mass, _x.ixx, _x.ixy, _x.ixz, _x.iyy, _x.iyz, _x.izz, _x.success,) = _get_struct_7dB7dB().unpack(str[start:end])
self.gravity_mode = bool(self.gravity_mode)
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status_message = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status_message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_7dB7dB = None
def _get_struct_7dB7dB():
global _struct_7dB7dB
if _struct_7dB7dB is None:
_struct_7dB7dB = struct.Struct("<7dB7dB")
return _struct_7dB7dB
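# Explanatory check (added for exposition; not genpy output): the deserializers
# above advance `end` by 114 bytes because "<7dB7dB" packs 7 doubles + 1 byte +
# 7 doubles + 1 byte with no padding ('<' disables alignment),
# i.e. 7*8 + 1 + 7*8 + 1 = 114.
assert struct.calcsize("<7dB7dB") == 114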
class GetLinkProperties(object):
_type = 'gazebo_msgs/GetLinkProperties'
_md5sum = '0e06a70386d0ee3fb880c02f23fcd821'
_request_class = GetLinkPropertiesRequest
_response_class = GetLinkPropertiesResponse
| 37.031519
| 284
| 0.638502
| 1,769
| 12,924
| 4.496891
| 0.120972
| 0.014079
| 0.03017
| 0.019107
| 0.763042
| 0.763042
| 0.74758
| 0.74758
| 0.74758
| 0.74758
| 0
| 0.020024
| 0.227174
| 12,924
| 348
| 285
| 37.137931
| 0.776432
| 0.192355
| 0
| 0.763052
| 1
| 0
| 0.213005
| 0.046007
| 0
| 0
| 0.001992
| 0
| 0
| 1
| 0.060241
| false
| 0
| 0.036145
| 0
| 0.208835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 | 811a461fc321525abd67f11d9522903e94b00815 | 3,067 | py | Python | 2017/adv2017-1.py | fcharlier/AdventOfCode | 6b2765da9e4d6f6b1f201897bb56043482a65bb2 | ["WTFPL"] | null | null | null | 2017/adv2017-1.py | fcharlier/AdventOfCode | 6b2765da9e4d6f6b1f201897bb56043482a65bb2 | ["WTFPL"] | null | null | null | 2017/adv2017-1.py | fcharlier/AdventOfCode | 6b2765da9e4d6f6b1f201897bb56043482a65bb2 | ["WTFPL"] | null | null | null |
#!/usr/bin/python
def meh(captcha):
"""Returns the sum of the digits which match the next one in the captcha
input string.
>>> meh('1122')
3
>>> meh('1111')
4
>>> meh('1234')
0
>>> meh('91212129')
9
"""
result = 0
for n in range(len(captcha)):
if captcha[n] == captcha[(n + 1) % len(captcha)]:
result += int(captcha[n])
return result
def meh2(captcha):
"""Returns the sum of the digits which match the next one in the captcha
input string.
>>> meh2('1212')
6
>>> meh2('1221')
0
>>> meh2('123425')
4
>>> meh2('123123')
12
>>> meh2('12131415')
4
"""
result = 0
for n in range(len(captcha)):
if captcha[n] == captcha[(n + len(captcha) // 2) % len(captcha)]:
result += int(captcha[n])
return result
if __name__ == '__main__':
input = '57276274387944537823652626177853384411146325384494935924454336611953119173638191671326254832624841593421667683474349154668177743437745965461678636631863541462893547616877914914662358836365421198516263335926544716331814125295712581158399321372683742773423626286669759415959391374744214595682795818615532673877868424196926497731144319736445141728123322962547288572434564178492753681842244888368542423832228211172842456231275738182764232265933625119312598161192193214898949267765417468348935134618964683127194391796165368145548814473129857697989322621368744725685183346825333247866734735894493395218781464346951777873929898961358796274889826894529599645442657423438562423853247543621565468819799931598754753467593832328147439341586125262733737128386961596394728159719292787597426898945198788211417854662948358422729471312456437778978749753927251431677533575752312447488337156956217451965643454445329758327129966657189332824969141448538681979632611199385896965946849725421978137753366252459914913637858783146735469758716752765718189175583956476935185985918536318424248425426398158278111751711911227818826766177996223718837428972784328925743869885232266127727865267881592395643836999244218345184474613129823933659422223685422732186536199153988717455568523781673393698356967355875123554797755491181791593156433735591529495984256519631187849654633243225118132152549712643273819314433877592644693826861523243946998615722951182474773173215527598949553185313259992227879964482121769617218685394776778423378182462422788277997523913176326468957342296368178321958626168785578977414537368686438348124283789748775163821457641135163495649331144436157836647912852483177542224864952271874645274572426458614384917923623627532487625396914111582754953944965462576624728896917137599778828769958626788685374749661741223741834844643725486925886933118382649581481351844943368484853956759877215252766294896496444835264357169642341291412768946589781812493421379575569593678354241223363739129813633236996588711791919421574583924743119867622229659211793468744163297478952475933163259769578345894367855534294493613767564497137369969315192443795512585'
print(meh(input))
print(meh2(input))
| 61.34
| 2,134
| 0.852625
| 125
| 3,067
| 20.856
| 0.384
| 0.018412
| 0.013042
| 0.015343
| 0.121979
| 0.121979
| 0.121979
| 0.121979
| 0.09206
| 0.09206
| 0
| 0.790824
| 0.097489
| 3,067
| 49
| 2,135
| 62.591837
| 0.151012
| 0.005217
| 0
| 0.5
| 0
| 0
| 0.818462
| 0.815385
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.125
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 | 81367db3d2084fd41d74cdefdf3b14a53b5730ea | 46,827 | py | Python | scripts/misc/operator_condition_number_scipy.py | volpatto/firedrake_scripts | ba9c935bb0c9a6bbc6de69f476e42ad0ea8bb1c6 | ["MIT"] | 5 | 2019-01-19T14:18:51.000Z | 2022-02-10T14:22:12.000Z | scripts/misc/operator_condition_number_scipy.py | volpatto/firedrake_scripts | ba9c935bb0c9a6bbc6de69f476e42ad0ea8bb1c6 | ["MIT"] | null | null | null | scripts/misc/operator_condition_number_scipy.py | volpatto/firedrake_scripts | ba9c935bb0c9a6bbc6de69f476e42ad0ea8bb1c6 | ["MIT"] | 1 | 2021-06-14T07:32:26.000Z | 2021-06-14T07:32:26.000Z |
import attr
from firedrake import *
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from scipy.linalg import svd
from scipy.sparse.linalg import svds
from scipy.sparse import csr_matrix
from slepc4py import SLEPc
import pandas as pd
from tqdm import tqdm
import os
matplotlib.use('Agg')
@attr.s
class ConditionNumberResult(object):
form = attr.ib()
assembled_form = attr.ib()
condition_number = attr.ib()
sparse_operator = attr.ib()
number_of_dofs = attr.ib()
nnz = attr.ib()
is_operator_symmetric = attr.ib()
bcs = attr.ib(factory=list)
def plot_matrix(assembled_form, **kwargs):
"""Provides a plot of a matrix."""
fig, ax = plt.subplots(1, 1)
petsc_mat = assembled_form.M.handle
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
return plot
def plot_matrix_mixed(assembled_form, **kwargs):
"""Provides a plot of a mixed matrix."""
fig, ax = plt.subplots(1, 1)
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
return plot
def plot_matrix_primal_hybrid_full(a_form, bcs=[], **kwargs):
"""Provides a plot of a full hybrid-mixed matrix."""
fig, ax = plt.subplots(1, 1)
assembled_form = assemble(a_form, bcs=bcs, mat_type="aij")
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
return plot
def plot_matrix_mixed_hybrid_full(a_form, bcs=[], **kwargs):
"""Provides a plot of a full hybrid-mixed matrix."""
fig, ax = plt.subplots(1, 1)
assembled_form = assemble(a_form, bcs=bcs, mat_type="aij")
petsc_mat = assembled_form.M.handle
f0_size = assembled_form.M[0, 0].handle.getSize()
f1_size = assembled_form.M[1, 1].handle.getSize()
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.axhline(y=f0_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] - 0.5, color="k")
ax.axhline(y=f0_size[0] + f1_size[0] - 0.5, color="k")
ax.axvline(x=f0_size[0] + f1_size[0] - 0.5, color="k")
return plot
def plot_matrix_hybrid_multiplier(a_form, trace_index=2, bcs=[], **kwargs):
"""Provides a plot of a condensed hybrid-mixed matrix for single scale problems."""
fig, ax = plt.subplots(1, 1)
_A = Tensor(a_form)
A = _A.blocks
idx = trace_index
S = A[idx, idx] - A[idx, :idx] * A[:idx, :idx].inv * A[:idx, idx]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
Mnp = Mnp.toarray()
# Eliminate rows and columns filled with zero entries
Mnp = Mnp[~(Mnp==0).all(1)]
idx = np.argwhere(np.all(Mnp[..., :] == 0, axis=0))
Mnp = np.delete(Mnp, idx, axis=1)
Am = np.ma.masked_values(Mnp, 0, rtol=1e-13)
# Plot the matrix
plot = ax.matshow(Am, **kwargs)
# Below there is the spy alternative
# plot = plt.spy(Am, **kwargs)
# Remove axis ticks and values
ax.tick_params(length=0)
ax.set_xticklabels([])
ax.set_yticklabels([])
return plot
def filter_real_part_in_array(array: np.ndarray, imag_threshold: float = 1e-5) -> np.ndarray:
"""Utility function to filter real part in a numpy array.
:param array:
Array with real and complex numbers.
:param imag_threshold:
Threshold to cut off imaginary part in complex number.
:return:
Filtered array with only real numbers.
"""
real_part_array = array.real[abs(array.imag) < imag_threshold]
return real_part_array
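# Illustrative usage of the filter above (added for exposition, not part of the
# original script): singular/eigen values returned by SLEPc may carry tiny
# spurious imaginary parts, which are stripped before forming the condition number.
_demo_values = np.array([2.0 + 1e-9j, 0.5 - 1e-8j, 1.0 + 0.3j])
# keeps 2.0 and 0.5, drops the genuinely complex 1.0 + 0.3j entry
assert np.allclose(filter_real_part_in_array(_demo_values), [2.0, 0.5])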
def calculate_condition_number(
A,
num_of_factors,
backend: str = "scipy",
use_sparse: bool = False,
zero_tol: float = 1e-5
):
backend = backend.lower()
if backend == "scipy":
size = A.getSize()
Mnp = csr_matrix(A.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
if use_sparse:
singular_values = svds(
A=Mnp,
k=num_of_factors,
which="LM",
maxiter=5000,
return_singular_vectors=False,
solver="lobpcg"
)
else:
M = Mnp.toarray()
singular_values = svd(M, compute_uv=False, check_finite=False)
singular_values = singular_values[singular_values > zero_tol]
condition_number = singular_values.max() / singular_values.min()
elif backend == "slepc":
S = SLEPc.SVD()
S.create()
S.setOperator(A)
S.setType(SLEPc.SVD.Type.LAPACK)
S.setDimensions(nsv=num_of_factors)
S.setTolerances(max_it=5000)
S.setWhichSingularTriplets(SLEPc.SVD.Which.LARGEST)
S.solve()
num_converged_values = S.getConverged()
singular_values_list = list()
if num_converged_values > 0:
for i in range(num_converged_values):
singular_value = S.getValue(i)
singular_values_list.append(singular_value)
else:
raise RuntimeError("SLEPc SVD has not converged.")
singular_values = np.array(singular_values_list)
singular_values = singular_values[singular_values > zero_tol]
condition_number = singular_values.max() / singular_values.min()
else:
raise NotImplementedError("The required method for condition number estimation is currently unavailable.")
return condition_number
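# Minimal usage sketch for the routine above (assumes petsc_mat comes from an
# assembled Firedrake bilinear form, as done in the solver functions below, and
# that V is the corresponding FunctionSpace; names are illustrative):
# A = assemble(a, bcs=bcs, mat_type="aij")
# petsc_mat = A.M.handle
# kappa = calculate_condition_number(petsc_mat, num_of_factors=V.dim() - 1)
# print(f"Estimated condition number: {kappa}")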
def solve_poisson_cg(mesh, degree=1, use_quads=False):
# Function space declaration
V = FunctionSpace(mesh, "CG", degree)
# Trial and test functions
u = TrialFunction(V)
v = TestFunction(V)
# Dirichlet BCs
bcs = DirichletBC(V, 0.0, "on_boundary")
# Variational form
a = inner(grad(u), grad(v)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = V.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_ls(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Stabilization parameters
delta_1 = Constant(1)
delta_2 = Constant(1)
delta_3 = Constant(1)
# Least-squares terms
a = delta_1 * inner(u + grad(p), v + grad(q)) * dx
a += delta_2 * div(u) * div(v) * dx
a += delta_3 * inner(curl(u), curl(v)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_cgls(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p - q * div(u)) * dx
# Stabilizing terms
a += -0.5 * inner((u + grad(p)), v + grad(q)) * dx
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
a += 0.5 * div(u) * div(v) * dx
a += 0.5 * inner(curl(u), curl(v)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_vms(mesh, degree=1):
# Function space declaration
pressure_family = 'CG'
velocity_family = 'CG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
# Stabilizing terms
a += 0.5 * inner(u + grad(p), grad(q) - v) * dx
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# L += 0.5 * f * div(v) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_mixed_RT(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
if use_quads:
hdiv_family = 'RTCF'
pressure_family = 'DQ'
else:
hdiv_family = 'RT'
pressure_family = 'DG'
U = FunctionSpace(mesh, hdiv_family, degree + 1)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
bcs = DirichletBC(W[0], sigma_e, "on_boundary")
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
A = assemble(a, bcs=bcs, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dgls(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
L0 = 1
eta_p = L0 * h # method B in the Badia-Codina paper
# eta_p = 1
# eta_p = L0 * L0 # method D in the Badia-Codina paper
eta_u = h / L0 # method B in the Badia-Codina paper
# eta_u = 1
# Nitsche's penalizing term
beta_0 = Constant(1.0)
beta = beta_0 / h
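# (the 1/h scaling is the standard choice for Nitsche-type penalty terms)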
# Mixed classical terms
a = (dot(u, v) - div(v) * p - q * div(u)) * dx
# DG terms
a += jump(v, n) * avg(p) * dS - avg(q) * jump(u, n) * dS
# Edge stabilizing terms
# ** Badia-Codina based
a += (avg(eta_p) / h_avg) * (jump(u, n) * jump(v, n)) * dS
a += (avg(eta_u) / h_avg) * dot(jump(p, n), jump(q, n)) * dS
# ** Mesh independent terms
# a += jump(u, n) * jump(v, n) * dS
# a += dot(jump(p, n), jump(q, n)) * dS
# Volumetric stabilizing terms
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += -0.5 * inner(u + grad(p), v + grad(q)) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# ** Badia-Codina based
a += -eta_u * inner(u + grad(p), v + grad(q)) * dx
a += eta_p * div(u) * div(v) * dx
a += eta_p * inner(curl(u), curl(v)) * dx
# Weakly imposed boundary conditions
a += dot(v, n) * p * ds - q * dot(u, n) * ds
a += beta * p * q * ds # may decrease convergence rates
# ** The terms below are based on ASGS Badia-Codina (2010); this is not a classical Nitsche method
a += (eta_p / h) * dot(u, n) * dot(v, n) * ds
a += (eta_u / h) * dot(p * n, q * n) * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dvms(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
L0 = 1
eta_p = L0 * h # method B in the Badia-Codina paper
# eta_p = L0 * L0 # method D in the Badia-Codina paper
eta_u = h / L0 # method B in the Badia-Codina paper
# Mixed classical terms
a = (dot(u, v) - div(v) * p + q * div(u)) * dx
# DG terms
a += jump(v, n) * avg(p) * dS - avg(q) * jump(u, n) * dS
# Edge stabilizing terms
# ** Badia-Codina based
a += (avg(eta_p) / h_avg) * (jump(u, n) * jump(v, n)) * dS
a += (avg(eta_u) / h_avg) * dot(jump(p, n), jump(q, n)) * dS
# ** Mesh independent (original)
# a += jump(u, n) * jump(v, n) * dS # not considered in the original paper
# a += dot(jump(p, n), jump(q, n)) * dS
# Volumetric stabilizing terms
# a += 0.5 * inner(u + grad(p), grad(q) - v) * dx
# a += 0.5 * h * h * div(u) * div(v) * dx
# a += 0.5 * h * h * inner(curl(u), curl(v)) * dx
# L += 0.5 * h * h * f * div(v) * dx
# a += 0.5 * div(u) * div(v) * dx
# a += 0.5 * inner(curl(u), curl(v)) * dx
# L += 0.5 * f * div(v) * dx
# ** Badia-Codina based
a += eta_u * inner(u + grad(p), grad(q) - v) * dx
a += eta_p * div(u) * div(v) * dx
# Weakly imposed boundary conditions
a += dot(v, n) * p * ds - q * dot(u, n) * ds
# ** The terms below are based on ASGS Badia-Codina (2010); this is not a classical Nitsche method
a += (eta_p / h) * dot(u, n) * dot(v, n) * ds
a += (eta_u / h) * dot(p * n, q * n) * ds # may decrease convergence rates
# ** Classical Nitsche
# a += beta * p * q * ds # may decrease convergence rates (Nitsche)
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_sipg(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
V = FunctionSpace(mesh, pressure_family, degree)
# Trial and test functions
p = TrialFunction(V)
q = TestFunction(V)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Edge stabilizing parameter
beta0 = Constant(1e1)
beta = beta0 / h
# Symmetry term. Choose if the method is SIPG (-1) or NIPG (1)
s = Constant(-1)
# Classical volumetric terms
a = inner(grad(p), grad(q)) * dx
L = f * q * dx
# DG edge terms
a += s * dot(jump(p, n), avg(grad(q))) * dS - dot(avg(grad(p)), jump(q, n)) * dS
# Edge stabilizing terms
a += beta("+") * dot(jump(p, n), jump(q, n)) * dS
# Weak boundary conditions
a += s * dot(p * n, grad(q)) * ds - dot(grad(p), q * n) * ds
a += beta * p * q * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = V.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_dls(mesh, degree=1):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
W = U * V
# Trial and test functions
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Dirichlet BCs
# bcs = DirichletBC(W[0], sigma_e, "on_boundary", method="geometric")
# Average cell size and mesh dependent stabilization
h_avg = (h("+") + h("-")) / 2.0
# Jump stabilizing parameters based on Badia-Codina stabilized dG method
# L0 = 1
# eta_p = L0 * h_avg # method B in the Badia-Codina paper
eta_p = 1
# eta_p = L0 * L0 # method D in the Badia-Codina paper
# eta_u = h_avg / L0 # method B in the Badia-Codina paper
eta_u = 1
# eta_u_bc = h / L0 # method B in the Badia-Codina paper
eta_u_bc = 1
# Least-Squares weights
delta = Constant(1.0)
# delta = h
delta_0 = delta
delta_1 = delta
delta_2 = delta
delta_3 = 1 / h
delta_4 = 1 / h
# Least-squares terms
a = delta_0 * inner(u + grad(p), v + grad(q)) * dx
a += delta_1 * div(u) * div(v) * dx
a += delta_2 * inner(curl(u), curl(v)) * dx
# Edge stabilizing terms
# ** Badia-Codina based (better results) **
a += eta_u * avg(delta_3) * (jump(u, n) * jump(v, n)) * dS
a += eta_p * avg(delta_4) * dot(jump(p, n), jump(q, n)) * dS
a += eta_u_bc * delta_3 * p * q * ds # may decrease convergence rates
a += eta_u_bc * delta_4 * dot(u, n) * dot(v, n) * ds
# ** Mesh independent **
# a += jump(u, n) * jump(v, n) * dS
# a += dot(jump(p, n), jump(q, n)) * dS
# a += p * q * ds
A = assemble(a, mat_type="aij")
petsc_mat = A.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-12)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = W.dim()
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=A,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric
)
return result
def solve_poisson_sdhm(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# BCs
u_projected = sigma_e
p_boundaries = p_exact
bcs = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e-18)
# beta = beta_0 / h
beta = beta_0
# Stabilization parameters
delta_0 = Constant(-1)
delta_1 = Constant(-0.5) * h * h
delta_2 = Constant(0.5) * h * h
delta_3 = Constant(0.5) * h * h
# Mixed classical terms
a = (dot(u, v) - div(v) * p + delta_0 * q * div(u)) * dx
L = delta_0 * f * q * dx
# Stabilizing terms
a += delta_1 * inner(u + grad(p), v + grad(q)) * dx
a += delta_2 * div(u) * div(v) * dx
a += delta_3 * inner(curl(u), curl(v)) * dx
L += delta_2 * f * div(v) * dx
# Hybridization terms
a += lambda_h("+") * dot(v, n)("+") * dS + mu_h("+") * dot(u, n)("+") * dS
a += beta("+") * (lambda_h("+") - p("+")) * (mu_h("+") - q("+")) * dS
# Weakly imposed BC
a += (p_boundaries * dot(v, n) + mu_h * (dot(u, n) - dot(u_projected, n))) * ds
a += beta * (lambda_h - p_boundaries) * mu_h * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
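# Static condensation via Slate: S is the Schur complement obtained by
# eliminating the (u, p) blocks, i.e. the global system posed on the
# multiplier lambda_h alone.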
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bcs
)
return result
def solve_poisson_hdg(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
bc_multiplier = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e0)
beta = beta_0 / h
# beta = beta_0
# Numerical flux trace
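# u_hat adds a stabilizing penalty on the mismatch between the local pressure p
# and its trace lambda_h; beta plays the role of the HDG stabilization parameter.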
u_hat = u + beta * (p - lambda_h) * n
# HDG classical form
a = (dot(u, v) - div(v) * p) * dx + lambda_h("+") * jump(v, n) * dS
a += -dot(u, grad(q)) * dx + jump(u_hat, n) * q("+") * dS
L = f * q * dx
# Transmission condition
a += jump(u_hat, n) * mu_h("+") * dS
# Weakly imposed BC
a += lambda_h * dot(v, n) * ds
a += dot(u_hat, n) * q * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_cgh(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
trace_family = "HDiv Trace"
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
T = FunctionSpace(mesh, trace_family, degree)
W = V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
p, lambda_h = TrialFunctions(W)
q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
bc_multiplier = DirichletBC(W.sub(1), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0e0)
beta = beta_0 / h
# beta = beta_0
# Numerical flux trace
u = -grad(p)
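# In this primal hybridized variant the flux is reconstructed from the pressure
# gradient instead of being an independent unknown.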
u_hat = u + beta * (p - lambda_h) * n
# HDG classical form
a = -dot(u, grad(q)) * dx + jump(u_hat, n) * q("+") * dS
L = f * q * dx
# Transmission condition
a += jump(u_hat, n) * mu_h("+") * dS
# Weakly imposed BC
a += dot(u_hat, n) * q * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[1, 1] - A[1, :1] * A[:1, :1].inv * A[:1, 1]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_ldgc(
mesh,
degree=1,
is_multiplier_continuous=True
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
primal_family = "DQ" if use_quads else "DG"
V = FunctionSpace(mesh, primal_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
trace_family = "HDiv Trace"
T = FunctionSpace(mesh, trace_family, degree)
W = V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
p, lambda_h = TrialFunctions(W)
q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
# Forcing function
f_expression = div(-grad(p_exact))
f = Function(V).interpolate(f_expression)
# Dirichlet BCs
p_boundaries = Constant(0.0)
bc_multiplier = DirichletBC(W.sub(1), p_exact, "on_boundary")
# Hybridization parameter
s = Constant(-1.0)
beta = Constant(32.0)
h = CellDiameter(mesh)
h_avg = avg(h)
# Classical term
a = dot(grad(p), grad(q)) * dx
L = f * q * dx
# Hybridization terms
a += s * dot(grad(q), n)("+") * (p("+") - lambda_h("+")) * dS
a += -dot(grad(p), n)("+") * (q("+") - mu_h("+")) * dS
a += (beta / h_avg) * (p("+") - lambda_h("+")) * (q("+") - mu_h("+")) * dS
# Boundary terms
# a += -dot(vel_projected, n) * v * ds # How to set this bc??
# a += (beta / h) * (p - p_boundaries) * q * ds # is this necessary?
L += s * dot(grad(q), n) * p_boundaries * ds
F = a - L
a_form = lhs(F)
_A = Tensor(a_form)
A = _A.blocks
S = A[1, 1] - A[1, :1] * A[:1, :1].inv * A[:1, 1]
Smat = assemble(S, bcs=bc_multiplier)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bc_multiplier
)
return result
def solve_poisson_lsh(
mesh,
degree=1,
is_multiplier_continuous=False
):
# Function space declaration
use_quads = str(mesh.ufl_cell()) == "quadrilateral"
pressure_family = 'DQ' if use_quads else 'DG'
velocity_family = 'DQ' if use_quads else 'DG'
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
if is_multiplier_continuous:
LagrangeElement = FiniteElement("Lagrange", mesh.ufl_cell(), degree)
C0TraceElement = LagrangeElement["facet"]
T = FunctionSpace(mesh, C0TraceElement)
else:
trace_family = "HDiv Trace"
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T
# Trial and test functions
# solution = Function(W)
# u, p, lambda_h = split(solution)
u, p, lambda_h = TrialFunctions(W)
v, q, mu_h = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
h = CellDiameter(mesh)
x, y = SpatialCoordinate(mesh)
# Exact solution
p_exact = sin(2 * pi * x) * sin(2 * pi * y)
exact_solution = Function(V).interpolate(p_exact)
exact_solution.rename("Exact pressure", "label")
sigma_e = Function(U, name='Exact velocity')
sigma_e.project(-grad(p_exact))
# BCs
bcs = DirichletBC(W.sub(2), p_exact, "on_boundary")
# Hybridization parameter
beta_0 = Constant(1.0)
beta = beta_0 / h
beta_avg = beta_0 / h("+")
# Stabilizing parameter
# delta_0 = Constant(1)
# delta_1 = Constant(1)
# delta_2 = Constant(1)
# delta_3 = Constant(1)
# delta_4 = Constant(1)
# delta_5 = Constant(1)
# LARGE_NUMBER = Constant(1e0)
delta = h * h
# delta = Constant(1)
# delta = h
delta_0 = delta
delta_1 = delta
delta_2 = delta
delta_3 = delta
delta_4 = delta
# delta_4 = LARGE_NUMBER / h
delta_5 = delta
# Numerical flux trace
u_hat = u + beta * (p - lambda_h) * n
v_hat = v + beta * (q - mu_h) * n
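# v_hat mirrors u_hat for the test functions; it is only used by the
# commented-out penalty variants below.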
# Flux least-squares
# a = (
# (inner(u, v) - q * div(u) - p * div(v) + inner(grad(p), grad(q)))
# * delta_1
# * dx
# )
# # These terms below are unsymmetric
# a += delta_1 * jump(u_hat, n=n) * q("+") * dS
# a += delta_1("+") * dot(u_hat, n) * q * ds
# # a += delta_1 * dot(u, n) * q * ds
# # L = -delta_1 * dot(u_projected, n) * q * ds
# a += delta_1("+") * lambda_h("+") * jump(v, n=n) * dS
# a += delta_1 * lambda_h * dot(v, n) * ds
# # L = delta_1 * p_exact * dot(v, n) * ds
# Flux Least-squares as in DG
a = delta_0 * inner(u + grad(p), v + grad(q)) * dx
# Classical mixed Darcy eq. first-order terms as stabilizing terms
a += delta_1 * (dot(u, v) - div(v) * p) * dx
a += delta_1("+") * lambda_h("+") * jump(v, n=n) * dS
a += delta_1 * lambda_h * dot(v, n) * ds
# Mass balance least-square
a += delta_2 * div(u) * div(v) * dx
# L = delta_2 * f * div(v) * dx
# Irrotational least-squares
a += delta_3 * inner(curl(u), curl(v)) * dx
# Hybridization terms
a += mu_h("+") * jump(u_hat, n=n) * dS
a += delta_4("+") * (p("+") - lambda_h("+")) * (q("+") - mu_h("+")) * dS
# a += delta_4 * (p - lambda_h) * (q - mu_h) * ds
# a += delta_5 * (dot(u, n)("+") - dot(u_hat, n)("+")) * (dot(v, n)("+") - dot(v_hat, n)("+")) * dS
# a += delta_5 * (dot(u, n) - dot(u_hat, n)) * (dot(v, n) - dot(v_hat, n)) * ds
# Weakly imposed BC from hybridization
# a += mu_h * (lambda_h - p_boundaries) * ds
# a += mu_h * lambda_h * ds
# ###
# a += (
# (mu_h - q) * (lambda_h - p_boundaries) * ds
# ) # maybe this is not a good way to impose BCs, but it is necessary here
_A = Tensor(a)
A = _A.blocks
S = A[2, 2] - A[2, :2] * A[:2, :2].inv * A[:2, 2]
Smat = assemble(S, bcs=bcs)
petsc_mat = Smat.M.handle
is_symmetric = petsc_mat.isSymmetric(tol=1e-8)
size = petsc_mat.getSize()
Mnp = csr_matrix(petsc_mat.getValuesCSR()[::-1], shape=size)
Mnp.eliminate_zeros()
nnz = Mnp.nnz
number_of_dofs = Mnp.shape[0]
num_of_factors = int(number_of_dofs) - 1
condition_number = calculate_condition_number(petsc_mat, num_of_factors)
result = ConditionNumberResult(
form=a,
assembled_form=Smat,
condition_number=condition_number,
sparse_operator=Mnp,
number_of_dofs=number_of_dofs,
nnz=nnz,
is_operator_symmetric=is_symmetric,
bcs=bcs
)
return result
def hp_refinement_cond_number_calculation(
solver,
min_degree=1,
max_degree=4,
numel_xy=(5, 10, 15, 20, 25),
quadrilateral=True,
name="",
**kwargs
):
results_dict = {
"Element": list(),
"Number of Elements": list(),
"Degree": list(),
"Symmetric": list(),
"nnz": list(),
"dofs": list(),
"h": list(),
"Condition Number": list(),
}
element_kind = "Quad" if quadrilateral else "Tri"
pbar = tqdm(range(min_degree, max_degree))  # note: range() makes max_degree exclusive
for degree in pbar:
for n in numel_xy:
pbar.set_description(f"Processing {name} - degree = {degree} - N = {n}")
mesh = UnitSquareMesh(n, n, quadrilateral=quadrilateral)
result = solver(mesh, degree=degree)
current_cell_size = mesh.cell_sizes.dat.data_ro.min() if not quadrilateral else 1 / n
results_dict["Element"].append(element_kind)
results_dict["Number of Elements"].append(n * n)
results_dict["Degree"].append(degree)
results_dict["Symmetric"].append(result.is_operator_symmetric)
results_dict["nnz"].append(result.nnz)
results_dict["dofs"].append(result.number_of_dofs)
results_dict["h"].append(current_cell_size)
results_dict["Condition Number"].append(result.condition_number)
os.makedirs("./cond_number_results/results_%s" % name, exist_ok=True)
df_cond_number = pd.DataFrame(data=results_dict)
path_to_save_results = "./cond_number_results/results_%s/cond_numbers.csv" % name
df_cond_number.to_csv(path_to_save_results)
return df_cond_number
# Solver options
solvers_options = {
# "cg": solve_poisson_cg,
# "cgls": solve_poisson_cgls,
# "dgls": solve_poisson_dgls,
# "sdhm": solve_poisson_sdhm,
# "ls": solve_poisson_ls,
# "dls": solve_poisson_dls,
"lsh": solve_poisson_lsh,
# "vms": solve_poisson_vms,
# "dvms": solve_poisson_dvms,
# "mixed_RT": solve_poisson_mixed_RT,
# "hdg": solve_poisson_hdg,
# "cgh": solve_poisson_cgh,
# "ldgc": solve_poisson_ldgc,
# "sipg": solve_poisson_sipg,
}
degree = 1
last_degree = 1
for current_solver in solvers_options:
# Setting the output file name
name = f"{current_solver}"
# Selecting the solver and its kwargs
solver = solvers_options[current_solver]
# Performing the convergence study
hp_refinement_cond_number_calculation(
solver,
min_degree=degree,
max_degree=degree + last_degree,
quadrilateral=True,
name=name
)
# N = 5
# mesh = UnitSquareMesh(N, N, quadrilateral=True)
# result = solve_poisson_lsh(mesh, degree=1)
# print(f'Is symmetric? {result.is_operator_symmetric}')
# print(f'nnz: {result.nnz}')
# print(f'DoFs: {result.number_of_dofs}')
# print(f'Condition Number: {result.condition_number}')
# # Plotting the resulting matrix
# matplotlib.use('TkAgg')
# import copy
# my_cmap = copy.copy(plt.cm.get_cmap("winter"))
# my_cmap.set_bad(color="lightgray")
# # plot_matrix_primal_hybrid_full(result.form, result.bcs, cmap=my_cmap)
# # plot_matrix_mixed_hybrid_full(result.form, result.bcs, cmap=my_cmap)
# plot_matrix_hybrid_multiplier(result.form, trace_index=2, bcs=result.bcs, cmap=my_cmap)
# # plot_matrix(result.assembled_form, cmap=my_cmap)
# # plot_matrix_mixed(result.assembled_form, cmap=my_cmap)
# plt.tight_layout()
# plt.savefig("sparse_pattern.png")
# plt.show()
| 30.807237 | 114 | 0.617528 | 6,697 | 46,827 | 4.13409 | 0.067045 | 0.024561 | 0.025572 | 0.013725 | 0.813805 | 0.793289 | 0.780214 | 0.768005 | 0.75847 | 0.749151 | 0 | 0.015045 | 0.247699 | 46,827 | 1,520 | 115 | 30.807237 | 0.770864 | 0.20044 | 0 | 0.745303 | 0 | 0 | 0.035617 | 0.002186 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022965 | false | 0 | 0.012526 | 0 | 0.06785 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4d711198a223af0615e717b95a37866d231b085 | 1,242 | py | Python | ex035A11.py | gabrieleliasdev/python-cev | 45390963b5112a982e673f6a6866da422bf9ae6d | ["MIT"] | null | null | null | ex035A11.py | gabrieleliasdev/python-cev | 45390963b5112a982e673f6a6866da422bf9ae6d | ["MIT"] | null | null | null | ex035A11.py | gabrieleliasdev/python-cev | 45390963b5112a982e673f6a6866da422bf9ae6d | ["MIT"] | null | null | null |
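# The prints below exercise ANSI escape sequences of the form \033[<style>;<fg>;<bg>m:
# the first number selects a style (e.g. 0 normal, 1 bold, 4 underline, 7 inverse),
# 30-37 set the foreground color, 40-47 the background color, and \033[m resets the formatting.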
print('\033[0;33;44mTeste\033[m')
print('\033[4;33;44mTeste\033[m')
print('\033[1;35;43mTeste\033[m')
print('\033[7;32;40mTeste\033[m')
print('\033[7;30mTeste\033[m')
print(" - - - Testando os 40 - - -")
print("\033[0;37;40mPreto\033[m")
print("\033[0;30;41mVermelho\033[m")
print("\033[0;30;42mVerde\033[m")
print("\033[0;30;43mAmarelo\033[m")
print("\033[0;30;44mRoxo\033[m")
print("\033[0;30;45mLilás\033[m")
print("\033[0;30;46mTurquesa\033[m")
print("\033[0;30;47mBranco\033[m")
print("\033[0;36;48mFundo Transparente\033[m")
print(" - - - Testando os 30 - - -")
print("\033[0;37;40mTeste\033[m")
print("\033[0;31;40mTeste\033[m")
print("\033[0;32;40mTeste\033[m")
print("\033[0;33;40mTeste\033[m")
print("\033[0;34;40mTeste\033[m")
print("\033[0;35;40mTeste\033[m")
print("\033[0;36;40mTeste\033[m")
print("\033[0;37;40mTeste\033[m")
print("\033[0;38;40mTeste\033[m")
print(" - - - Testando os 1ª - - -")
print("\033[0;30;47mTeste\033[m")
print("\033[1;30;47mTexto em Negrito\033[m")
print("\033[2;30;47mTeste\033[m")
print("\033[3;30;47mFonta Itálica\033[m")
print("\033[4;30;47mSublinhado\033[m")
print("\033[5;30;47mTeste\033[m")
print("\033[6;30;47mTeste\033[m")
print("\033[7;30;47mTeste\033[m")
print("\033[7;38;47mTeste\033[m")
| 33.567568 | 46 | 0.665056 | 239 | 1,242 | 3.456067 | 0.188285 | 0.309927 | 0.337772 | 0.40678 | 0.690073 | 0.584746 | 0.133172 | 0.077482 | 0.077482 | 0.077482 | 0 | 0.302128 | 0.053945 | 1,242 | 37 | 47 | 33.567568 | 0.400851 | 0 | 0 | 0.057143 | 0 | 0 | 0.716814 | 0.567981 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
be0006e92a529db72d1a914a113e9040dbe56c1e | 48,343 | py | Python | test/sec_full.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | ["MIT"] | null | null | null | test/sec_full.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | ["MIT"] | 1 | 2019-07-03T13:32:38.000Z | 2019-07-03T13:32:38.000Z | test/sec_full.py | time-track-tool/time-track-tool | a1c280f32a7766e460c862633b748fa206256f24 | ["MIT"] | 1 | 2019-05-15T16:01:31.000Z | 2019-05-15T16:01:31.000Z |
security = """
New Web users get the Roles "User,Nosy"
New Email users get the Role "User"
Role "admin":
User may access the rest interface (Rest Access)
User may access the web interface (Web Access)
User may access the xmlrpc interface (Xmlrpc Access)
User may create everything (Create)
User may edit everything (Edit)
User may manipulate user Roles through the web (Web Roles)
User may restore everything (Restore)
User may retire everything (Retire)
User may use the email interface (Email Access)
User may view everything (View)
Role "anonymous":
User may access the web interface (Web Access)
Role "cc-permission":
(Restore for "cost_center_permission_group" only)
(Retire for "cost_center_permission_group" only)
User is allowed to create cost_center_permission_group (Create for "cost_center_permission_group" only)
User is allowed to edit cost_center_permission_group (Edit for "cost_center_permission_group" only)
Role "contact":
User is allowed to create contact (Create for "contact" only)
User is allowed to edit contact (Edit for "contact" only)
Role "controlling":
User is allowed Edit on (Edit for "daily_record": ('status', 'time_record') only)
User is allowed Edit on (Edit for "sap_cc": ('group_lead', 'team_lead') only)
User is allowed Edit on (Edit for "time_project": ('group_lead', 'team_lead') only)
User is allowed Edit on (Edit for "time_wp": ('project',) only)
User is allowed View on (View for "user": ('roles',) only)
User is allowed View on (View for "user_dynamic": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only)
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to access daily_record (View for "daily_record" only)
User is allowed to access daily_record_freeze (View for "daily_record_freeze" only)
User is allowed to access leave_submission (View for "leave_submission" only)
User is allowed to access overtime_correction (View for "overtime_correction" only)
User is allowed to access query (View for "query" only)
User is allowed to access time_project (View for "time_project" only)
User is allowed to access time_record (View for "time_record" only)
User is allowed to access time_report (View for "time_report" only)
User is allowed to access time_wp (View for "time_wp" only)
User is allowed to access vacation_correction (View for "vacation_correction" only)
User is allowed to create cost_center (Create for "cost_center" only)
User is allowed to create cost_center_group (Create for "cost_center_group" only)
User is allowed to create cost_center_status (Create for "cost_center_status" only)
User is allowed to create department (Create for "department" only)
User is allowed to create organisation (Create for "organisation" only)
User is allowed to create product_family (Create for "product_family" only)
User is allowed to create public_holiday (Create for "public_holiday" only)
User is allowed to create query (Create for "query" only)
User is allowed to create reporting_group (Create for "reporting_group" only)
User is allowed to create sap_cc (Create for "sap_cc" only)
User is allowed to create time_activity (Create for "time_activity" only)
User is allowed to create time_activity_perm (Create for "time_activity_perm" only)
User is allowed to create time_record (Create for "time_record" only)
User is allowed to create work_location (Create for "work_location" only)
User is allowed to edit cost_center (Edit for "cost_center" only)
User is allowed to edit cost_center_group (Edit for "cost_center_group" only)
User is allowed to edit cost_center_status (Edit for "cost_center_status" only)
User is allowed to edit department (Edit for "department" only)
User is allowed to edit organisation (Edit for "organisation" only)
User is allowed to edit product_family (Edit for "product_family" only)
User is allowed to edit public_holiday (Edit for "public_holiday" only)
User is allowed to edit query (Edit for "query" only)
User is allowed to edit reporting_group (Edit for "reporting_group" only)
User is allowed to edit sap_cc (Edit for "sap_cc" only)
User is allowed to edit time_activity (Edit for "time_activity" only)
User is allowed to edit time_activity_perm (Edit for "time_activity_perm" only)
User is allowed to edit time_record (Edit for "time_record" only)
User is allowed to edit work_location (Edit for "work_location" only)
Role "doc_admin":
User is allowed Edit on (Edit for "department": ('doc_num',) only)
User is allowed to create artefact (Create for "artefact" only)
User is allowed to create doc (Create for "doc" only)
User is allowed to create doc_category (Create for "doc_category" only)
User is allowed to create doc_status (Create for "doc_status" only)
User is allowed to create product_type (Create for "product_type" only)
User is allowed to create reference (Create for "reference" only)
User is allowed to edit artefact (Edit for "artefact" only)
User is allowed to edit doc (Edit for "doc" only)
User is allowed to edit doc_category (Edit for "doc_category" only)
User is allowed to edit doc_status (Edit for "doc_status" only)
User is allowed to edit product_type (Edit for "product_type" only)
User is allowed to edit reference (Edit for "reference" only)
Role "dom-user-edit-facility":
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (Edit for "user": ['room'] only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (View for "user": ['room'] only)
Role "dom-user-edit-gtt":
(Search for "user_dynamic" only)
May only view/edit records with the correct domain (Edit for "user_dynamic" only)
May only view/edit records with the correct domain (View for "user_dynamic" only)
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to create user (Create for "user" only)
User is allowed to create user_contact (Create for "user_contact" only)
User is allowed to create user_dynamic (Create for "user_dynamic" only)
User is allowed to edit user_contact (Edit for "user_contact" only)
Users may view user_dynamic records for ad_domain for which they are in the domain_permission for the user (View for "user_dynamic" only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (Edit for "user": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (View for "user": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only)
Role "dom-user-edit-hr":
(Search for "user_dynamic" only)
May only view/edit records with the correct domain (Edit for "user_dynamic" only)
May only view/edit records with the correct domain (View for "user_dynamic" only)
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to create user_contact (Create for "user_contact" only)
User is allowed to create user_dynamic (Create for "user_dynamic" only)
User is allowed to edit user_contact (Edit for "user_contact" only)
Users may view user_dynamic records for ad_domain for which they are in the domain_permission for the user (View for "user_dynamic" only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (Edit for "user": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (View for "user": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only)
Role "dom-user-edit-office":
User is allowed to create user_contact (Create for "user_contact" only)
User is allowed to edit user_contact (Edit for "user_contact" only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (Edit for "user": ['contacts', 'position_text', 'room'] only)
Users may view/edit user records for ad_domain for which they are in the domain_permission for the user (View for "user": ['contacts', 'position_text', 'room'] only)
Role "external":
(Search for "ext_tracker_state": ('id', 'issue') only)
(Search for "user": ('id', 'nickname', 'username') only)
External users are allowed to access issue if they are on the list of allowed external users or there is a transitive permission via containers (Edit for "issue": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only)
External users are allowed to access issue if they are on the list of allowed external users or there is a transitive permission via containers (View for "issue": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only)
User is allowed View on (View for "category": ('id', 'name') only)
User is allowed View on (View for "user": ('nickname', 'status', 'username') only)
User is allowed View on (View for "user_status": ('name',) only)
User is allowed View on file if file is linked from an item with View permission (View for "file" only)
User is allowed View on msg if msg is linked from an item with View permission (View for "msg" only)
User is allowed to access area (View for "area" only)
User is allowed to access doc_issue_status (View for "doc_issue_status" only)
User is allowed to access ext_tracker (View for "ext_tracker" only)
User is allowed to access ext_tracker_state (View for "ext_tracker_state" only)
User is allowed to access ext_tracker_type (View for "ext_tracker_type" only)
User is allowed to access keyword (View for "keyword" only)
User is allowed to access kind (View for "kind" only)
User is allowed to access msg_keyword (View for "msg_keyword" only)
User is allowed to access safety_level (View for "safety_level" only)
User is allowed to access severity (View for "severity" only)
User is allowed to access status (View for "status" only)
User is allowed to access status_transition (View for "status_transition" only)
User is allowed to access test_level (View for "test_level" only)
User is allowed to create file (Create for "file" only)
User is allowed to create issue (Create for "issue" only)
User is allowed to create msg (Create for "msg" only)
User is allowed to create query (Create for "query" only)
User is allowed to edit their queries (Edit for "query" only)
User is allowed to retire their queries (Retire for "query" only)
User is allowed to search for their own files (Search for "file" only)
User is allowed to search for their own messages (Search for "msg" only)
User is allowed to search for their queries (Search for "query" only)
User is allowed to search issue (Search for "issue" only)
User is allowed to view their own files (View for "file" only)
User may access the web interface (Web Access)
User may use the email interface (Email Access)
Users are allowed to edit some of their details (Edit for "user": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only)
Users are allowed to view some of their details (View for "user": ('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname', 'username') only)
Users are allowed to view their own and public queries for classes where they have search permission (View for "query" only)
Role "facility":
(Restore for "room" only)
(Retire for "room" only)
User is allowed to create room (Create for "room" only)
User is allowed to edit room (Edit for "room" only)
Role "functional-role":
(Restore for "user_functional_role" only)
(Retire for "user_functional_role" only)
User is allowed Edit on (Edit for "user": ('business_responsible', 'scale_seniority') only)
User is allowed View on (View for "user": ('business_responsible', 'planning_role', 'scale_seniority') only)
User is allowed to access user_functional_role (View for "user_functional_role" only)
User is allowed to create user_functional_role (Create for "user_functional_role" only)
User is allowed to edit user_functional_role (Edit for "user_functional_role" only)
Role "hr":
(Edit for "overtime_period": ('name', 'order') only)
(Restore for "room" only)
(Retire for "room" only)
User is allowed Edit on (Edit for "daily_record": ('required_overtime', 'weekend_allowed') only)
User is allowed Edit on (Edit for "daily_record": ('status', 'time_record') only)
User is allowed Edit on (Edit for "time_project": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only)
User is allowed View on (View for "user": ('contacts',) only)
User is allowed to access auto_wp (View for "auto_wp" only)
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to access daily_record (View for "daily_record" only)
User is allowed to access daily_record_freeze (View for "daily_record_freeze" only)
User is allowed to access leave_submission (View for "leave_submission" only)
User is allowed to access overtime_correction (View for "overtime_correction" only)
User is allowed to access time_record (View for "time_record" only)
User is allowed to access user_contact (View for "user_contact" only)
User is allowed to access user_dynamic (View for "user_dynamic" only)
User is allowed to access vacation_correction (View for "vacation_correction" only)
User is allowed to create auto_wp (Create for "auto_wp" only)
User is allowed to create daily_record_freeze (Create for "daily_record_freeze" only)
User is allowed to create location (Create for "location" only)
User is allowed to create org_location (Create for "org_location" only)
User is allowed to create organisation (Create for "organisation" only)
User is allowed to create overtime_correction (Create for "overtime_correction" only)
User is allowed to create overtime_period (Create for "overtime_period" only)
User is allowed to create product_family (Create for "product_family" only)
User is allowed to create public_holiday (Create for "public_holiday" only)
User is allowed to create reporting_group (Create for "reporting_group" only)
User is allowed to create room (Create for "room" only)
User is allowed to create sap_cc (Create for "sap_cc" only)
User is allowed to create time_record (Create for "time_record" only)
User is allowed to create uc_type (Create for "uc_type" only)
User is allowed to create user (Create for "user" only)
User is allowed to create user_dynamic (Create for "user_dynamic" only)
User is allowed to edit auto_wp (Edit for "auto_wp" only)
User is allowed to edit dynamic user data if not frozen in validity span of dynamic user record (Edit for "user_dynamic" only)
User is allowed to edit freeze record if not frozen at the given date (Edit for "daily_record_freeze": ('frozen',) only)
User is allowed to edit location (Edit for "location" only)
User is allowed to edit org_location (Edit for "org_location" only)
User is allowed to edit organisation (Edit for "organisation" only)
User is allowed to edit overtime correction if the overtime correction is not frozen (Edit for "overtime_correction" only)
User is allowed to edit product_family (Edit for "product_family" only)
User is allowed to edit public_holiday (Edit for "public_holiday" only)
User is allowed to edit reporting_group (Edit for "reporting_group" only)
User is allowed to edit room (Edit for "room" only)
User is allowed to edit sap_cc (Edit for "sap_cc" only)
User is allowed to edit time_record (Edit for "time_record" only)
User is allowed to edit uc_type (Edit for "uc_type" only)
User may manipulate user Roles through the web (Web Roles)
Role "hr-leave-approval":
User is allowed Edit on (Edit for "leave_submission": ('status',) only)
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to access leave_submission (View for "leave_submission" only)
User is allowed to access vacation_correction (View for "vacation_correction" only)
Role "hr-org-location":
(Search for "daily_record_freeze" only)
(Search for "overtime_correction" only)
(Search for "time_activity_perm" only)
(Search for "time_record" only)
(Search for "user_dynamic" only)
User is allowed to view dynamic user data if he/she is in group HR-Org-Location and in the same Org-Location as the given user (View for "user_dynamic" only)
User is allowed to view freeze information if he/she is in group HR-Org-Location and in the same Org-Location as the given user (View for "daily_record_freeze" only)
User is allowed to view overtime information if he/she is in group HR-Org-Location and in the same Org-Location as the given user (View for "overtime_correction" only)
User is allowed to view time record data if he/she is in group HR-Org-Location and in the same Org-Location as the given user (View for "time_record" only)
Role "hr-vacation":
User is allowed to access contract_type (View for "contract_type" only)
User is allowed to access leave_submission (View for "leave_submission" only)
User is allowed to access vacation_correction (View for "vacation_correction" only)
User is allowed to create contract_type (Create for "contract_type" only)
User is allowed to create leave_submission (Create for "leave_submission" only)
User is allowed to create vacation_correction (Create for "vacation_correction" only)
User is allowed to edit contract_type (Edit for "contract_type" only)
User is allowed to edit leave_submission (Edit for "leave_submission" only)
User is allowed to edit vacation_correction (Edit for "vacation_correction" only)
Role "issue_admin":
User is allowed Edit on msg if msg is linked from an item with Edit permission (Edit for "msg" only)
User is allowed to access issue (View for "issue" only)
User is allowed to create area (Create for "area" only)
User is allowed to create category (Create for "category" only)
User is allowed to create doc_issue_status (Create for "doc_issue_status" only)
User is allowed to create ext_tracker (Create for "ext_tracker" only)
User is allowed to create issue (Create for "issue" only)
User is allowed to create keyword (Create for "keyword" only)
User is allowed to create kind (Create for "kind" only)
User is allowed to create msg_keyword (Create for "msg_keyword" only)
User is allowed to create safety_level (Create for "safety_level" only)
User is allowed to create severity (Create for "severity" only)
User is allowed to create status (Create for "status" only)
User is allowed to create status_transition (Create for "status_transition" only)
User is allowed to create test_level (Create for "test_level" only)
User is allowed to edit area (Edit for "area" only)
User is allowed to edit category (Edit for "category" only)
User is allowed to edit doc_issue_status (Edit for "doc_issue_status" only)
User is allowed to edit ext_tracker (Edit for "ext_tracker" only)
User is allowed to edit issue (Edit for "issue" only)
User is allowed to edit keyword (Edit for "keyword" only)
User is allowed to edit kind (Edit for "kind" only)
User is allowed to edit msg_keyword (Edit for "msg_keyword" only)
User is allowed to edit safety_level (Edit for "safety_level" only)
User is allowed to edit severity (Edit for "severity" only)
User is allowed to edit status (Edit for "status" only)
User is allowed to edit status_transition (Edit for "status_transition" only)
User is allowed to edit test_level (Edit for "test_level" only)
Role "it":
Create (Create for "user_contact" only)
User is allowed Edit on (Edit for "file": ('name', 'type') only)
User is allowed Edit on (Edit for "location": ('domain_part',) only)
User is allowed Edit on (Edit for "organisation": ('domain_part',) only)
User is allowed Edit on (Edit for "user": ('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only)
User is allowed Edit on (Edit for "user": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only)
User is allowed Edit on file if file is linked from an item with Edit permission (Edit for "file" only)
User is allowed Edit on msg if msg is linked from an item with Edit permission (Edit for "msg" only)
User is allowed View on file if file is linked from an item with View permission (View for "file" only)
User is allowed to access domain_permission (View for "domain_permission" only)
User is allowed to access it_int_prio (View for "it_int_prio" only)
User is allowed to access it_issue (View for "it_issue" only)
User is allowed to access it_project (View for "it_project" only)
User is allowed to create domain_permission (Create for "domain_permission" only)
User is allowed to create it_category (Create for "it_category" only)
User is allowed to create it_int_prio (Create for "it_int_prio" only)
User is allowed to create it_issue (Create for "it_issue" only)
User is allowed to create it_project (Create for "it_project" only)
User is allowed to create it_request_type (Create for "it_request_type" only)
User is allowed to create mailgroup (Create for "mailgroup" only)
User is allowed to edit domain_permission (Edit for "domain_permission" only)
User is allowed to edit it_category (Edit for "it_category" only)
User is allowed to edit it_int_prio (Edit for "it_int_prio" only)
User is allowed to edit it_issue (Edit for "it_issue" only)
User is allowed to edit it_project (Edit for "it_project" only)
User is allowed to edit it_request_type (Edit for "it_request_type" only)
User is allowed to edit mailgroup (Edit for "mailgroup" only)
User may manipulate user Roles through the web (Web Roles)
Role "itview":
User is allowed to access it_int_prio (View for "it_int_prio" only)
User is allowed to access it_issue (View for "it_issue" only)
User is allowed to access it_project (View for "it_project" only)
Role "msgedit":
(Search for "msg": ('date', 'id') only)
User is allowed Edit on (Edit for "msg": ('author', 'date', 'id', 'keywords', 'subject', 'summary') only)
User is allowed to access ext_msg (View for "ext_msg" only)
User is allowed to access ext_tracker_state (View for "ext_tracker_state" only)
User is allowed to access ext_tracker_type (View for "ext_tracker_type" only)
Role "msgsync":
(Search for "msg": ('date', 'id') only)
User is allowed Edit on (Edit for "msg": ('author', 'date', 'id', 'keywords', 'subject', 'summary') only)
User is allowed to access ext_msg (View for "ext_msg" only)
User is allowed to access ext_tracker_state (View for "ext_tracker_state" only)
User is allowed to access ext_tracker_type (View for "ext_tracker_type" only)
User is allowed to create ext_msg (Create for "ext_msg" only)
User is allowed to create ext_tracker_state (Create for "ext_tracker_state" only)
User is allowed to edit ext_msg (Edit for "ext_msg" only)
User is allowed to edit ext_tracker_state (Edit for "ext_tracker_state" only)
Role "nosy":
User may get nosy messages for doc (Nosy for "doc" only)
User may get nosy messages for issue (Nosy for "issue" only)
User may get nosy messages for it_issue (Nosy for "it_issue" only)
User may get nosy messages for it_project (Nosy for "it_project" only)
User may get nosy messages for support (Nosy for "support" only)
Role "office":
(Restore for "room" only)
(Retire for "room" only)
User is allowed View on (View for "user": ('contacts',) only)
User is allowed to access user_contact (View for "user_contact" only)
User is allowed to create absence (Create for "absence" only)
User is allowed to create absence_type (Create for "absence_type" only)
User is allowed to create room (Create for "room" only)
User is allowed to create uc_type (Create for "uc_type" only)
User is allowed to edit absence (Edit for "absence" only)
User is allowed to edit absence_type (Edit for "absence_type" only)
User is allowed to edit room (Edit for "room" only)
User is allowed to edit uc_type (Edit for "uc_type" only)
Role "organisation":
User is allowed to access location (View for "location" only)
User is allowed to access org_location (View for "org_location" only)
User is allowed to access organisation (View for "organisation" only)
User is allowed to create location (Create for "location" only)
User is allowed to create org_location (Create for "org_location" only)
User is allowed to create organisation (Create for "organisation" only)
User is allowed to edit location (Edit for "location" only)
User is allowed to edit org_location (Edit for "org_location" only)
User is allowed to edit organisation (Edit for "organisation" only)
Role "pgp":
Role "procurement":
(View for "sap_cc" only)
(View for "time_project" only)
User is allowed Edit on (Edit for "sap_cc": ('group_lead', 'purchasing_agents', 'team_lead') only)
User is allowed Edit on (Edit for "time_project": ('group_lead', 'purchasing_agents', 'team_lead') only)
Role "project":
User is allowed Edit on (Edit for "time_project": ('cost_center', 'department', 'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible', 'status') only)
User is allowed Edit on (Edit for "time_project": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only)
User is allowed to access time_project (View for "time_project" only)
User is allowed to access time_report (View for "time_report" only)
User is allowed to access time_wp (View for "time_wp" only)
User is allowed to create time_project (Create for "time_project" only)
User is allowed to create time_project_status (Create for "time_project_status" only)
User is allowed to create time_wp (Create for "time_wp" only)
User is allowed to create time_wp_group (Create for "time_wp_group" only)
User is allowed to edit time_project_status (Edit for "time_project_status" only)
User is allowed to edit time_wp (Edit for "time_wp" only)
User is allowed to edit time_wp_group (Edit for "time_wp_group" only)
Role "project_view":
User is allowed to access time_project (View for "time_project" only)
User is allowed to access time_report (View for "time_report" only)
User is allowed to access time_wp (View for "time_wp" only)
Role "sec-incident-nosy":
User is allowed to access it_int_prio (View for "it_int_prio" only)
User is allowed to access it_issue (View for "it_issue" only)
User is allowed to access it_project (View for "it_project" only)
Role "sec-incident-responsible":
User is allowed to access it_int_prio (View for "it_int_prio" only)
User is allowed to access it_issue (View for "it_issue" only)
User is allowed to access it_project (View for "it_project" only)
Role "staff-report":
Role "sub-login":
Role "summary_view":
Role "supportadmin":
User is allowed to access analysis_result (View for "analysis_result" only)
User is allowed to access contact (View for "contact" only)
User is allowed to access customer (View for "customer" only)
User is allowed to access customer_agreement (View for "customer_agreement" only)
User is allowed to access mailgroup (View for "mailgroup" only)
User is allowed to access return_type (View for "return_type" only)
User is allowed to access sup_classification (View for "sup_classification" only)
User is allowed to access support (View for "support" only)
User is allowed to create analysis_result (Create for "analysis_result" only)
User is allowed to create contact (Create for "contact" only)
User is allowed to create customer (Create for "customer" only)
User is allowed to create customer_agreement (Create for "customer_agreement" only)
User is allowed to create mailgroup (Create for "mailgroup" only)
User is allowed to create return_type (Create for "return_type" only)
User is allowed to create sup_classification (Create for "sup_classification" only)
User is allowed to create support (Create for "support" only)
User is allowed to edit analysis_result (Edit for "analysis_result" only)
User is allowed to edit contact (Edit for "contact" only)
User is allowed to edit customer (Edit for "customer" only)
User is allowed to edit customer_agreement (Edit for "customer_agreement" only)
User is allowed to edit mailgroup (Edit for "mailgroup" only)
User is allowed to edit return_type (Edit for "return_type" only)
User is allowed to edit sup_classification (Edit for "sup_classification" only)
User is allowed to edit support (Edit for "support" only)
Role "time-report":
User is allowed to access time_report (View for "time_report" only)
User is allowed to create time_report (Create for "time_report" only)
User is allowed to edit time_report (Edit for "time_report" only)
User may edit own file (file created by user) (Edit for "file" only)
Role "user":
(Search for "time_project": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only)
(Search for "time_wp": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only)
(View for "time_project": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only)
Search (Search for "user_contact" only)
User is allowed Edit on (Edit for "msg": ('keywords',) only)
User is allowed Edit on file if file is linked from an item with Edit permission (Edit for "file" only)
User is allowed Edit on issue if issue is non-confidential or user is on nosy list (Edit for "issue" only)
User is allowed Edit on it_issue if it_issue is non-confidential or user is on nosy list (Edit for "it_issue": ('messages', 'files', 'nosy') only)
User is allowed Edit on it_project if it_project is non-confidential or user is on nosy list (Edit for "it_project": ('messages', 'files', 'nosy') only)
User is allowed Edit on support if support is non-confidential or user is on nosy list (Edit for "support": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty') only)
User is allowed View on (View for "user": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username') only)
User is allowed View on (View for "user": ('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname', 'status', 'timezone', 'username') only)
User is allowed View on (View for "user": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only)
User is allowed View on (View for "user": ('contacts',) only)
User is allowed View on (View for "user_dynamic": ('department', 'org_location') only)
User is allowed View on file if file is linked from an item with View permission (View for "file" only)
User is allowed View on issue if issue is non-confidential or user is on nosy list (View for "issue" only)
User is allowed View on it_issue if it_issue is non-confidential or user is on nosy list (View for "it_issue" only)
User is allowed View on it_project if it_project is non-confidential or user is on nosy list (View for "it_project" only)
User is allowed View on msg if msg is linked from an item with View permission (View for "msg" only)
User is allowed View on support if support is non-confidential or user is on nosy list (View for "support" only)
User is allowed to access absence (View for "absence" only)
User is allowed to access absence_type (View for "absence_type" only)
User is allowed to access analysis_result (View for "analysis_result" only)
User is allowed to access area (View for "area" only)
User is allowed to access artefact (View for "artefact" only)
User is allowed to access business_unit (View for "business_unit" only)
User is allowed to access category (View for "category" only)
User is allowed to access contact (View for "contact" only)
User is allowed to access contact_type (View for "contact_type" only)
User is allowed to access cost_center (View for "cost_center" only)
User is allowed to access cost_center_group (View for "cost_center_group" only)
User is allowed to access cost_center_permission_group (View for "cost_center_permission_group" only)
User is allowed to access cost_center_status (View for "cost_center_status" only)
User is allowed to access customer (View for "customer" only)
User is allowed to access customer_agreement (View for "customer_agreement" only)
User is allowed to access daily record if he is owner or supervisor or timetracking-by user (Edit for "daily_record": ('status', 'time_record') only)
User is allowed to access daily record if he is owner or supervisor or timetracking-by user (View for "daily_record" only)
User is allowed to access daily_record_status (View for "daily_record_status" only)
User is allowed to access department (View for "department" only)
User is allowed to access doc (View for "doc" only)
User is allowed to access doc_category (View for "doc_category" only)
User is allowed to access doc_issue_status (View for "doc_issue_status" only)
User is allowed to access doc_status (View for "doc_status" only)
User is allowed to access ext_tracker (View for "ext_tracker" only)
User is allowed to access ext_tracker_state (View for "ext_tracker_state" only)
User is allowed to access ext_tracker_type (View for "ext_tracker_type" only)
User is allowed to access functional_role (View for "functional_role" only)
User is allowed to access it_category (View for "it_category" only)
User is allowed to access it_issue_status (View for "it_issue_status" only)
User is allowed to access it_prio (View for "it_prio" only)
User is allowed to access it_project_status (View for "it_project_status" only)
User is allowed to access it_request_type (View for "it_request_type" only)
User is allowed to access keyword (View for "keyword" only)
User is allowed to access kind (View for "kind" only)
User is allowed to access leave_status (View for "leave_status" only)
User is allowed to access location (View for "location" only)
User is allowed to access mailgroup (View for "mailgroup" only)
User is allowed to access msg_keyword (View for "msg_keyword" only)
User is allowed to access org_group (View for "org_group" only)
User is allowed to access org_location (View for "org_location" only)
User is allowed to access organisation (View for "organisation" only)
User is allowed to access overtime_period (View for "overtime_period" only)
User is allowed to access prodcat (View for "prodcat" only)
User is allowed to access product (View for "product" only)
User is allowed to access product_family (View for "product_family" only)
User is allowed to access product_type (View for "product_type" only)
User is allowed to access project_type (View for "project_type" only)
User is allowed to access public_holiday (View for "public_holiday" only)
User is allowed to access reference (View for "reference" only)
User is allowed to access reporting_group (View for "reporting_group" only)
User is allowed to access return_type (View for "return_type" only)
User is allowed to access room (View for "room" only)
User is allowed to access safety_level (View for "safety_level" only)
User is allowed to access sap_cc (View for "sap_cc" only)
User is allowed to access severity (View for "severity" only)
User is allowed to access sex (View for "sex" only)
User is allowed to access status (View for "status" only)
User is allowed to access status_transition (View for "status_transition" only)
User is allowed to access summary_report (View for "summary_report" only)
User is allowed to access summary_type (View for "summary_type" only)
User is allowed to access sup_classification (View for "sup_classification" only)
User is allowed to access sup_execution (View for "sup_execution" only)
User is allowed to access sup_prio (View for "sup_prio" only)
User is allowed to access sup_status (View for "sup_status" only)
User is allowed to access sup_type (View for "sup_type" only)
User is allowed to access sup_warranty (View for "sup_warranty" only)
User is allowed to access test_level (View for "test_level" only)
User is allowed to access time_activity (View for "time_activity" only)
User is allowed to access time_activity_perm (View for "time_activity_perm" only)
User is allowed to access time_project_status (View for "time_project_status" only)
User is allowed to access time_wp_group (View for "time_wp_group" only)
User is allowed to access time_wp_summary_no (View for "time_wp_summary_no" only)
User is allowed to access timesheet (View for "timesheet" only)
User is allowed to access uc_type (View for "uc_type" only)
User is allowed to access user_status (View for "user_status" only)
User is allowed to access vac_aliq (View for "vac_aliq" only)
User is allowed to access vacation_report (View for "vacation_report" only)
User is allowed to access work_location (View for "work_location" only)
User is allowed to create daily_record (Create for "daily_record" only)
User is allowed to create doc (Create for "doc" only)
User is allowed to create ext_tracker_state (Create for "ext_tracker_state" only)
User is allowed to create file (Create for "file" only)
User is allowed to create issue (Create for "issue" only)
User is allowed to create it_issue (Create for "it_issue" only)
User is allowed to create leave_submission (Create for "leave_submission" only)
User is allowed to create msg (Create for "msg" only)
User is allowed to create queries (Create for "query" only)
User is allowed to create support (Create for "support" only)
User is allowed to create time_record (Create for "time_record" only)
User is allowed to create time_wp (Create for "time_wp" only)
User is allowed to edit (some of) their own user details (Edit for "user": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only)
User is allowed to edit category if he is responsible for it (Edit for "category": ('nosy', 'default_part_of') only)
User is allowed to edit doc (Edit for "doc" only)
User is allowed to edit ext_tracker_state (Edit for "ext_tracker_state" only)
User is allowed to edit if he's the owner of the contact (Edit for "user_contact": ('visible',) only)
User is allowed to edit several fields if he is Responsible for an it_issue (Edit for "it_issue": ('responsible',) only)
User is allowed to edit several fields if he is Stakeholder/Responsible for an it_issue (Edit for "it_issue": ('deadline', 'status', 'title') only)
User is allowed to edit their queries (Edit for "query" only)
User is allowed to edit time category if the status is "Open" and he is responsible for the time category (Edit for "time_project": ('deputy', 'planned_effort', 'nosy') only)
User is allowed to edit workpackage if he is time category owner or deputy (Edit for "time_wp": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only)
User is allowed to retire their queries (Retire for "query" only)
User is allowed to search daily_record (Search for "daily_record" only)
User is allowed to search for their own files (Search for "file" only)
User is allowed to search for their own messages (Search for "msg" only)
User is allowed to search for their queries (Search for "query" only)
User is allowed to search issue (Search for "issue" only)
User is allowed to search it_issue (Search for "it_issue" only)
User is allowed to search it_project (Search for "it_project" only)
User is allowed to search leave_submission (Search for "leave_submission" only)
User is allowed to search support (Search for "support" only)
User is allowed to search time_record (Search for "time_record" only)
User is allowed to search time_wp (Search for "time_wp": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only)
User is allowed to search user_status (Search for "user": ('status',) only)
User is allowed to see time record if he is allowed to see all details on work package or User may view a daily_record (and time_records that are attached to that daily_record) if the user owns the daily_record or has role 'HR' or 'Controlling', or the user is supervisor or substitute supervisor of the owner of the daily record (the supervisor relationship is transitive) or the user is the department manager of the owner of the daily record. If user has role HR-Org-Location and is in the same Org-Location as the record, it may also be seen (View for "time_record" only)
User is allowed to view (some of) their own user details (View for "user": ('entry_date', 'planning_role') only)
User is allowed to view contact if he's the owner of the contact or the contact is marked visible (View for "user_contact" only)
User is allowed to view leave submission if he is the supervisor or the person to whom approvals are delegated (Edit for "leave_submission": ('status',) only)
User is allowed to view leave submission if he is the supervisor or the person to whom approvals are delegated (View for "leave_submission" only)
User is allowed to view selected fields in work package if booking is allowed for this user (also applies to timetracking by, supervisor and approval delegated) (View for "time_wp": ('activity', 'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only)
User is allowed to view their own files (View for "file" only)
User is allowed to view their own messages (View for "msg" only)
User is allowed to view their own overtime information (View for "overtime_correction" only)
User is allowed to view time record if he is the supervisor or the person to whom approvals are delegated (View for "time_record" only)
User is allowed to view work package and time category names if he/she has role HR or HR-Org-Location (View for "time_project": ('name',) only)
User is allowed to view work package and time category names if he/she has role HR or HR-Org-Location (View for "time_wp": ('name', 'project') only)
User is allowed to view/edit workpackage if he is owner or project responsible/deputy (Edit for "time_wp": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only)
User may access the rest interface (Rest Access)
User may access the web interface (Web Access)
User may access the xmlrpc interface (Xmlrpc Access)
User may edit own leave submissions (Edit for "leave_submission": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only)
User may edit own leave submissions (View for "leave_submission": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only)
User may see time report if responsible or deputy of time project or on nosy list of time project (View for "time_report" only)
User may use the email interface (Email Access)
User may view a daily_record (and time_records that are attached to that daily_record) if the user owns the daily_record or has role 'HR' or 'Controlling', or the user is supervisor or substitute supervisor of the owner of the daily record (the supervisor relationship is transitive) or the user is the department manager of the owner of the daily record. If user has role HR-Org-Location and is in the same Org-Location as the record, it may also be seen (View for "daily_record" only)
User may view their own user functional role (View for "user_functional_role" only)
User may view time category if user is owner or deputy of time category or on nosy list of time category or if user is department manager of time category (View for "time_project" only)
User may view work package if responsible for it, if user is owner or deputy of time category or on nosy list of time category or if user is department manager of time category (View for "time_wp" only)
User or Timetracking by user may edit time_records owned by user (Edit for "time_record" only)
User or Timetracking by user may edit time_records owned by user (Restore for "time_record" only)
User or Timetracking by user may edit time_records owned by user (Retire for "time_record" only)
User or Timetracking by user may edit time_records owned by user (View for "time_record" only)
Users are allowed to view their own and public queries for classes where they have search permission (View for "query" only)
Users may see daily record if they may see one of the time_records for that day (View for "daily_record" only)
Role "user_view":
User is allowed to access user (View for "user" only)
Role "vacation-report":
""".strip ()
| 83.063574
| 690
| 0.762034
| 7,854
| 48,343
| 4.555895
| 0.043672
| 0.078475
| 0.164217
| 0.207143
| 0.850064
| 0.833799
| 0.812224
| 0.75728
| 0.665446
| 0.621653
| 0
| 0
| 0.150632
| 48,343
| 581
| 691
| 83.20654
| 0.871435
| 0
| 0
| 0.430293
| 0
| 0.146299
| 0.999441
| 0.03229
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.006885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| be19a958423363abc9e04beed1c7e6d4e8b02233
| 8,562
| py
| Python
| examples/python/oled_ssd1327.py
| whpenner/upm
| 3168c61d8613da62ecc7598517a1decf533d5fe7
| ["MIT"] | 1
| 2017-09-22T01:41:30.000Z
| 2017-09-22T01:41:30.000Z
| bsp/intel/peripheral/libupm/examples/python/oled_ssd1327.py
| Keneral/ahardware
| 9a8a025f7c9471444c9e271bbe7f48182741d710
| ["Unlicense"] | null | null | null |
| bsp/intel/peripheral/libupm/examples/python/oled_ssd1327.py
| Keneral/ahardware
| 9a8a025f7c9471444c9e271bbe7f48182741d710
| ["Unlicense"] | 1
| 2018-02-24T19:09:04.000Z
| 2018-02-24T19:09:04.000Z
|
#!/usr/bin/python
# Author: Zion Orent <[email protected]>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Load i2clcd display module
import time, signal, sys
import pyupm_i2clcd as upmLCD
myLCD = upmLCD.SSD1327(0, 0x3C);
logoArr = [0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x08, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x60, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0x06, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0xC0, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0xC0, 0x07, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x03, 0x80, 0x03, 0x80, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x03, 0x80, 0x03, 0x80,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x07, 0x80, 0x03, 0xC0, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x07, 0x80, 0x01, 0xC0,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
0x07, 0x80, 0x01, 0xE0, 0x08, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x20, 0x0F, 0x80, 0x01, 0xE0,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30,
0x0F, 0x00, 0x01, 0xE0, 0x08, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x30, 0x0F, 0x00, 0x01, 0xE0,
0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30,
0x0F, 0x00, 0x01, 0xE0, 0x18, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x30, 0x0F, 0x00, 0x01, 0xE0,
0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38,
0x0F, 0x00, 0x01, 0xE0, 0x18, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x38, 0x0F, 0x00, 0x01, 0xE0,
0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x38,
0x0F, 0x80, 0x01, 0xE0, 0x38, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x3C, 0x0F, 0x80, 0x01, 0xE0,
0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3E,
0x0F, 0x80, 0x03, 0xE0, 0x78, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x1E, 0x07, 0x80, 0x03, 0xE0,
0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E,
0x07, 0x80, 0x03, 0xE0, 0xF0, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x1F, 0x07, 0x80, 0x03, 0xC1,
0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F,
0x87, 0xC0, 0x07, 0xC1, 0xF0, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x0F, 0x83, 0xC0, 0x07, 0x83,
0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F,
0xC3, 0xC0, 0x07, 0x87, 0xE0, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x07, 0xE1, 0xE0, 0x07, 0x0F,
0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03,
0xF0, 0xE0, 0x0F, 0x0F, 0x80, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01, 0xF8, 0xF0, 0x0E, 0x1F,
0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
0xF8, 0x70, 0x1C, 0x3F, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xFC, 0x30, 0x18, 0x7E,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x7F, 0x18, 0x30, 0xFC, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x1F, 0x88, 0x21, 0xF0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x0F, 0xC4, 0x47, 0xE0, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x03, 0xE0, 0x0F, 0x80,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0xF8, 0x3E, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xE0, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00,
0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x02, 0x00, 0x06, 0x00, 0x00, 0x6C, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x06,
0x00, 0x00, 0x60, 0x00, 0x7E, 0x3F, 0x0F, 0xC3,
0xF0, 0xFA, 0x0F, 0xDF, 0xE1, 0x9F, 0xEC, 0x7E,
0xE6, 0x73, 0x9C, 0xE7, 0x39, 0xCE, 0x1C, 0xDF,
0xE1, 0xB9, 0xEC, 0xE7, 0xE0, 0x61, 0xD8, 0x66,
0x1B, 0x86, 0x1C, 0x06, 0x61, 0xB0, 0x6D, 0xC3,
0x7C, 0x7F, 0xFF, 0xFF, 0xFF, 0x06, 0x0F, 0x86,
0x61, 0xB0, 0x6D, 0x83, 0x3E, 0x7F, 0xFF, 0xFF,
0xFF, 0x06, 0x07, 0xC6, 0x61, 0xB0, 0x6D, 0x83,
0xC3, 0x61, 0x18, 0x46, 0x03, 0x86, 0x18, 0x66,
0x61, 0xB0, 0x6D, 0xC3, 0xFE, 0x7F, 0x9F, 0xE7,
0xF9, 0xFE, 0x1F, 0xE6, 0x3F, 0x9F, 0xEC, 0xFE,
0x7E, 0x3F, 0x0F, 0xC3, 0xF0, 0xFA, 0x0F, 0xC6,
0x3F, 0x9F, 0xEC, 0x7E, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7C, 0x00,
0x00, 0x20, 0x82, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x44, 0x00, 0x00, 0x20, 0x82, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6C, 0xF3,
0xCF, 0x70, 0x9E, 0x79, 0xE7, 0x80, 0x00, 0x00,
0x00, 0x00, 0x7D, 0x9E, 0x68, 0x20, 0xB2, 0xC8,
0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x9E,
0x6F, 0x20, 0xB2, 0xF9, 0xE7, 0x80, 0x00, 0x00,
0x00, 0x00, 0x46, 0x9A, 0x61, 0x20, 0xB2, 0xCB,
0x60, 0x80, 0x00, 0x00, 0x00, 0x00, 0x7C, 0xF3,
0xCF, 0x30, 0x9E, 0x79, 0xE7, 0x90, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x7C, 0x02, 0x00, 0x00, 0x82, 0x60, 0x00, 0x00,
0xF8, 0x00, 0x00, 0x40, 0x40, 0x02, 0x00, 0x00,
0x83, 0x60, 0x00, 0x00, 0x8C, 0x00, 0x00, 0x40,
0x60, 0xB7, 0x79, 0xE7, 0x81, 0xC7, 0x92, 0x70,
0x89, 0xE7, 0x9E, 0x78, 0x7C, 0xE2, 0xC9, 0x2C,
0x81, 0xCC, 0xD2, 0x40, 0xFB, 0x21, 0xB2, 0x48,
0x40, 0x62, 0xF9, 0x2C, 0x80, 0x8C, 0xD2, 0x40,
0x8B, 0xE7, 0xB0, 0x48, 0x40, 0xE2, 0xC9, 0x2C,
0x80, 0x84, 0xD2, 0x40, 0x8B, 0x2D, 0x92, 0x48,
0x7D, 0xB3, 0x79, 0x27, 0x80, 0x87, 0x9E, 0x40,
0x8D, 0xE7, 0x9E, 0x48, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
SeeedLogo = upmLCD.uint8Array(len(logoArr))
for x in range(len(logoArr)):
SeeedLogo.__setitem__(x, logoArr[x])
# If you don't set the display to be white, the seeed logo will appear jagged
myLCD.setGrayLevel(12)
myLCD.draw(SeeedLogo, 96 * 96 / 8);
for i in range(12):
myLCD.setCursor(i, 0)
myLCD.setGrayLevel(i)
myLCD.write('Hello World')
print "Exiting"
| 45.063158
| 77
| 0.68512
| 1,407
| 8,562
| 4.165601
| 0.176972
| 0.939089
| 1.28374
| 1.572428
| 0.614912
| 0.599215
| 0.586931
| 0.552124
| 0.518683
| 0.462037
| 0
| 0.454151
| 0.165732
| 8,562
| 189
| 78
| 45.301587
| 0.366373
| 0.141789
| 0
| 0.375796
| 0
| 0
| 0.002458
| 0
| 0
| 0
| 0.629711
| 0
| 0
| 0
| null | null | 0
| 0.012739
| null | null | 0.006369
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| ed2ce29f604f771e38af6d30ffbe09233cdd48b6
| 9,956
| py
| Python
| Protein-ligand-binding/TopBio/Feature/LigandFeature.py
| WeilabMSU/TopologyNet
| 4f4d13cec7e50624b43990c863dd84b8bbf359d8
| ["MIT"] | 1
| 2021-12-09T02:38:53.000Z
| 2021-12-09T02:38:53.000Z
| Protein-ligand-binding/TopBio/Feature/LigandFeature.py
| WeilabMSU/TopologyNet
| 4f4d13cec7e50624b43990c863dd84b8bbf359d8
| ["MIT"] | null | null | null |
| Protein-ligand-binding/TopBio/Feature/LigandFeature.py
| WeilabMSU/TopologyNet
| 4f4d13cec7e50624b43990c863dd84b8bbf359d8
| ["MIT"] | 1
| 2021-12-09T02:38:54.000Z
| 2021-12-09T02:38:54.000Z
|
import numpy as np
import pickle
import os
# Builds a fixed-length, handcrafted feature vector from the alpha-complex
# persistence barcodes pickled in <working_dir>/<ligand_name>_alpha.pkl:
# for each element group in LIGELE, 6 statistics for dimension-0 bars and 18
# each for dimensions 1 and 2 (42 values, zero-filled when the group is absent).
def GenerateFeature_alpha(ligand_name, working_dir):
Cut = 12.0
LIGELE = ['C','N','O','S','CN','CO','CS','NO','NS','OS','CCl','CBr','CP','CF','CNO','CNS','COS','NOS','CNOS','CNOSPFClBrI','H','CH','NH','OH','SH','CNH','COH','CSH','NOH','NSH','OSH','CNOH','CNSH','COSH','NOSH','CNOSH','CNOSPFClBrIH','CClH','CBrH','CPH','CFH']
Feature_i = []
pdb = ligand_name
InFile = open(working_dir+'/'+ligand_name+'_alpha.pkl')
BarCollection = pickle.load(InFile)
for el in LIGELE:
if 'lig_'+el in BarCollection.keys():
Bars = BarCollection['lig_'+el]
Bar0Birth = []; Bar0Death = []; Bar1Birth = []; Bar1Death = []; Bar2Birth = []; Bar2Death = [];
for Bar in Bars:
if Bar[2] < Bar[1]:
continue
if Bar[2] > 12.0 and Bar[0] == 0: continue
if Bar[2] > 12.0 and Bar[0] > 0: Bar[2] = 12.0
if Bar[0] == 0:
Bar0Birth.append(Bar[1])
Bar0Death.append(Bar[2])
if Bar[0] == 1:
Bar1Birth.append(Bar[1])
Bar1Death.append(Bar[2])
if Bar[0] == 2:
Bar2Birth.append(Bar[1])
Bar2Death.append(Bar[2])
if len(Bar0Birth) > 0:
Bar0Birth = np.asarray(Bar0Birth, float)
Bar0Death = np.asarray(Bar0Death, float)
if len(Bar1Birth) > 0:
Bar1Birth = np.asarray(Bar1Birth, float)
Bar1Death = np.asarray(Bar1Death, float)
if len(Bar2Birth) > 0:
Bar2Birth = np.asarray(Bar2Birth, float)
Bar2Death = np.asarray(Bar2Death, float)
if len(Bar0Death) > 0:
Feature_i.append(np.mean(Bar0Death[:]))
Feature_i.append(np.std(Bar0Death[:]))
Feature_i.append(np.max(Bar0Death[:]))
Feature_i.append(np.min(Bar0Death[:]))
Feature_i.append(np.sum(Bar0Death[:]))
Feature_i.append(len(Bar0Death))
else:
Feature_i.extend([0.]*6)
if len(Bar1Death) > 0:
Feature_i.append(np.mean(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.std(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.max(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.min(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.sum(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(Bar1Birth[np.argmax(Bar1Death[:] - Bar1Birth[:])])
Feature_i.append(Bar1Death[np.argmax(Bar1Death[:] - Bar1Birth[:])])
Feature_i.append(np.mean(Bar1Birth[:]))
Feature_i.append(np.std(Bar1Birth[:]))
Feature_i.append(np.max(Bar1Birth[:]))
Feature_i.append(np.min(Bar1Birth[:]))
Feature_i.append(np.sum(Bar1Birth[:]))
Feature_i.append(np.mean(Bar1Death[:]))
Feature_i.append(np.std(Bar1Death[:]))
Feature_i.append(np.max(Bar1Death[:]))
Feature_i.append(np.min(Bar1Death[:]))
Feature_i.append(np.sum(Bar1Death[:]))
Feature_i.append(len(Bar1Death))
else:
Feature_i.extend([0.]*18)
if len(Bar2Death) > 0:
Feature_i.append(np.mean(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.std(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.max(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.min(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.sum(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(Bar2Birth[np.argmax(Bar2Death[:] - Bar2Birth[:])])
Feature_i.append(Bar2Death[np.argmax(Bar2Death[:] - Bar2Birth[:])])
Feature_i.append(np.mean(Bar2Birth[:]))
Feature_i.append(np.std(Bar2Birth[:]))
Feature_i.append(np.max(Bar2Birth[:]))
Feature_i.append(np.min(Bar2Birth[:]))
Feature_i.append(np.sum(Bar2Birth[:]))
Feature_i.append(np.mean(Bar2Death[:]))
Feature_i.append(np.std(Bar2Death[:]))
Feature_i.append(np.max(Bar2Death[:]))
Feature_i.append(np.min(Bar2Death[:]))
Feature_i.append(np.sum(Bar2Death[:]))
Feature_i.append(len(Bar2Death))
else:
Feature_i.extend([0.]*18)
else:
Feature_i.extend([0.]*42)
Feature_i = np.asarray(Feature_i, float)
outfile = open(working_dir+'/'+ligand_name+'_feature_alpha_handcrafted.npy', 'w')
np.save(outfile, Feature_i)
outfile.close()
# Same barcode statistics as GenerateFeature_alpha, but read from the plain-text
# <working_dir>/<ligand_name>_<element>_level1.PH files, discarding bars shorter
# than `small` (0.01).
def GenerateFeature_level1(ligand_name, working_dir):
small = 0.01
Feature_i = []
Cut = 12.0
LIGELE = ['C','N','O','S','CN','CO','CS','NO','NS','OS','CCl','CBr','CP','CF','CNO','CNS','COS','NOS','CNOS','CNOSPFClBrI','H','CH','NH','OH','SH','CNH','COH','CSH','NOH','NSH','OSH','CNOH','CNSH','COSH','NOSH','CNOSH','CNOSPFClBrIH','CClH','CBrH','CPH','CFH']
pdb = ligand_name
for el in LIGELE:
if os.path.exists(working_dir+'/'+ligand_name+'_'+el+'_level1.PH'):
InFile = open(working_dir+'/'+ligand_name+'_'+el+'_level1.PH')
lines = InFile.read().splitlines()
Bars = []
for line in lines:
a,b,c = line.split()
Bars.append([int(a), float(b), float(c)])
InFile.close()
Bar0Birth = []; Bar0Death = []; Bar1Birth = []; Bar1Death = []; Bar2Birth = []; Bar2Death = [];
for Bar in Bars:
if Bar[2] < Bar[1]:
continue
if Bar[2] > 12.0 and Bar[0] == 0: continue
if Bar[2] > 12.0 and Bar[0] > 0: Bar[2] = 12.0
if Bar[0] == 0 and Bar[2]-Bar[1] >= small:
Bar0Birth.append(Bar[1])
Bar0Death.append(Bar[2])
if Bar[0] == 1 and Bar[2]-Bar[1] >= small:
Bar1Birth.append(Bar[1])
Bar1Death.append(Bar[2])
if Bar[0] == 2 and Bar[2]-Bar[1] >= small:
Bar2Birth.append(Bar[1])
Bar2Death.append(Bar[2])
if len(Bar0Birth) > 0:
Bar0Birth = np.asarray(Bar0Birth, float)
Bar0Death = np.asarray(Bar0Death, float)
if len(Bar1Birth) > 0:
Bar1Birth = np.asarray(Bar1Birth, float)
Bar1Death = np.asarray(Bar1Death, float)
if len(Bar2Birth) > 0:
Bar2Birth = np.asarray(Bar2Birth, float)
Bar2Death = np.asarray(Bar2Death, float)
if len(Bar0Death) > 0:
Feature_i.append(np.mean(Bar0Death[:]))
Feature_i.append(np.std(Bar0Death[:]))
Feature_i.append(np.max(Bar0Death[:]))
Feature_i.append(np.min(Bar0Death[:]))
Feature_i.append(np.sum(Bar0Death[:]))
Feature_i.append(len(Bar0Death))
else:
Feature_i.extend([0.]*6)
if len(Bar1Death) > 0:
Feature_i.append(np.mean(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.std(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.max(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.min(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(np.sum(Bar1Death[:] - Bar1Birth[:]))
Feature_i.append(Bar1Birth[np.argmax(Bar1Death[:] - Bar1Birth[:])])
Feature_i.append(Bar1Death[np.argmax(Bar1Death[:] - Bar1Birth[:])])
Feature_i.append(np.mean(Bar1Birth[:]))
Feature_i.append(np.std(Bar1Birth[:]))
Feature_i.append(np.max(Bar1Birth[:]))
Feature_i.append(np.min(Bar1Birth[:]))
Feature_i.append(np.sum(Bar1Birth[:]))
Feature_i.append(np.mean(Bar1Death[:]))
Feature_i.append(np.std(Bar1Death[:]))
Feature_i.append(np.max(Bar1Death[:]))
Feature_i.append(np.min(Bar1Death[:]))
Feature_i.append(np.sum(Bar1Death[:]))
Feature_i.append(len(Bar1Death))
else:
Feature_i.extend([0.]*18)
if len(Bar2Death) > 0:
Feature_i.append(np.mean(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.std(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.max(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.min(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(np.sum(Bar2Death[:] - Bar2Birth[:]))
Feature_i.append(Bar2Birth[np.argmax(Bar2Death[:] - Bar2Birth[:])])
Feature_i.append(Bar2Death[np.argmax(Bar2Death[:] - Bar2Birth[:])])
Feature_i.append(np.mean(Bar2Birth[:]))
Feature_i.append(np.std(Bar2Birth[:]))
Feature_i.append(np.max(Bar2Birth[:]))
Feature_i.append(np.min(Bar2Birth[:]))
Feature_i.append(np.sum(Bar2Birth[:]))
Feature_i.append(np.mean(Bar2Death[:]))
Feature_i.append(np.std(Bar2Death[:]))
Feature_i.append(np.max(Bar2Death[:]))
Feature_i.append(np.min(Bar2Death[:]))
Feature_i.append(np.sum(Bar2Death[:]))
Feature_i.append(len(Bar2Death))
else:
Feature_i.extend([0.]*18)
else:
Feature_i.extend([0.]*42)
Feature_i = np.asarray(Feature_i, float)
outfile = open(working_dir+'/'+ligand_name+'_feature_ligand_level1_handcrafted.npy', 'w')
np.save(outfile, Feature_i)
outfile.close()
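Both generators above return nothing and write their result to disk, so a driver only needs a ligand name and a working directory containing the precomputed barcode files. Below is a minimal usage sketch under assumed names and paths (the ligand id, directory, and import path are placeholders, not part of the original module), targeting the same Python 2 environment the module itself assumes.

# Hedged usage sketch for the two feature generators above.
# Assumes <work_dir> already holds 1abc_ligand_alpha.pkl and the per-element
# 1abc_ligand_<el>_level1.PH files produced by earlier pipeline steps.
from TopBio.Feature.LigandFeature import GenerateFeature_alpha, GenerateFeature_level1

work_dir = '/tmp/topbio_features'                # placeholder path
GenerateFeature_alpha('1abc_ligand', work_dir)   # writes 1abc_ligand_feature_alpha_handcrafted.npy
GenerateFeature_level1('1abc_ligand', work_dir)  # writes 1abc_ligand_feature_ligand_level1_handcrafted.npy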
| 49.78
| 264
| 0.518682
| 1,108
| 9,956
| 4.541516
| 0.106498
| 0.158983
| 0.233704
| 0.222576
| 0.925278
| 0.919316
| 0.901828
| 0.889905
| 0.889905
| 0.889905
| 0
| 0.042868
| 0.313479
| 9,956
| 199
| 265
| 50.030151
| 0.693343
| 0
| 0
| 0.863158
| 1
| 0
| 0.037063
| 0.00683
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010526
| false
| 0
| 0.015789
| 0
| 0.026316
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| ed585ed9b9e64b5cd7e7fef27facda2ab0843b74
| 4,320
| py
| Python
| tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py
| andrewtarzia/stk
| 1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
| ["MIT"] | 21
| 2018-04-12T16:25:24.000Z
| 2022-02-14T23:05:43.000Z
| tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py
| JelfsMaterialsGroup/stk
| 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
| ["MIT"] | 8
| 2019-03-19T12:36:36.000Z
| 2020-11-11T12:46:00.000Z
| tests/molecular/molecules/molecule/fixtures/cof/periodic_kagome.py
| supramolecular-toolkit/stk
| 0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
| ["MIT"] | 5
| 2018-08-07T13:00:16.000Z
| 2021-11-01T00:55:10.000Z
|
import pytest
import stk
from ...case_data import CaseData
@pytest.fixture(
scope='session',
params=(
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicKagome(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles=(
'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+]('
'Br)[C+2]1'
),
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
),
),
smiles=(
'[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='
'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='
'[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'
'+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'
'5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'
'20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'
')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2]['
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N'
'=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)'
'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]'
'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206['
'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+'
']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C'
'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N='
'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%'
'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121'
),
name=name,
),
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicKagome(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles=(
'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+]('
'Br)[C+2]1'
),
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
optimizer=stk.PeriodicCollapser(),
),
),
smiles=(
'[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='
'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='
'[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'
'+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'
'5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'
'20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'
')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2]['
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N'
'=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)'
'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]'
'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206['
'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+'
']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C'
'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N='
'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%'
'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121'
),
name=name,
),
),
)
def cof_periodic_kagome(request) -> CaseData:
return request.param(
f'{request.fixturename}{request.param_index}',
)
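The fixture above parametrises two lazily built cases, one plain PeriodicKagome graph and one optimised with stk.PeriodicCollapser, and names each case from the fixture name and parameter index. The sketch below shows one plausible way a test could consume it, based only on the CaseData fields visible above; the key-maker call is an assumption about the stk API, not code from the repository.

# Hedged sketch: consuming the cof_periodic_kagome fixture in a test.
# Assumes stk.Smiles() is the canonical-SMILES key maker and that CaseData
# exposes the molecule and smiles attributes set in the fixture above.
import stk

def test_periodic_kagome_smiles(cof_periodic_kagome):
    case_data = cof_periodic_kagome
    assert stk.Smiles().get_key(case_data.molecule) == case_data.smiles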
| 45.473684
| 71
| 0.344907
| 682
| 4,320
| 2.164223
| 0.140762
| 0.075881
| 0.093496
| 0.02168
| 0.867209
| 0.867209
| 0.867209
| 0.867209
| 0.867209
| 0.867209
| 0
| 0.145964
| 0.371991
| 4,320
| 94
| 72
| 45.957447
| 0.398083
| 0
| 0
| 0.868132
| 0
| 0.373626
| 0.43125
| 0.417593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010989
| false
| 0
| 0.032967
| 0.010989
| 0.054945
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| ed5bad05b400253943df833d896315c0be535899
| 19,288
| py
| Python
| test/unit/test_som_rom_parser.py
| CospanDesign/nysa
| ffe07f0b8fe2f6217e7a862d89b80f1b17163be9
| ["MIT"] | 15
| 2015-08-31T20:50:39.000Z
| 2022-03-13T08:56:39.000Z
| test/unit/test_som_rom_parser.py
| CospanDesign/nysa
| ffe07f0b8fe2f6217e7a862d89b80f1b17163be9
| ["MIT"] | 5
| 2015-05-02T16:48:57.000Z
| 2017-06-15T16:25:34.000Z
| test/unit/test_som_rom_parser.py
| CospanDesign/nysa
| ffe07f0b8fe2f6217e7a862d89b80f1b17163be9
| ["MIT"] | 6
| 2016-09-02T16:02:13.000Z
| 2021-06-29T22:29:45.000Z
|
#!/usr/bin/python
import unittest
import json
import sys
import os
import string
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
from nysa.cbuilder import sdb_component as sdbc
from nysa.cbuilder import sdb_object_model as som
from nysa.cbuilder.som_rom_parser import parse_rom_image
from nysa.cbuilder.som_rom_generator import generate_rom_image
from nysa.cbuilder.sdb import SDBInfo
from nysa.cbuilder.sdb import SDBWarning
from nysa.cbuilder.sdb import SDBError
from nysa.common.status import StatusLevel
from nysa.common.status import Status
class Test (unittest.TestCase):
"""Unit test SDB Tree"""
def setUp(self):
pass
'''
def test_simple_rom(self):
rom_in = ROM1
som = parse_rom_image(rom_in)
rom_out = generate_rom_image(som)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
self.assertEqual(rom_in, rom_out)
'''
def test_full_dionysus_read(self):
from nysa.host.platform_scanner import PlatformScanner
pscanner = PlatformScanner()
platform_dict = pscanner.get_platforms()
platform_names = platform_dict.keys()
if "dionysus" not in platform_names:
return
s = Status()
platform_instance = platform_dict["dionysus"](s)
platforms = platform_instance.scan()
if len(platforms) == 0:
return
dionysus = platforms[platforms.keys()[0]]
#print "Found Dionysus"
s.set_level("fatal")
s.Verbose("Read SDB")
dionysus.read_sdb()
def test_full_bus(self):
sm = som.SOM()
sm.initialize_root()
root = sm.get_root()
peripheral = sm.insert_bus()
peripheral.set_name("peripheral")
memory = sm.insert_bus()
memory.set_name("memory")
d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
peripheral.set_child_spacing(0x0010000000)
root.set_child_spacing (0x0100000000)
sm.insert_component(peripheral, d1)
sm.insert_component(peripheral, d2)
sm.insert_component(memory, m1)
sm.insert_component(memory, m2)
rom = generate_rom_image(sm)
rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
#rom_in = ROM2
#print_sdb_rom(rom_in)
sm = parse_rom_image(rom_in)
rom_out = generate_rom_image(sm)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
#print_sdb_rom(rom_out)
self.assertEqual(rom_in, rom_out)
def test_full_bus_with_integration(self):
sm = som.SOM()
sm.initialize_root()
root = sm.get_root()
peripheral = sm.insert_bus()
peripheral.set_name("peripheral")
memory = sm.insert_bus()
memory.set_name("memory")
d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
intr = sdbc.create_integration_record("Integration Data",
vendor_id = 0x800BEAF15DEADC03,
device_id = 0x00000000)
peripheral.set_child_spacing(0x0100000000)
sm.insert_component(peripheral, intr)
sm.insert_component(peripheral, d1)
sm.insert_component(peripheral, d2)
sm.insert_component(memory, m1)
sm.insert_component(memory, m2)
rom = generate_rom_image(sm)
rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
#rom_in = ROM2
#print_sdb_rom(rom_in)
sm = parse_rom_image(rom_in)
rom_out = generate_rom_image(sm)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
#print_sdb_rom(rom_out)
#compare_roms(rom_in, rom_out)
self.assertEqual(rom_in, rom_out)
def test_generate_one_sub_bus_with_url(self):
sm = som.SOM()
sm.initialize_root()
root = sm.get_root()
peripheral = sm.insert_bus()
peripheral.set_name("peripheral")
memory = sm.insert_bus()
memory.set_name("memory")
d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
intr = sdbc.create_integration_record("Integration Data",
vendor_id = 0x800BEAF15DEADC03,
device_id = 0x00000000)
url = sdbc.create_repo_url_record("http://www.geocities.com")
sm.insert_component(root, url)
peripheral.set_child_spacing(0x0100000000)
sm.insert_component(peripheral, intr)
sm.insert_component(peripheral, d1)
sm.insert_component(peripheral, d2)
sm.insert_component(memory, m1)
sm.insert_component(memory, m2)
rom = generate_rom_image(sm)
rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
#print_sdb(rom)
sm = parse_rom_image(rom_in)
rom_out = generate_rom_image(sm)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
#print_sdb_rom(rom_out)
#compare_roms(rom_in, rom_out)
self.assertEqual(rom_in, rom_out)
def test_generate_one_sub_bus_with_url_and_synthesis(self):
sm = som.SOM()
sm.initialize_root()
root = sm.get_root()
peripheral = sm.insert_bus()
peripheral.set_name("peripheral")
memory = sm.insert_bus()
memory.set_name("memory")
d1 = sdbc.create_device_record(name = "device 1", size = 0x100)
d2 = sdbc.create_device_record(name = "device 2", size = 0x100)
m1 = sdbc.create_device_record(name = "memory 1", size = 0x10000)
m2 = sdbc.create_device_record(name = "memory 2", size = 0x20000)
intr = sdbc.create_integration_record("Integration Data",
vendor_id = 0x800BEAF15DEADC03,
device_id = 0x00000000)
url = sdbc.create_repo_url_record("http://www.geocities.com")
synthesis = sdbc.create_synthesis_record("Synthesis Name", 123, "cool tool", 1.0, "jeff")
sm.insert_component(root, url)
sm.insert_component(root, synthesis)
peripheral.set_child_spacing(0x0100000000)
sm.insert_component(peripheral, intr)
sm.insert_component(peripheral, d1)
sm.insert_component(peripheral, d2)
sm.insert_component(memory, m1)
sm.insert_component(memory, m2)
rom = generate_rom_image(sm)
rom_in = sdbc.convert_rom_to_32bit_buffer(rom)
#print_sdb(rom)
sm = parse_rom_image(rom_in)
rom_out = generate_rom_image(sm)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
#print_sdb_rom(rom_out)
#compare_roms(rom_in, rom_out)
self.assertEqual(rom_in, rom_out)
def test_parse_romd_image(self):
rom_in = ROMD
#print_sdb(rom)
sm = parse_rom_image(rom_in)
rom_out = generate_rom_image(sm)
rom_out = sdbc.convert_rom_to_32bit_buffer(rom_out)
print_sdb_rom(rom_out)
#compare_roms(rom_in, rom_out)
self.assertEqual(rom_in, rom_out)
def compare_roms(rom_in, rom_out):
if len(rom_in) != len(rom_out):
print "ROM lengths do not match!"
return
rom_in = rom_in.splitlines()
rom_out = rom_out.splitlines()
for i in range (0, len(rom_in), 4):
if (i % 16 == 0):
magic = "0x%s" % (rom_in[i].lower())
last_val = int(rom_in[i + 15], 16) & 0xFF
print ""
if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0):
print "Interconnect"
elif last_val == 0x01:
print "Device"
elif last_val == 0x02:
print "Bridge"
elif last_val == 0x80:
print "Integration"
elif last_val == 0x81:
print "URL"
elif last_val == 0x82:
print "Synthesis"
elif last_val == 0xFF:
print "Empty"
else:
print "???"
if rom_in[i] == rom_out[i] and rom_in[i + 1] == rom_out[i + 1] and rom_in[i + 2] == rom_out[i + 2] and rom_in[i + 3] == rom_out[i + 3]:
print "%s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3])
else:
print "%s %s : %s %s != %s %s : %s %s" % (rom_in[i], rom_in[i + 1], rom_in[i + 2], rom_in[i + 3], rom_out[i], rom_out[i + 1], rom_out[i + 2], rom_out[i + 3])
def print_sdb_rom(rom):
#rom = sdbc.convert_rom_to_32bit_buffer(rom)
rom = rom.splitlines()
print "ROM"
for i in range (0, len(rom), 4):
if (i % 16 == 0):
magic = "0x%s" % (rom[i].lower())
last_val = int(rom[i + 15], 16) & 0xFF
print ""
if (magic == hex(sdbc.SDB_INTERCONNECT_MAGIC) and last_val == 0):
print "Interconnect"
elif last_val == 0x01:
print "Device"
elif last_val == 0x02:
print "Bridge"
elif last_val == 0x80:
print "Integration"
elif last_val == 0x81:
print "URL"
elif last_val == 0x82:
print "Synthesis"
elif last_val == 0xFF:
print "Empty"
else:
print "???"
print "%s %s : %s %s" % (rom[i], rom[i + 1], rom[i + 2], rom[i + 3])
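# Note on the ROM string constants below (inferred from print_sdb_rom above):
# each SDB record spans 16 32-bit words, one hex word per line, and the low
# byte of a record's last word encodes its type (0x01 device, 0x02 bridge,
# 0x80 integration, 0x81 URL, 0x82 synthesis, 0xFF empty).
# Hedged debugging sketch, reusing the helpers already imported by this module;
# drop it into a failing round-trip test to see where the images diverge:
#
#   rom_in = sdbc.convert_rom_to_32bit_buffer(generate_rom_image(sm))
#   rom_out = sdbc.convert_rom_to_32bit_buffer(
#       generate_rom_image(parse_rom_image(rom_in)))
#   compare_roms(rom_in, rom_out)   # word-by-word diff, labeled by record type
#   print_sdb_rom(rom_out)          # dump of the regenerated image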
ROM1 = "5344422D\n"\
"00010100\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
ROM2 = "5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"03000000\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000020\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000040\n"\
"00000100\n"\
"00000000\n"\
"00000200\n"\
"00030000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000207\n"\
"00000001\n"\
"00000000\n"\
"00000003\n"\
"00000100\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"64657669\n"\
"63652032\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000100\n"\
"00000000\n"\
"00000200\n"\
"00030000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00010000\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72792031\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00010000\n"\
"00000000\n"\
"00030000\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0105\n"\
"6D656D6F\n"\
"72792032\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
ROMD = "5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"746F7000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000020\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"20000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000040\n"\
"00000001\n"\
"00000000\n"\
"00000001\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000002\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00020100\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"20000000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"70657269\n"\
"70686572\n"\
"616C0000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000340\n"\
"80000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"140F0106\n"\
"53444200\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000101\n"\
"00000207\n"\
"00000000\n"\
"10000000\n"\
"00000000\n"\
"10000008\n"\
"80000000\n"\
"0000C594\n"\
"00000000\n"\
"00000001\n"\
"140F0107\n"\
"77625F67\n"\
"70696F00\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF\n"\
"5344422D\n"\
"00010100\n"\
"00000001\n"\
"00000000\n"\
"00000001\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000001\n"\
"00000001\n"\
"140F0106\n"\
"6D656D6F\n"\
"72790000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000502\n"\
"00000207\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00800000\n"\
"80000000\n"\
"0000C594\n"\
"00000000\n"\
"00000001\n"\
"140F0107\n"\
"77625F73\n"\
"6472616D\n"\
"00000000\n"\
"00000000\n"\
"00000001\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"00000000\n"\
"000000FF"
| 27.792507
| 169
| 0.506999
| 2,144
| 19,288
| 4.387127
| 0.097015
| 0.209547
| 0.23283
| 0.298533
| 0.868063
| 0.824793
| 0.816606
| 0.80438
| 0.792367
| 0.777801
| 0
| 0.287109
| 0.352084
| 19,288
| 693
| 170
| 27.832612
| 0.465552
| 0.020479
| 0
| 0.875399
| 0
| 0
| 0.252728
| 0
| 0
| 0
| 0.01591
| 0
| 0.007987
| 0
| null | null | 0.001597
| 0.023962
| null | null | 0.039936
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
ed83e8c908ff960c5bf16835dd114bff6b5f51a1
| 123
|
py
|
Python
|
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
src/GalaxyDynamicsFromVc/units.py
|
pabferde/galaxy_dynamics_from_Vc
|
7232f7bfd6d2338bcec3bbf87478f7a482c749ef
|
[
"MIT"
] | null | null | null |
_Msun_kpc3_to_GeV_cm3_factor = 0.3/8.0e6
def Msun_kpc3_to_GeV_cm3(value):
return value*_Msun_kpc3_to_GeV_cm3_factor
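# Hedged usage sketch: the factor above encodes the conventional equivalence
# 8.0e6 Msun/kpc^3 ~= 0.3 GeV/cm^3 (a typical local dark-matter density), so
# converting that reference value returns 0.3 exactly:
#
#   rho_gev = Msun_kpc3_to_GeV_cm3(8.0e6)   # -> 0.3 (GeV/cm^3)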
| 17.571429
| 45
| 0.821138
| 25
| 123
| 3.4
| 0.52
| 0.282353
| 0.352941
| 0.458824
| 0.705882
| 0.517647
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 0.113821
| 123
| 6
| 46
| 20.5
| 0.678899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
9c10d305e4f704dfba7fd0b5306c365d4671b49e
| 41,012
|
py
|
Python
|
services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py
|
KZzizzle/osparc-simcore
|
981bc8d193f3f5d507e3225f857e0308c339e163
|
[
"MIT"
] | null | null | null |
services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py
|
KZzizzle/osparc-simcore
|
981bc8d193f3f5d507e3225f857e0308c339e163
|
[
"MIT"
] | 17
|
2020-10-15T16:06:05.000Z
|
2022-03-21T18:48:21.000Z
|
services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py
|
Surfict/osparc-simcore
|
1e0b89574ec17ecb089674f9e5daa83d624430c8
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
simcore-service-storage API
API definition for simcore-service-storage service # noqa: E501
OpenAPI spec version: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from simcore_service_storage_sdk.api_client import ApiClient
class UsersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def check_action_post(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
else:
(data) = self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
return data
def check_action_post_with_http_info(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post_with_http_info(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['action', 'data', 'fake_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_action_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'action' is set
if ('action' not in local_var_params or
local_var_params['action'] is None):
raise ValueError("Missing the required parameter `action` when calling `check_action_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'action' in local_var_params:
path_params['action'] = local_var_params['action'] # noqa: E501
query_params = []
if 'data' in local_var_params:
query_params.append(('data', local_var_params['data'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'fake_type' in local_var_params:
body_params = local_var_params['fake_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/check/{action}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FakeEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
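# Hedged usage sketch, mirroring the docstring examples above; the "echo"
# action and "ping" payload are placeholders, not values defined by the
# generated client:
#
#   api = UsersApi()                       # builds a default ApiClient()
#   enveloped = api.check_action_post("echo", data="ping")            # sync
#   thread = api.check_action_post("echo", data="ping", async_req=True)
#   enveloped = thread.get()                                          # async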
def delete_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def delete_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `delete_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def download_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def download_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method download_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `download_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_file_metadata(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def get_file_metadata_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_file_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_file_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_files_metadata(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
return data
def get_files_metadata_with_http_info(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata_with_http_info(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['location_id', 'user_id', 'uuid_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_files_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_files_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_files_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'uuid_filter' in local_var_params:
query_params.append(('uuid_filter', local_var_params['uuid_filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_storage_locations(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
return data
def get_storage_locations_with_http_info(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations_with_http_info(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_storage_locations" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_storage_locations`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileLocationArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def health_check(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.health_check_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.health_check_with_http_info(**kwargs) # noqa: E501
return data
def health_check_with_http_info(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method health_check" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HealthCheckEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_file_meta_data(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
else:
(data) = self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
return data
def update_file_meta_data_with_http_info(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data_with_http_info(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'file_meta_data_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_file_meta_data" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `update_file_meta_data`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `update_file_meta_data`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'file_meta_data_type' in local_var_params:
body_params = local_var_params['file_meta_data_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def upload_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def upload_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id', 'extra_location', 'extra_source'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method upload_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `upload_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'extra_location' in local_var_params:
query_params.append(('extra_location', local_var_params['extra_location'])) # noqa: E501
if 'extra_source' in local_var_params:
query_params.append(('extra_source', local_var_params['extra_source'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| 41.552178
| 127
| 0.624768
| 4,943
| 41,012
| 4.867085
| 0.039247
| 0.05786
| 0.090781
| 0.027933
| 0.950578
| 0.946671
| 0.937443
| 0.93021
| 0.921357
| 0.914166
| 0
| 0.015276
| 0.286526
| 41,012
| 986
| 128
| 41.59432
| 0.80691
| 0.296816
| 0
| 0.782528
| 1
| 0
| 0.197137
| 0.040822
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035316
| false
| 0
| 0.007435
| 0
| 0.094796
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9c2605f1809d64546c20b13c622af83a1ba7e6e8
| 210
|
py
|
Python
|
cymbology/identifiers/__init__.py
|
pmart123/security_id
|
95087be9525ab8d2fd47baa93f83aaa30e76bb54
|
[
"BSD-2-Clause"
] | 12
|
2015-09-15T17:17:39.000Z
|
2015-09-16T18:18:52.000Z
|
cymbology/identifiers/__init__.py
|
MartinThoma/cymbology
|
95087be9525ab8d2fd47baa93f83aaa30e76bb54
|
[
"BSD-2-Clause"
] | 5
|
2017-09-15T21:22:07.000Z
|
2021-08-19T09:15:59.000Z
|
cymbology/identifiers/__init__.py
|
pmart123/security_id
|
95087be9525ab8d2fd47baa93f83aaa30e76bb54
|
[
"BSD-2-Clause"
] | 1
|
2021-08-19T09:12:59.000Z
|
2021-08-19T09:12:59.000Z
|
from cymbology.identifiers.sedol import Sedol
from cymbology.identifiers.cusip import Cusip, cusip_from_isin
from cymbology.identifiers.isin import Isin
__all__ = ('Sedol', 'Cusip', 'cusip_from_isin', 'Isin')
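# Hedged usage sketch: this __init__ re-exports the identifier classes, so the
# short import path below works.  The validate() call is an assumption about
# the class API and is not confirmed by this file:
#
#   from cymbology.identifiers import Isin
#   Isin().validate("US0378331005")   # assumed method; see the Isin class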
| 35
| 62
| 0.804762
| 28
| 210
| 5.75
| 0.285714
| 0.242236
| 0.447205
| 0.223602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 210
| 5
| 63
| 42
| 0.847368
| 0
| 0
| 0
| 0
| 0
| 0.138095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
139d4a4bd97d70f26bdab675ca59d3c9590754fc
| 746
|
py
|
Python
|
tests/test_provider_Mongey_kafka_connect.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_Mongey_kafka_connect.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_Mongey_kafka_connect.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_Mongey_kafka-connect.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:11 UTC)
def test_provider_import():
import terrascript.provider.Mongey.kafka_connect
def test_resource_import():
from terrascript.resource.Mongey.kafka_connect import kafka_connect_connector
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.Mongey.kafka_connect
#
# t = terrascript.provider.Mongey.kafka_connect.kafka_connect()
# s = str(t)
#
# assert 'https://github.com/Mongey/terraform-provider-kafka-connect' in s
# assert '0.2.3' in s
| 29.84
| 81
| 0.758713
| 103
| 746
| 5.330097
| 0.563107
| 0.174863
| 0.163934
| 0.189435
| 0.224044
| 0.156648
| 0
| 0
| 0
| 0
| 0
| 0.023548
| 0.146113
| 746
| 24
| 82
| 31.083333
| 0.838305
| 0.698391
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
13a86b3246874d1785eb144fa791b1d302c19c30
| 60,991
|
py
|
Python
|
ross/stochastic/st_results.py
|
JuliaMota/ross
|
88c2fa69d9a583dcdc33eab8deb35c797ebf4ef8
|
[
"MIT"
] | null | null | null |
ross/stochastic/st_results.py
|
JuliaMota/ross
|
88c2fa69d9a583dcdc33eab8deb35c797ebf4ef8
|
[
"MIT"
] | null | null | null |
ross/stochastic/st_results.py
|
JuliaMota/ross
|
88c2fa69d9a583dcdc33eab8deb35c797ebf4ef8
|
[
"MIT"
] | null | null | null |
"""STOCHASTIC ROSS plotting module.
This module returns graphs for each type of analyses in st_rotor_assembly.py.
"""
import numpy as np
from plotly import express as px
from plotly import graph_objects as go
from plotly import io as pio
from plotly.subplots import make_subplots
from ross.plotly_theme import tableau_colors
pio.renderers.default = "browser"
# set Plotly palette of colors
colors1 = px.colors.qualitative.Dark24
colors2 = px.colors.qualitative.Light24
class ST_CampbellResults:
"""Store stochastic results and provide plots for Campbell Diagram.
It is possible to visualize multiple harmonics in a single plot to check
other speeds which also excite a specific natural frequency.
Plots are generated with Plotly.
Parameters
----------
speed_range : array
Array with the speed range in rad/s.
wd : array
Array with the damped natural frequencies
log_dec : array
Array with the Logarithmic decrement
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with diagrams for frequency and log dec.
"""
def __init__(self, speed_range, wd, log_dec):
self.speed_range = speed_range
self.wd = wd
self.log_dec = log_dec
def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs):
"""Plot the damped natural frequencies vs frequency.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0 and 100 inclusive.
harmonics: list, optional
List with the harmonics to be plotted.
The default is to plot 1x.
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig = go.Figure()
x = np.concatenate((self.speed_range, self.speed_range[::-1]))
for j, h in enumerate(harmonics):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=self.speed_range * h,
opacity=1.0,
name="{}x speed".format(h),
line=dict(width=3, color=colors1[j], dash="dashdot"),
legendgroup="speed{}".format(j),
hovertemplate=("Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"),
**kwargs,
)
)
for j in range(self.wd.shape[0]):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.mean(self.wd[j], axis=1),
opacity=1.0,
name="Mean - Mode {}".format(j + 1),
line=dict(width=3, color=colors1[j]),
legendgroup="mean{}".format(j),
hovertemplate=("Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.percentile(self.wd[j], p, axis=1),
opacity=0.6,
line=dict(width=2.5, color=colors2[j]),
name="percentile: {}%".format(p),
legendgroup="percentile{}{}".format(j, i),
hovertemplate=(
"Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"
),
**kwargs,
)
)
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.wd[j], 50 + p / 2, axis=1)
p2 = np.percentile(self.wd[j], 50 - p / 2, axis=1)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
line=dict(width=1, color=colors1[j]),
fill="toself",
fillcolor=colors1[j],
opacity=0.3,
name="confidence interval: {}% - Mode {}".format(p, j + 1),
legendgroup="conf{}{}".format(j, i),
hovertemplate=(
"Frequency: %{x:.3f}<br>" + "Frequency: %{y:.3f}"
),
**kwargs,
)
)
fig.update_xaxes(
title_text="<b>Rotor speed</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_yaxes(
title_text="<b>Damped Natural Frequencies</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_layout(
width=1200,
height=900,
plot_bgcolor="white",
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return fig
def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs):
"""Plot the log_dec vs frequency.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
harmonics: list, optional
List with the harmonics to be plotted.
The default is to plot 1x.
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig = go.Figure()
x = np.concatenate((self.speed_range, self.speed_range[::-1]))
for j in range(self.log_dec.shape[0]):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.mean(self.log_dec[j], axis=1),
opacity=1.0,
name="Mean - Mode {}".format(j + 1),
line=dict(width=3, color=colors1[j]),
legendgroup="mean{}".format(j),
hovertemplate=("Frequency: %{x:.3f}<br>" + "Log Dec: %{y:.3f}"),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.percentile(self.log_dec[j], p, axis=1),
opacity=0.6,
line=dict(width=2.5, color=colors2[j]),
name="percentile: {}%".format(p),
legendgroup="percentile{}{}".format(j, i),
hoverinfo="none",
**kwargs,
)
)
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.log_dec[j], 50 + p / 2, axis=1)
p2 = np.percentile(self.log_dec[j], 50 - p / 2, axis=1)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
line=dict(width=1, color=colors1[j]),
fill="toself",
fillcolor=colors1[j],
opacity=0.3,
name="confidence interval: {}% - Mode {}".format(p, j + 1),
legendgroup="conf{}{}".format(j, i),
hoverinfo="none",
**kwargs,
)
)
fig.update_xaxes(
title_text="<b>Rotor speed</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_yaxes(
title_text="<b>Logarithmic decrement</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_layout(
plot_bgcolor="white",
width=1200,
height=900,
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return fig
def plot(self, percentile=[], conf_interval=[], *args, **kwargs):
"""Plot Campbell Diagram.
This method plots Campbell Diagram.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0 and 100 inclusive.
args: optional
harmonics : list, optional
List with the harmonics to be plotted.
The default is to plot 1x.
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with diagrams for frequency and log dec.
"""
fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs)
default_values = dict(showlegend=False)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs)
subplots = make_subplots(rows=1, cols=2)
for data in fig0["data"]:
subplots.add_trace(data, 1, 1)
for data in fig1["data"]:
subplots.add_trace(data, 1, 2)
subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
subplots.update_yaxes(fig1.layout.yaxis, row=1, col=1)
subplots.update_xaxes(fig0.layout.xaxis, row=1, col=2)
subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2)
subplots.update_layout(
plot_bgcolor="white",
width=1800,
height=900,
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return subplots
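# --- Usage sketch (illustrative, not part of the library API) ---
# A minimal example of how the Campbell-diagram results above might be plotted.
# `campbell_results` is a hypothetical instance of the enclosing results class,
# produced elsewhere by a stochastic rotor analysis; the percentile and
# confidence-interval values are arbitrary demonstration choices.
def _example_campbell_plot(campbell_results):
    # Mean curves plus the 25th/75th percentiles and a 90% confidence band,
    # for natural frequency and log dec side by side.
    return campbell_results.plot(percentile=[25, 75], conf_interval=[90])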
class ST_FrequencyResponseResults:
"""Store stochastic results and provide plots for Frequency Response.
Parameters
----------
speed_range : array
Array with the speed range in rad/s.
magnitude : array
Array with the frequencies, magnitude (dB) of the frequency
response for each pair input/output.
phase : array
Array with the frequencies, phase of the frequency
response for each pair input/output.
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with amplitude vs frequency and phase angle vs frequency.
"""
def __init__(self, speed_range, magnitude, phase):
self.speed_range = speed_range
self.magnitude = magnitude
self.phase = phase
def plot_magnitude(
self,
percentile=[],
conf_interval=[],
units="mic-pk-pk",
**kwargs,
):
"""Plot amplitude vs frequency.
This method plots the frequency response magnitude given an output and
an input using Plotly.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0% and 100% inclusive.
units : str, optional
Unit system
Default is "mic-pk-pk".
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
if units == "m":
y_axis_label = "<b>Amplitude (m)</b>"
elif units == "mic-pk-pk":
y_axis_label = "<b>Amplitude (μ pk-pk)</b>"
else:
y_axis_label = "<b>Amplitude (dB)</b>"
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig = go.Figure()
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.mean(self.magnitude, axis=1),
opacity=1.0,
name="Mean",
line=dict(width=3, color="black"),
legendgroup="mean",
hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.percentile(self.magnitude, p, axis=1),
opacity=0.6,
line=dict(width=2.5, color=colors2[i]),
name="percentile: {}%".format(p),
legendgroup="percentile{}".format(i),
hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
x = np.concatenate((self.speed_range, self.speed_range[::-1]))
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.magnitude, 50 + p / 2, axis=1)
p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
line=dict(width=1, color=colors1[i]),
fill="toself",
fillcolor=colors1[i],
opacity=0.5,
name="confidence interval: {}%".format(p),
legendgroup="conf{}".format(i),
hovertemplate=("Frequency: %{x:.2f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
fig.update_xaxes(
title_text="<b>Frequency</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_yaxes(
title_text=y_axis_label,
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_layout(
plot_bgcolor="white",
width=1200,
height=900,
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return fig
def plot_phase(self, percentile=[], conf_interval=[], **kwargs):
"""Plot phase angle response.
This method plots the phase response given an output and an input
using Plotly.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0 and 100 inclusive.
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig = go.Figure()
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.mean(self.phase, axis=1),
opacity=1.0,
name="Mean",
line=dict(width=3, color="black"),
legendgroup="mean",
hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.speed_range,
y=np.percentile(self.phase, p, axis=1),
opacity=0.6,
line=dict(width=2.5, color=colors2[i]),
name="percentile: {}%".format(p),
legendgroup="percentile{}".format(i),
hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
**kwargs,
)
)
x = np.concatenate((self.speed_range, self.speed_range[::-1]))
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.phase, 50 + p / 2, axis=1)
p2 = np.percentile(self.phase, 50 - p / 2, axis=1)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
line=dict(width=1, color=colors1[i]),
fill="toself",
fillcolor=colors1[i],
opacity=0.5,
name="confidence interval: {}%".format(p),
legendgroup="conf{}".format(i),
hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
**kwargs,
)
)
fig.update_xaxes(
title_text="<b>Frequency</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_yaxes(
title_text="<b>Phase Angle</b>",
title_font=dict(family="Arial", size=20),
tickfont=dict(size=16),
gridcolor="lightgray",
showline=True,
linewidth=2.5,
linecolor="black",
mirror=True,
)
fig.update_layout(
plot_bgcolor="white",
width=1200,
height=900,
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return fig
def plot_polar_bode(
self,
percentile=[],
conf_interval=[],
units="mic-pk-pk",
**kwargs,
):
"""Plot polar forced response using Plotly.
Parameters
----------
dof : int
Degree of freedom.
units : str
Magnitude unit system.
Default is "mic-pk-pk"
polar_kwargs : optional
Additional key word arguments can be passed to change the plot layout only
(e.g. width=1000, height=800, ...).
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
if units == "m":
r_axis_label = "<b>Amplitude (m)</b>"
elif units == "mic-pk-pk":
r_axis_label = "<b>Amplitude (μ pk-pk)</b>"
else:
r_axis_label = "<b>Amplitude (dB)</b>"
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig = go.Figure()
fig.add_trace(
go.Scatterpolar(
r=np.mean(self.magnitude, axis=1),
theta=np.mean(self.phase, axis=1),
customdata=self.speed_range,
thetaunit="radians",
line=dict(width=3.0, color="black"),
name="Mean",
legendgroup="mean",
hovertemplate=(
"<b>Amplitude: %{r:.2e}</b><br>"
+ "<b>Phase: %{theta:.2f}</b><br>"
+ "<b>Frequency: %{customdata:.2f}</b>"
),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatterpolar(
r=np.percentile(self.magnitude, p, axis=1),
theta=np.percentile(self.phase, p, axis=1),
customdata=self.speed_range,
thetaunit="radians",
opacity=0.6,
line=dict(width=2.5, color=colors2[i]),
name="percentile: {}%".format(p),
legendgroup="percentile{}".format(i),
hovertemplate=(
"<b>Amplitude: %{r:.2e}</b><br>"
+ "<b>Phase: %{theta:.2f}</b><br>"
+ "<b>Frequency: %{customdata:.2f}</b>"
),
**kwargs,
)
)
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.magnitude, 50 + p / 2, axis=1)
p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1)
p3 = np.percentile(self.phase, 50 + p / 2, axis=1)
p4 = np.percentile(self.phase, 50 - p / 2, axis=1)
fig.add_trace(
go.Scatterpolar(
r=np.concatenate((p1, p2[::-1])),
theta=np.concatenate((p3, p4[::-1])),
thetaunit="radians",
line=dict(width=1, color=colors1[i]),
fill="toself",
fillcolor=colors1[i],
opacity=0.5,
name="confidence interval: {}%".format(p),
legendgroup="conf{}".format(i),
**kwargs,
)
)
fig.update_layout(
polar=dict(
radialaxis=dict(
title_text=r_axis_label,
title_font=dict(family="Arial", size=14),
gridcolor="lightgray",
exponentformat="power",
),
angularaxis=dict(
tickfont=dict(size=14),
gridcolor="lightgray",
linecolor="black",
linewidth=2.5,
),
),
)
return fig
def plot(self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs):
"""Plot frequency response.
This method plots the frequency and phase response given an output
and an input.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be
between 0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
units : str, optional
Unit system
Default is "mic-pk-pk"
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with amplitude vs frequency and phase angle vs frequency.
"""
fig0 = self.plot_magnitude(percentile, conf_interval, units, **kwargs)
default_values = dict(showlegend=False)
for k, v in default_values.items():
kwargs.setdefault(k, v)
fig1 = self.plot_phase(percentile, conf_interval, **kwargs)
fig2 = self.plot_polar_bode(percentile, conf_interval, units, **kwargs)
subplots = make_subplots(
rows=2, cols=2, specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]]
)
for data in fig0["data"]:
subplots.add_trace(data, row=1, col=1)
for data in fig1["data"]:
subplots.add_trace(data, row=2, col=1)
for data in fig2["data"]:
subplots.add_trace(data, row=1, col=2)
subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)
subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1)
subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1)
subplots.update_layout(
plot_bgcolor="white",
polar_bgcolor="white",
width=1800,
height=900,
polar=dict(
radialaxis=fig2.layout.polar.radialaxis,
angularaxis=fig2.layout.polar.angularaxis,
),
legend=dict(
font=dict(family="sans-serif", size=14),
bgcolor="white",
bordercolor="black",
borderwidth=2,
),
)
return subplots
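# --- Usage sketch (illustrative, not part of the library API) ---
# How a stochastic frequency-response result might typically be displayed.
# `st_freq_resp` is a hypothetical ST_FrequencyResponseResults instance; the
# argument values are arbitrary and only for demonstration.
def _example_frequency_response_plot(st_freq_resp):
    # Bode magnitude, phase and polar plots with a 95% confidence band,
    # reporting amplitudes in meters instead of the default "mic-pk-pk".
    return st_freq_resp.plot(conf_interval=[95], units="m")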
class ST_TimeResponseResults:
"""Store stochastic results and provide plots for Time Response and Orbit Response.
Parameters
----------
time_range : 1-dimensional array
Time array.
yout : array
System response.
xout : array
Time evolution of the state vector.
nodes_list : array
List with nodes from a rotor model.
nodes_pos : array
Rotor nodes axial positions.
number_dof : int
Number of degrees of freedom per shaft element's node.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos):
self.time_range = time_range
self.yout = yout
self.xout = xout
self.nodes_list = nodes_list
self.nodes_pos = nodes_pos
self.number_dof = number_dof
def plot_1d(
self, probe, percentile=[], conf_interval=[], fig=None, *args, **kwargs
):
"""Plot time response.
This method plots the time response given a tuple of probes with their nodes
and orientations.
Parameters
----------
probe : list of tuples
List with tuples (node, orientation angle).
node : int
Indicates the node where the probe is located.
orientation : float
Probe orientation angle about the shaft. Zero refers to the +X direction.
percentile : list, optional
Sequence of percentiles to compute, which must be
between 0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
args : optional
Additional plot axes
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
if fig is None:
fig = go.Figure()
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
for i, p in enumerate(probe):
dofx = p[0] * self.number_dof
dofy = p[0] * self.number_dof + 1
angle = p[1]
# fmt: off
operator = np.array(
[[np.cos(angle), - np.sin(angle)],
[np.cos(angle), + np.sin(angle)]]
)
probe_resp = np.zeros_like(self.yout[:, :, 0])
for j, y in enumerate(self.yout):
_probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy]))
probe_resp[j] = (
_probe_resp[0] * np.cos(angle) ** 2 +
_probe_resp[1] * np.sin(angle) ** 2
)
# fmt: on
fig.add_trace(
go.Scatter(
x=self.time_range,
y=np.mean(probe_resp, axis=0),
opacity=1.0,
name=f"Probe {i + 1} - Mean",
line=dict(width=3.0),
hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
for j, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.time_range,
y=np.percentile(probe_resp, p, axis=0),
opacity=0.6,
line=dict(width=2.5),
name=f"Probe {i + 1} - percentile: {p}%",
hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
x = np.concatenate((self.time_range, self.time_range[::-1]))
for j, p in enumerate(conf_interval):
p1 = np.percentile(probe_resp, 50 + p / 2, axis=0)
p2 = np.percentile(probe_resp, 50 - p / 2, axis=0)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
line=dict(width=1),
fill="toself",
fillcolor=colors1[j],
opacity=0.5,
name=f"Probe {i + 1} - confidence interval: {p}%",
hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
**kwargs,
)
)
fig.update_xaxes(title_text="<b>Time (s)</b>")
fig.update_yaxes(title_text="<b>Amplitude</b>")
return fig
def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
"""Plot orbit response (2D).
This function plots orbits for a given node on the rotor system in a 2D view.
Parameters
----------
node : int
Select the node to display the respective orbit response.
percentile : list, optional
Sequence of percentiles to compute, which must be
between 0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
args : optional
Additional plot axes
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
ndof = self.number_dof
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
if fig is None:
fig = go.Figure()
fig.add_trace(
go.Scatter(
x=np.mean(self.yout[..., ndof * node], axis=0),
y=np.mean(self.yout[..., ndof * node + 1], axis=0),
opacity=1.0,
name="Mean",
line=dict(width=3, color="black"),
hovertemplate=(
"X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=np.percentile(self.yout[..., ndof * node], p, axis=0),
y=np.percentile(self.yout[..., ndof * node + 1], p, axis=0),
opacity=0.6,
line=dict(width=2.5, color=colors2[i]),
name="percentile: {}%".format(p),
hovertemplate=(
"X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
),
**kwargs,
)
)
for i, p in enumerate(conf_interval):
p1 = np.percentile(self.yout[..., ndof * node], 50 + p / 2, axis=0)
p2 = np.percentile(self.yout[..., ndof * node], 50 - p / 2, axis=0)
p3 = np.percentile(self.yout[..., ndof * node + 1], 50 + p / 2, axis=0)
p4 = np.percentile(self.yout[..., ndof * node + 1], 50 - p / 2, axis=0)
fig.add_trace(
go.Scatter(
x=np.concatenate((p1, p2[::-1])),
y=np.concatenate((p3, p4[::-1])),
line=dict(width=1, color=colors1[i]),
fill="toself",
fillcolor=colors1[i],
opacity=0.5,
name="confidence interval: {}%".format(p),
hovertemplate=(
"X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
),
**kwargs,
)
)
fig.update_xaxes(title_text="<b>Amplitude</b>")
fig.update_yaxes(title_text="<b>Amplitude</b>")
fig.update_layout(title="<b>Rotor Orbit: node {}</b>".format(node))
return fig
def plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
"""Plot orbit response (3D).
This function plots orbits for each node on the rotor system in a 3D view.
Parameters
----------
percentile : list, optional
Sequence of percentiles to compute, which must be
between 0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
args : optional
Additional plot axes
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
ndof = self.number_dof
default_values = dict(mode="lines")
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
for k, v in default_values.items():
kwargs.setdefault(k, v)
if fig is None:
fig = go.Figure()
line = np.zeros(len(self.nodes_pos))
fig.add_trace(
go.Scatter3d(
x=self.nodes_pos,
y=line,
z=line,
line=dict(width=2.0, color="black", dash="dashdot"),
showlegend=False,
mode="lines",
)
)
for j, n in enumerate(self.nodes_list):
x = np.ones(self.yout.shape[1]) * self.nodes_pos[n]
fig.add_trace(
go.Scatter3d(
x=x,
y=np.mean(self.yout[..., ndof * n], axis=0),
z=np.mean(self.yout[..., ndof * n + 1], axis=0),
line=dict(width=5, color="black"),
name="Mean",
legendgroup="mean",
showlegend=True if j == 0 else False,
hovertemplate=(
"Nodal Position: %{x:.2f}<br>"
+ "X - Amplitude: %{y:.2e}<br>"
+ "Y - Amplitude: %{z:.2e}"
),
**kwargs,
)
)
for i, p in enumerate(percentile):
fig.add_trace(
go.Scatter3d(
x=x,
y=np.percentile(self.yout[..., ndof * n], p, axis=0),
z=np.percentile(self.yout[..., ndof * n + 1], p, axis=0),
opacity=1.0,
name="percentile: {}%".format(p),
line=dict(width=3, color=colors1[i]),
legendgroup="perc{}".format(p),
showlegend=True if j == 0 else False,
hovertemplate=(
"Nodal Position: %{x:.2f}<br>"
+ "X - Amplitude: %{y:.2e}<br>"
+ "Y - Amplitude: %{z:.2e}"
),
**kwargs,
)
)
for i, p in enumerate(conf_interval):
fig.add_trace(
go.Scatter3d(
x=x,
y=np.percentile(self.yout[..., ndof * n], 50 + p / 2, axis=0),
z=np.percentile(
self.yout[..., ndof * n + 1], 50 + p / 2, axis=0
),
line=dict(width=3.5, color=colors1[i]),
opacity=0.6,
name="confidence interval: {}%".format(p),
legendgroup="conf_interval{}".format(p),
showlegend=True if j == 0 else False,
hovertemplate=(
"Nodal Position: %{x:.2f}<br>"
+ "X - Amplitude: %{y:.2e}<br>"
+ "Y - Amplitude: %{z:.2e}"
),
**kwargs,
)
)
fig.add_trace(
go.Scatter3d(
x=x,
y=np.percentile(self.yout[..., ndof * n], 50 - p / 2, axis=0),
z=np.percentile(
self.yout[..., ndof * n + 1], 50 - p / 2, axis=0
),
line=dict(width=3.5, color=colors1[i]),
opacity=0.6,
name="confidence interval: {}%".format(p),
legendgroup="conf_interval{}".format(p),
showlegend=False,
hovertemplate=(
"Nodal Position: %{x:.2f}<br>"
+ "X - Amplitude: %{y:.2e}<br>"
+ "Y - Amplitude: %{z:.2e}"
),
**kwargs,
)
)
fig.update_layout(
scene=dict(
xaxis=dict(title=dict(text="<b>Rotor Length</b>"), showspikes=False),
yaxis=dict(title=dict(text="<b>Amplitude - X</b>"), showspikes=False),
zaxis=dict(title=dict(text="<b>Amplitude - Y</b>"), showspikes=False),
),
)
return fig
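# --- Usage sketch (illustrative, not part of the library API) ---
# How the time-response plots above might be combined.  `st_time_resp` is a
# hypothetical ST_TimeResponseResults instance and the probe/node choices are
# arbitrary; a probe is a (node, orientation angle in rad) tuple as documented
# in plot_1d.
def _example_time_response_plots(st_time_resp):
    # Probe response at node 3 in the horizontal (0 rad) and vertical
    # (pi/2 rad) directions, with a 90% confidence band.
    fig_1d = st_time_resp.plot_1d(probe=[(3, 0.0), (3, np.pi / 2)], conf_interval=[90])
    # Orbit at node 3 and the full 3D orbit view for the same confidence level.
    fig_2d = st_time_resp.plot_2d(node=3, conf_interval=[90])
    fig_3d = st_time_resp.plot_3d(conf_interval=[90])
    return fig_1d, fig_2d, fig_3d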
class ST_ForcedResponseResults:
"""Store stochastic results and provide plots for Forced Response.
Parameters
----------
forced_resp : array
Array with the forced response for each node for each frequency.
frequency_range : array
Array with the frequencies.
magnitude : array
Magnitude of the frequency response for each node for each frequency.
phase : array
Phase of the frequency response for each node for each frequency.
number_dof : int
Number of degrees of freedom per shaft element's node.
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with amplitude vs frequency and phase angle vs frequency.
"""
def __init__(self, forced_resp, magnitude, phase, frequency_range, number_dof):
self.forced_resp = forced_resp
self.magnitude = magnitude
self.phase = phase
self.frequency_range = frequency_range
self.number_dof = number_dof
def plot_magnitude(
self,
probe,
percentile=[],
conf_interval=[],
fig=None,
units="mic-pk-pk",
**kwargs,
):
"""Plot frequency response.
This method plots the unbalance response magnitude.
Parameters
----------
probe : list of tuples
List with tuples (node, orientation angle).
node : int
Indicates the node where the probe is located.
orientation : float
Probe orientation angle about the shaft. Zero refers to the +X direction.
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0% and 100% inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
units : str, optional
Unit system
Default is "mic-pk-pk".
kwargs : optional
Additional key word arguments can be passed to change the plot layout
(e.g. width=800, height=600, ...).
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the magnitude plot.
"""
if units == "m":
y_axis_label = "<b>Amplitude (m)</b>"
elif units == "mic-pk-pk":
y_axis_label = "<b>Amplitude (μ pk-pk)</b>"
else:
y_axis_label = "<b>Amplitude (dB)</b>"
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
if fig is None:
fig = go.Figure()
color_i = 0
color_p = 0
for i, p in enumerate(probe):
dofx = p[0] * self.number_dof
dofy = p[0] * self.number_dof + 1
angle = p[1]
# fmt: off
operator = np.array(
[[np.cos(angle), - np.sin(angle)],
[np.cos(angle), + np.sin(angle)]]
)
probe_resp = np.zeros_like(self.magnitude[:, :, 0])
for j, mag in enumerate(self.magnitude):
_probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy]))
# index by the sample counter j (not the probe counter i) so every
# Monte Carlo sample is stored, mirroring plot_1d above
probe_resp[j] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 +
                        (_probe_resp[1] * np.sin(angle)) ** 2)
# fmt: on
fig.add_trace(
go.Scatter(
x=self.frequency_range,
y=np.mean(probe_resp, axis=0),
opacity=1.0,
mode="lines",
line=dict(width=3, color=list(tableau_colors)[i]),
name=f"Probe {i + 1} - Mean",
legendgroup=f"Probe {i + 1} - Mean",
hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
)
)
for j, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.frequency_range,
y=np.percentile(probe_resp, p, axis=0),
opacity=0.6,
mode="lines",
line=dict(width=2.5, color=colors1[color_p]),
name=f"Probe {i + 1} - percentile: {p}%",
legendgroup=f"Probe {i + 1} - percentile: {p}%",
hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
)
)
color_p += 1
x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
for j, p in enumerate(conf_interval):
p1 = np.percentile(probe_resp, 50 + p / 2, axis=0)
p2 = np.percentile(probe_resp, 50 - p / 2, axis=0)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
mode="lines",
line=dict(width=1, color=colors2[color_i]),
fill="toself",
fillcolor=colors2[color_i],
opacity=0.5,
name=f"Probe {i + 1} - confidence interval: {p}%",
legendgroup=f"Probe {i + 1} - confidence interval: {p}%",
hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
)
)
color_i += 1
fig.update_xaxes(title_text="<b>Frequency</b>")
fig.update_yaxes(title_text=y_axis_label)
fig.update_layout(**kwargs)
return fig
def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs):
"""Plot frequency response.
This method plots the phase response given a set of probes.
Parameters
----------
probe : list of tuples
List with tuples (node, orientation angle).
node : int
Indicates the node where the probe is located.
orientation : float
Probe orientation angle about the shaft. Zero refers to the +X direction.
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0 and 100 inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
kwargs : optional
Additional key word arguments can be passed to change the plot layout
(e.g. width=800, height=600, ...).
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
if fig is None:
fig = go.Figure()
color_p = 0
color_i = 0
for i, p in enumerate(probe):
probe_phase = np.zeros_like(self.phase[:, :, 0])
for j, phs in enumerate(self.phase):
aux_phase = phs[:, p[0] * self.number_dof]
# index by the sample counter j (not the probe counter i) so every sample is kept
probe_phase[j] = np.array(
    [ph + 2 * np.pi if ph < 0 else ph for ph in aux_phase]
)
angle = p[1]
probe_phase[j] = probe_phase[j] - angle
fig.add_trace(
go.Scatter(
x=self.frequency_range,
y=np.mean(probe_phase, axis=0),
opacity=1.0,
mode="lines",
line=dict(width=3, color=list(tableau_colors)[i]),
name=f"Probe {i + 1} - Mean",
legendgroup=f"Probe {i + 1} - Mean",
hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
)
)
for j, p in enumerate(percentile):
fig.add_trace(
go.Scatter(
x=self.frequency_range,
y=np.percentile(probe_phase, p, axis=0),
opacity=0.6,
mode="lines",
line=dict(width=2.5, color=colors1[color_p]),
name=f"Probe {i + 1} - percentile: {p}%",
legendgroup=f"Probe {i + 1} - percentile: {p}%",
hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
)
)
color_p += 1
x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
for j, p in enumerate(conf_interval):
p1 = np.percentile(probe_phase, 50 + p / 2, axis=0)
p2 = np.percentile(probe_phase, 50 - p / 2, axis=0)
fig.add_trace(
go.Scatter(
x=x,
y=np.concatenate((p1, p2[::-1])),
mode="lines",
line=dict(width=1, color=colors2[color_i]),
fill="toself",
fillcolor=colors2[color_i],
opacity=0.5,
name=f"Probe {i + 1} - confidence interval: {p}%",
legendgroup=f"Probe {i + 1} - confidence interval: {p}%",
hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
)
)
color_i += 1
fig.update_xaxes(title_text="<b>Frequency</b>")
fig.update_yaxes(title_text="<b>Phase Angle</b>")
fig.update_layout(**kwargs)
return fig
def plot_polar_bode(
self,
probe,
percentile=[],
conf_interval=[],
fig=None,
units="mic-pk-pk",
**kwargs,
):
"""Plot polar forced response using Plotly.
Parameters
----------
probe : list of tuples
List with tuples (node, orientation angle).
node : int
Indicates the node where the probe is located.
orientation : float
Probe orientation angle about the shaft. Zero refers to the +X direction.
percentile : list, optional
Sequence of percentiles to compute, which must be between
0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be between
0 and 100 inclusive.
fig : Plotly graph_objects.Figure()
The figure object with the plot.
units : str
Magnitude unit system.
Default is "mic-pk-pk"
kwargs : optional
Additional key word arguments can be passed to change the plot layout only
(e.g. width=1000, height=800, ...).
*See Plotly Python Figure Reference for more information.
Returns
-------
fig : Plotly graph_objects.Figure()
The figure object with the plot.
"""
conf_interval = np.sort(conf_interval)
percentile = np.sort(percentile)
if units == "m":
r_axis_label = "<b>Amplitude (m)</b>"
elif units == "mic-pk-pk":
r_axis_label = "<b>Amplitude (μ pk-pk)</b>"
else:
r_axis_label = "<b>Amplitude (dB)</b>"
if fig is None:
fig = go.Figure()
color_p = 0
color_i = 0
for i, p in enumerate(probe):
dofx = p[0] * self.number_dof
dofy = p[0] * self.number_dof + 1
angle = p[1]
# fmt: off
operator = np.array(
[[np.cos(angle), - np.sin(angle)],
[np.cos(angle), + np.sin(angle)]]
)
probe_resp = np.zeros_like(self.magnitude[:, :, 0])
for j, mag in enumerate(self.magnitude):
_probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy]))
# index by the sample counter j (not the probe counter i), as in plot_magnitude
probe_resp[j] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 +
                        (_probe_resp[1] * np.sin(angle)) ** 2)
# fmt: on
probe_phase = np.zeros_like(self.phase[:, :, 0])
for j, phs in enumerate(self.phase):
aux_phase = phs[:, p[0] * self.number_dof]
# index by the sample counter j (not the probe counter i) so every sample is kept
probe_phase[j] = np.array(
    [ph + 2 * np.pi if ph < 0 else ph for ph in aux_phase]
)
angle = p[1]
probe_phase[j] = probe_phase[j] - angle
fig.add_trace(
go.Scatterpolar(
r=np.mean(probe_resp, axis=0),
theta=np.mean(probe_phase, axis=0),
customdata=self.frequency_range,
thetaunit="radians",
mode="lines",
line=dict(width=3.0, color=list(tableau_colors)[i]),
name=f"Probe {i + 1} - Mean",
legendgroup=f"Probe {i + 1} - Mean",
hovertemplate=(
"<b>Amplitude: %{r:.2e}</b><br>"
+ "<b>Phase: %{theta:.2f}</b><br>"
+ "<b>Frequency: %{customdata:.2f}</b>"
),
)
)
for j, p in enumerate(percentile):
fig.add_trace(
go.Scatterpolar(
r=np.percentile(probe_resp, p, axis=0),
theta=np.percentile(probe_phase, p, axis=0),
customdata=self.frequency_range,
thetaunit="radians",
opacity=0.6,
line=dict(width=2.5, color=colors1[color_p]),
name=f"Probe {i + 1} - percentile: {p}%",
legendgroup=f"Probe {i + 1} - percentile{p}",
hovertemplate=(
"<b>Amplitude: %{r:.2e}</b><br>"
+ "<b>Phase: %{theta:.2f}</b><br>"
+ "<b>Frequency: %{customdata:.2f}</b>"
),
)
)
color_p += 1
for j, p in enumerate(conf_interval):
p1 = np.percentile(probe_resp, 50 + p / 2, axis=0)
p2 = np.percentile(probe_resp, 50 - p / 2, axis=0)
p3 = np.percentile(probe_phase, 50 + p / 2, axis=0)
p4 = np.percentile(probe_phase, 50 - p / 2, axis=0)
fig.add_trace(
go.Scatterpolar(
r=np.concatenate((p1, p2[::-1])),
theta=np.concatenate((p3, p4[::-1])),
thetaunit="radians",
line=dict(width=1, color=colors2[color_i]),
fill="toself",
fillcolor=colors2[color_i],
opacity=0.5,
name=f"Probe {i + 1} - confidence interval: {p}%",
legendgroup=f"Probe {i + 1} - confidence interval: {p}%",
)
)
color_i += 1
fig.update_layout(
polar=dict(
radialaxis=dict(title_text=r_axis_label, exponentformat="E"),
angularaxis=dict(exponentformat="E"),
),
**kwargs,
)
return fig
def plot(
self,
probe,
percentile=[],
conf_interval=[],
fig=None,
units="mic-pk-pk",
**kwargs,
):
"""Plot frequency response.
This method plots the frequency and phase response given a set of probes.
Parameters
----------
probe : list of tuples
List with tuples (node, orientation angle).
percentile : list, optional
Sequence of percentiles to compute, which must be
between 0 and 100 inclusive.
conf_interval : list, optional
Sequence of confidence intervals to compute, which must be
between 0 and 100 inclusive.
units : str, optional
Unit system
Default is "mic-pk-pk"
kwargs : optional
Additional key word arguments can be passed to change the plot
(e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...)
*See Plotly Python Figure Reference for more information.
Returns
-------
subplots : Plotly graph_objects.make_subplots()
Plotly figure with amplitude vs frequency and phase angle vs frequency.
"""
# fmt: off
fig0 = self.plot_magnitude(probe, percentile, conf_interval, units=units, **kwargs)
fig1 = self.plot_phase(probe, percentile, conf_interval, **kwargs)
fig2 = self.plot_polar_bode(probe, percentile, conf_interval, units=units, **kwargs)
if fig is None:
fig = make_subplots(
rows=2, cols=2, specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]]
)
# fmt: on
for data in fig0["data"]:
data.showlegend = False
fig.add_trace(data, row=1, col=1)
for data in fig1["data"]:
data.showlegend = False
fig.add_trace(data, row=2, col=1)
for data in fig2["data"]:
fig.add_trace(data, row=1, col=2)
fig.update_xaxes(fig0.layout.xaxis, row=1, col=1)
fig.update_yaxes(fig0.layout.yaxis, row=1, col=1)
fig.update_xaxes(fig1.layout.xaxis, row=2, col=1)
fig.update_yaxes(fig1.layout.yaxis, row=2, col=1)
fig.update_layout(
polar=dict(
radialaxis=fig2.layout.polar.radialaxis,
angularaxis=fig2.layout.polar.angularaxis,
),
)
return fig
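# --- Usage sketch (illustrative, not part of the library API) ---
# How a stochastic forced (unbalance) response might be inspected.
# `st_forced_resp` is a hypothetical ST_ForcedResponseResults instance; the
# probes and statistics are arbitrary demonstration values.
def _example_forced_response_plot(st_forced_resp):
    # Two probes at node 2, 90 degrees apart, with the median and a 95% band.
    probes = [(2, 0.0), (2, np.pi / 2)]
    return st_forced_resp.plot(probes, percentile=[50], conf_interval=[95], units="m")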
| 36.763713
| 92
| 0.480202
| 6,504
| 60,991
| 4.424816
| 0.057349
| 0.029188
| 0.020779
| 0.016262
| 0.898155
| 0.880051
| 0.850481
| 0.824977
| 0.803954
| 0.763543
| 0
| 0.025359
| 0.403879
| 60,991
| 1,658
| 93
| 36.785887
| 0.766186
| 0.228066
| 0
| 0.7507
| 0
| 0
| 0.094922
| 0.003401
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016807
| false
| 0
| 0.005602
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
13be33895810fafc0b133ddfa170c7d200a7bd44
| 56
|
py
|
Python
|
json_schema_checker/composed/__init__.py
|
zorgulle/json_schema_checker
|
20cac68f899528619e5059f0e1fbee0a0f7219d6
|
[
"MIT"
] | null | null | null |
json_schema_checker/composed/__init__.py
|
zorgulle/json_schema_checker
|
20cac68f899528619e5059f0e1fbee0a0f7219d6
|
[
"MIT"
] | null | null | null |
json_schema_checker/composed/__init__.py
|
zorgulle/json_schema_checker
|
20cac68f899528619e5059f0e1fbee0a0f7219d6
|
[
"MIT"
] | null | null | null |
from .composed import List
from .composed import IntList
| 28
| 29
| 0.839286
| 8
| 56
| 5.875
| 0.625
| 0.510638
| 0.765957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 56
| 2
| 29
| 28
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
13d92122cdb041cef0407b7567e1db27fbf5978f
| 676
|
py
|
Python
|
emoji/coffee.py
|
wbprice/ojimoji
|
7b1a8b5ed0062d1d52e151e7412e1131e3de7924
|
[
"MIT"
] | null | null | null |
emoji/coffee.py
|
wbprice/ojimoji
|
7b1a8b5ed0062d1d52e151e7412e1131e3de7924
|
[
"MIT"
] | null | null | null |
emoji/coffee.py
|
wbprice/ojimoji
|
7b1a8b5ed0062d1d52e151e7412e1131e3de7924
|
[
"MIT"
] | null | null | null |
import numpy
h = .25
s = 1
bitmap = numpy.array([
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0],
[0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,0,1,0,0],
[0,0,1,1,0,1,0,1,0,1,1,0,0,1,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,1,0,0,0],
[0,0,1,1,0,1,0,1,0,1,1,1,0,0,0,0],
[0,0,1,1,1,0,1,0,1,1,1,0,0,0,0,0],
[0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0],
[0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
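# --- Preview sketch (illustrative, not part of the original module) ---
# The module above only defines the bitmap data.  This is one assumed way to
# inspect it quickly as ASCII art, with no extra dependencies; the function
# name is hypothetical.
def _preview(grid=bitmap):
    for row in grid:
        print("".join("#" if cell else "." for cell in row))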
| 30.727273
| 39
| 0.426036
| 265
| 676
| 1.086792
| 0.033962
| 1.118056
| 1.552083
| 1.930556
| 0.888889
| 0.888889
| 0.888889
| 0.881944
| 0.881944
| 0.881944
| 0
| 0.443493
| 0.136095
| 676
| 21
| 40
| 32.190476
| 0.049658
| 0
| 0
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
b9887b38cf06939bc8dd710e9861e2366862482a
| 3,120
|
py
|
Python
|
firelight/interfaces/light.py
|
roshie548/firelight
|
3a5af5e2a1e5784127baebcf1517ffddcaff4062
|
[
"MIT"
] | 16
|
2021-11-29T03:05:31.000Z
|
2022-01-19T05:32:45.000Z
|
firelight/interfaces/light.py
|
roshie548/firelight
|
3a5af5e2a1e5784127baebcf1517ffddcaff4062
|
[
"MIT"
] | null | null | null |
firelight/interfaces/light.py
|
roshie548/firelight
|
3a5af5e2a1e5784127baebcf1517ffddcaff4062
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from .color import Color
class LightSystem(ABC):
@classmethod
def __subclasshook__(cls, subclass):
return (hasattr(subclass, 'set_transition_time')
and callable(subclass.set_transition_time)
and hasattr(subclass, 'discover_lights')
and callable(subclass.discover_lights)
and hasattr(subclass, 'set_color_all_lights')
and callable(subclass.set_color_all_lights))
@abstractmethod
def discover_lights(self):
"""Discover the lights and groups in this LightSystem."""
raise NotImplementedError
@abstractmethod
def set_transition_time(self, transition_time: int):
"""Set how long it takes in milliseconds for colors to transition."""
raise NotImplementedError
@abstractmethod
def set_color(self, color: Color):
"""Set the color of all the lights in the LightSystem."""
raise NotImplementedError
class LightGroup(ABC):
@classmethod
def __subclasshook__(cls, subclass):
return (hasattr(subclass, 'turn_on')
and callable(subclass.turn_on)
and hasattr(subclass, 'turn_off')
and callable(subclass.turn_off)
and hasattr(subclass, 'set_transition_time')
and callable(subclass.set_transition_time)
and hasattr(subclass, 'set_color')
and callable(subclass.set_color))
@abstractmethod
def turn_on(self):
"""Turn on the lights in this group."""
raise NotImplementedError
@abstractmethod
def turn_off(self):
"""Turn off the lights in this group."""
raise NotImplementedError
@abstractmethod
def set_transition_time(self, transition_time: int):
"""Set how long it takes in milliseconds for colors to transition."""
raise NotImplementedError
@abstractmethod
def set_color(self, color: Color):
"""Set the color of this light."""
raise NotImplementedError
class LightDevice(ABC):
@classmethod
def __subclasshook__(cls, subclass):
return (hasattr(subclass, 'turn_on')
and callable(subclass.turn_on)
and hasattr(subclass, 'turn_off')
and callable(subclass.turn_off)
and hasattr(subclass, 'set_transition_time')
and callable(subclass.set_transition_time)
and hasattr(subclass, 'set_color')
and callable(subclass.set_color))
@abstractmethod
def turn_on(self):
"""Turn on this light."""
raise NotImplementedError
@abstractmethod
def turn_off(self):
"""Turn off the light."""
raise NotImplementedError
@abstractmethod
def set_transition_time(self, transition_time: int):
"""Set how long it takes in milliseconds for colors to transition."""
raise NotImplementedError
@abstractmethod
def set_color(self, color: Color):
"""Set the color of this light."""
raise NotImplementedError
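# --- Usage sketch (illustrative, not part of the original interfaces) ---
# A minimal in-memory implementation showing how the LightDevice ABC above is
# meant to be satisfied.  The class and attribute names are hypothetical; a real
# backend would drive actual hardware instead of storing state.
class _FakeLightDevice(LightDevice):
    def __init__(self):
        self._on = False
        self._transition_ms = 0
        self._color = None

    def turn_on(self):
        """Turn on this light."""
        self._on = True

    def turn_off(self):
        """Turn off the light."""
        self._on = False

    def set_transition_time(self, transition_time: int):
        """Set how long it takes in milliseconds for colors to transition."""
        self._transition_ms = transition_time

    def set_color(self, color: Color):
        """Set the color of this light."""
        self._color = color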
| 32.842105
| 77
| 0.641026
| 333
| 3,120
| 5.822823
| 0.138138
| 0.068076
| 0.107788
| 0.169159
| 0.856627
| 0.814853
| 0.814853
| 0.814853
| 0.814853
| 0.772047
| 0
| 0
| 0.277885
| 3,120
| 94
| 78
| 33.191489
| 0.86063
| 0.148077
| 0
| 0.833333
| 0
| 0
| 0.053805
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212121
| false
| 0
| 0.030303
| 0.045455
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b9dc15c3ca6876833207138ba4d65fbd0be25acd
| 61,341
|
py
|
Python
|
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/test_streaming_e2e.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | null | null | null |
"""Streaming integration tests."""
# pylint:disable=no-self-use,invalid-name,too-many-arguments,too-few-public-methods,line-too-long
# pylint:disable=too-many-statements,too-many-locals,too-many-lines
import threading
import time
import json
from queue import Queue
from splitio.client.factory import get_factory
from tests.helpers.mockserver import SSEMockServer, SplitMockServer
try: # try to import python3 names. fallback to python2
from urllib.parse import parse_qs
except ImportError:
from urlparse import parse_qs
class StreamingIntegrationTests(object):
"""Test streaming operation and failover."""
def test_happiness(self):
"""Test initialization & splits/segment updates."""
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]
},
1: {
'since': 1,
'till': 1,
'splits': []
}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
assert factory.client().get_treatment('maldo', 'split1') == 'on'
time.sleep(1)
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_split_change_event(2))
time.sleep(1)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
split_changes[2] = {
'since': 2,
'till': 3,
'splits': [make_split_with_segment('split2', 2, True, False,
'off', 'user', 'off', 'segment1')]
}
split_changes[3] = {'since': 3, 'till': 3, 'splits': []}
segment_changes[('segment1', -1)] = {
'name': 'segment1',
'added': ['maldo'],
'removed': [],
'since': -1,
'till': 1
}
segment_changes[('segment1', 1)] = {'name': 'segment1', 'added': [],
'removed': [], 'since': 1, 'till': 1}
sse_server.publish(make_split_change_event(3))
time.sleep(1)
sse_server.publish(make_segment_change_event('segment1', 1))
time.sleep(1)
assert factory.client().get_treatment('pindon', 'split2') == 'off'
assert factory.client().get_treatment('maldo', 'split2') == 'on'
# Validate the SSE request
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after first notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after second notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Segment change notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/segment1?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until segment1 since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/segment1?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
def test_occupancy_flicker(self):
"""Test that changes in occupancy switch between polling & streaming properly."""
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]
},
1: {'since': 1, 'till': 1, 'splits': []}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000, 'featuresRefreshRate': 10}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
time.sleep(2)
# Get a hook of the task so we can query its status
task = factory._sync_manager._synchronizer._split_tasks.split_task._task # pylint:disable=protected-access
assert not task.running()
assert factory.client().get_treatment('maldo', 'split1') == 'on'
# Make a change in the BE but don't send the event.
# After dropping occupancy, the sdk should switch to polling
# and perform a syncAll that gets this change
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_occupancy('control_pri', 0))
sse_server.publish(make_occupancy('control_sec', 0))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
assert task.running()
# We make another change in the BE and don't send the event.
# We restore occupancy, and it should be fetched by the
# sync all after streaming is restored.
split_changes[2] = {
'since': 2,
'till': 3,
'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]
}
split_changes[3] = {'since': 3, 'till': 3, 'splits': []}
sse_server.publish(make_occupancy('control_pri', 1))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert not task.running()
# Now we make another change and send an event so it's propagated
split_changes[3] = {
'since': 3,
'till': 4,
'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]
}
split_changes[4] = {'since': 4, 'till': 4, 'splits': []}
sse_server.publish(make_split_change_event(4))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
# Kill the split
split_changes[4] = {
'since': 4,
'till': 5,
'splits': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)]
}
split_changes[5] = {'since': 5, 'till': 5, 'splits': []}
sse_server.publish(make_split_kill_event('split1', 'frula', 5))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'frula'
# Validate the SSE request
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after first notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after second notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=4'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Split kill
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=4'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=5'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
def test_start_without_occupancy(self):
"""Test an SDK starting with occupancy on 0 and switching to streamin afterwards."""
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]
},
1: {'since': 1, 'till': 1, 'splits': []}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 0))
sse_server.publish(make_occupancy('control_sec', 0))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000, 'featuresRefreshRate': 10}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
time.sleep(2)
# Get a hook of the task so we can query its status
task = factory._sync_manager._synchronizer._split_tasks.split_task._task # pylint:disable=protected-access
assert task.running()
assert factory.client().get_treatment('maldo', 'split1') == 'on'
# Make a change in the BE but don't send the event.
# After restoring occupancy, the sdk should switch back to streaming
# and perform a syncAll that gets this change
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_occupancy('control_sec', 1))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
assert not task.running()
# Validate the SSE request
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after push down
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after push restored
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Second iteration of previous syncAll
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
def test_streaming_status_changes(self):
"""Test changes between streaming enabled, paused and disabled."""
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]
},
1: {'since': 1, 'till': 1, 'splits': []}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000, 'featuresRefreshRate': 10}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
time.sleep(2)
# Get a handle on the split-synchronization task so we can query its status
task = factory._sync_manager._synchronizer._split_tasks.split_task._task # pylint:disable=protected-access
assert not task.running()
assert factory.client().get_treatment('maldo', 'split1') == 'on'
# Make a change in the BE but don't send the event.
# After streaming is paused, the sdk should switch to polling
# and perform a syncAll that gets this change
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_control_event('STREAMING_PAUSED', 1))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
assert task.running()
# We make another change in the BE and don't send the event.
# We then re-enable streaming, and the change should be picked up
# by the syncAll performed after streaming is restored.
split_changes[2] = {
'since': 2,
'till': 3,
'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]
}
split_changes[3] = {'since': 3, 'till': 3, 'splits': []}
sse_server.publish(make_control_event('STREAMING_ENABLED', 2))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert not task.running()
# Now we make another change and send an event so it's propagated
split_changes[3] = {
'since': 3,
'till': 4,
'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]
}
split_changes[4] = {'since': 4, 'till': 4, 'splits': []}
sse_server.publish(make_split_change_event(4))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
assert not task.running()
split_changes[4] = {
'since': 4,
'till': 5,
'splits': [make_simple_split('split1', 5, True, False, 'off', 'user', True)]
}
split_changes[5] = {'since': 5, 'till': 5, 'splits': []}
sse_server.publish(make_control_event('STREAMING_DISABLED', 2))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert task.running()
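# STREAMING_DISABLED should leave the SDK on polling for good, and the push
# status handler thread is expected to have exited by now.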
assert 'PushStatusHandler' not in [t.name for t in threading.enumerate()]
# Validate the SSE request
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll on push down
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after push is up
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=4'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming disabled
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=4'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=5'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
def test_server_closes_connection(self):
"""Test that if the server closes the connection, the whole flow is retried with BO."""
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]
},
1: {
'since': 1,
'till': 1,
'splits': []
}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000, 'featuresRefreshRate': 100,
'segmentsRefreshRate': 100, 'metricsRefreshRate': 100,
'impressionsRefreshRate': 100, 'eventsPushRate': 100}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
assert factory.client().get_treatment('maldo', 'split1') == 'on'
task = factory._sync_manager._synchronizer._split_tasks.split_task._task # pylint:disable=protected-access
assert not task.running()
time.sleep(1)
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_split_change_event(2))
time.sleep(1)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
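# Close the SSE stream from the server side; the SDK should fall back to polling
# and retry the streaming connection after a backoff.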
sse_server.publish(SSEMockServer.GRACEFUL_REQUEST_END)
time.sleep(1)
assert factory.client().get_treatment('maldo', 'split1') == 'off'
assert task.running()
time.sleep(2) # wait for the backoff to expire so streaming gets re-attached
# re-send initial event AND occupancy
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
time.sleep(2)
assert not task.running()
split_changes[2] = {
'since': 2,
'till': 3,
'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]
}
split_changes[3] = {'since': 3, 'till': 3, 'splits': []}
sse_server.publish(make_split_change_event(3))
time.sleep(1)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert not task.running()
# Validate the SSE requests
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after first notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll on retryable error handling
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth after connection breaks
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected again
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after new notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
def test_ably_errors_handling(self):
"""Test incoming ably errors and validate its handling."""
import logging
logger = logging.getLogger('splitio')
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
auth_server_response = {
'pushEnabled': True,
'token': ('eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.'
'eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pO'
'RFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjcmliZVwiXSxcIk1UWXlNVGN4T1RRNE13P'
'T1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcIjpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm'
'9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJ'
'zXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRh'
'dGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFibHktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4c'
'CI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0MDk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5E'
'vJh17WlOlAKhcD0')
}
split_changes = {
-1: {
'since': -1,
'till': 1,
'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]
},
1: {'since': 1, 'till': 1, 'splits': []}
}
segment_changes = {}
split_backend_requests = Queue()
split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests,
auth_server_response)
sse_requests = Queue()
sse_server = SSEMockServer(sse_requests)
split_backend.start()
sse_server.start()
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
kwargs = {
'sdk_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'events_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'auth_api_base_url': 'http://localhost:%d/api' % split_backend.port(),
'streaming_api_base_url': 'http://localhost:%d' % sse_server.port(),
'config': {'connectTimeout': 10000, 'featuresRefreshRate': 10}
}
factory = get_factory('some_apikey', **kwargs)
factory.block_until_ready(1)
assert factory.ready
time.sleep(2)
# Get a handle on the split-synchronization task so we can query its status
task = factory._sync_manager._synchronizer._split_tasks.split_task._task # pylint:disable=protected-access
assert not task.running()
assert factory.client().get_treatment('maldo', 'split1') == 'on'
# Make a change in the BE but don't send the event.
# We'll send an ignorable error and check that nothing happens
split_changes[1] = {
'since': 1,
'till': 2,
'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]
}
split_changes[2] = {'since': 2, 'till': 2, 'splits': []}
sse_server.publish(make_ably_error_event(60000, 600))
time.sleep(1)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert not task.running()
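# 40145 with status 401 is treated as a retryable ably error: the SDK should drop
# the connection, poll in the meantime, and re-attempt the stream.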
sse_server.publish(make_ably_error_event(40145, 401))
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
time.sleep(3)
assert task.running()
assert factory.client().get_treatment('maldo', 'split1') == 'off'
# Re-publish initial events so that the retry succeeds
sse_server.publish(make_initial_event())
sse_server.publish(make_occupancy('control_pri', 2))
sse_server.publish(make_occupancy('control_sec', 2))
time.sleep(3)
assert not task.running()
# Assert streaming is working properly
split_changes[2] = {
'since': 2,
'till': 3,
'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]
}
split_changes[3] = {'since': 3, 'till': 3, 'splits': []}
sse_server.publish(make_split_change_event(3))
time.sleep(2)
assert factory.client().get_treatment('maldo', 'split1') == 'on'
assert not task.running()
# Send a non-retryable ably error
sse_server.publish(make_ably_error_event(40200, 402))
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
time.sleep(3)
# Assert the sync task is running and the streaming status handler thread has exited
assert task.running()
assert 'PushStatusHandler' not in [t.name for t in threading.enumerate()]
# Validate the SSE requests
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
sse_request = sse_requests.get()
assert sse_request.method == 'GET'
path, qs = sse_request.path.split('?', 1)
assert path == '/event-stream'
qs = parse_qs(qs)
assert qs['accessToken'][0] == (
'eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05'
'US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk1UW'
'XlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zZWdtZW50c1wiOltcInN1YnNjc'
'mliZVwiXSxcIk1UWXlNVGN4T1RRNE13PT1fTWpBNE16Y3pORFUxTWc9PV9zcGxpdHNcI'
'jpbXCJzdWJzY3JpYmVcIl0sXCJjb250cm9sX3ByaVwiOltcInN1YnNjcmliZVwiLFwiY'
'2hhbm5lbC1tZXRhZGF0YTpwdWJsaXNoZXJzXCJdLFwiY29udHJvbF9zZWNcIjpbXCJzd'
'WJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXX0iLCJ4LWFib'
'HktY2xpZW50SWQiOiJjbGllbnRJZCIsImV4cCI6MTYwNDEwMDU5MSwiaWF0IjoxNjA0M'
'Dk2OTkxfQ.aP9BfR534K6J9h8gfDWg_CQgpz5EvJh17WlOlAKhcD0'
)
assert set(qs['channels'][0].split(',')) == set(['MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'[?occupancy=metrics.publishers]control_pri',
'[?occupancy=metrics.publishers]control_sec'])
assert qs['v'][0] == '1.1'
# Initial apikey validation
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/segmentChanges/__SOME_INVALID_SEGMENT__?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Initial splits fetch
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=-1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after streaming connected
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll retriable error
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=1'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Auth again
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/auth'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after push is up
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Fetch after notification
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=2'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Iteration until since == till
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# SyncAll after non recoverable ably error
req = split_backend_requests.get()
assert req.method == 'GET'
assert req.path == '/api/splitChanges?since=3'
assert req.headers['authorization'] == 'Bearer some_apikey'
# Cleanup
destroy_event = threading.Event()
factory.destroy(destroy_event)
destroy_event.wait()
sse_server.publish(sse_server.GRACEFUL_REQUEST_END)
sse_server.stop()
split_backend.stop()
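# The helpers below build the raw SSE payloads the mock server pushes to the SDK:
# split/segment change notifications, occupancy metadata, control events and ably errors.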
def make_split_change_event(change_number):
"""Make a split change event."""
return {
'event': 'message',
'data': json.dumps({
'id':'TVUsxaabHs:0:0',
'clientId':'pri:MzM0ODI1MTkxMw==',
'timestamp': change_number-1,
'encoding':'json',
'channel':'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'data': json.dumps({
'type': 'SPLIT_UPDATE',
'changeNumber': change_number
})
})
}
def make_split_kill_event(name, default_treatment, change_number):
"""Make a split change event."""
return {
'event': 'message',
'data': json.dumps({
'id':'TVUsxaabHs:0:0',
'clientId':'pri:MzM0ODI1MTkxMw==',
'timestamp': change_number-1,
'encoding':'json',
'channel':'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_splits',
'data': json.dumps({
'type': 'SPLIT_KILL',
'splitName': name,
'defaultTreatment': default_treatment,
'changeNumber': change_number
})
})
}
def make_initial_event():
"""Make a split change event."""
return {'id':'TVUsxaabHs:0:0'}
def make_occupancy(channel, publishers):
"""Make an occupancy event."""
return {
'event': 'message',
'data': json.dumps({
'id':'aP6EuhrcUm:0:0',
'timestamp':1604325712734,
'encoding': 'json',
'channel': "[?occupancy=metrics.publishers]%s" % channel,
'data': json.dumps({'metrics': {'publishers': publishers}}),
'name':'[meta]occupancy'
})
}
def make_segment_change_event(name, change_number):
"""Make a split change event."""
return {
'event': 'message',
'data': json.dumps({
'id':'TVUsxaabHs:0:0',
'clientId':'pri:MzM0ODI1MTkxMw==',
'timestamp': change_number-1,
'encoding':'json',
'channel':'MTYyMTcxOTQ4Mw==_MjA4MzczNDU1Mg==_segments',
'data': json.dumps({
'type': 'SEGMENT_UPDATE',
'segmentName': name,
'changeNumber': change_number
})
})
}
def make_control_event(control_type, timestamp):
"""Make a control event."""
return {
'event': 'message',
'data': json.dumps({
'id':'TVUsxaabHs:0:0',
'clientId':'pri:MzM0ODI1MTkxMw==',
'timestamp': timestamp,
'encoding':'json',
'channel':'[?occupancy=metrics.publishers]control_pri',
'data': json.dumps({
'type': 'CONTROL',
'controlType': control_type,
})
})
}
def make_ably_error_event(code, status):
"""Make a control event."""
return {
'event': 'error',
'data': json.dumps({
'message':'Invalid accessToken in request: sarasa',
'code': code,
'statusCode': status,
'href':"https://help.ably.io/error/%d" % code
})
}
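# make_simple_split and make_split_with_segment return minimal split definitions in
# /splitChanges format: a single condition whose 100%-sized partition decides the
# treatment returned for matching keys.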
def make_simple_split(name, cn, active, killed, default_treatment, tt, on):
"""Make a simple split."""
return {
'trafficTypeName': tt,
'name': name,
'seed': 1699838640,
'status': 'ACTIVE' if active else 'ARCHIVED',
'changeNumber': cn,
'killed': killed,
'defaultTreatment': default_treatment,
'conditions': [
{
'matcherGroup': {
'combiner': 'AND',
'matchers': [
{
'matcherType': 'ALL_KEYS',
'negate': False,
'userDefinedSegmentMatcherData': None,
'whitelistMatcherData': None
}
]
},
'partitions': [
{'treatment': 'on' if on else 'off', 'size': 100},
{'treatment': 'off' if on else 'on', 'size': 0}
]
}
]
}
def make_split_with_segment(name, cn, active, killed, default_treatment,
tt, on, segment):
"""Make a split with a segment."""
return {
'trafficTypeName': tt,
'name': name,
'seed': cn,
'status': 'ACTIVE' if active else 'ARCHIVED',
'changeNumber': cn,
'killed': killed,
'defaultTreatment': default_treatment,
'configurations': {
'on': '{\'size\':15,\'test\':20}'
},
'conditions': [
{
'matcherGroup': {
'combiner': 'AND',
'matchers': [
{
'matcherType': 'IN_SEGMENT',
'negate': False,
'userDefinedSegmentMatcherData': {'segmentName': segment},
'whitelistMatcherData': None
}
]
},
'partitions': [{
'treatment': 'on' if on else 'off',
'size': 100
}]
}
]
}
| 43.137131
| 115
| 0.600887
| 5,530
| 61,341
| 6.487523
| 0.065461
| 0.051929
| 0.046159
| 0.044236
| 0.922232
| 0.916072
| 0.90509
| 0.900045
| 0.892519
| 0.891125
| 0
| 0.039155
| 0.285959
| 61,341
| 1,421
| 116
| 43.167488
| 0.779932
| 0.07517
| 0
| 0.843049
| 0
| 0
| 0.323108
| 0.199537
| 0
| 0
| 0
| 0
| 0.268161
| 1
| 0.013453
| false
| 0
| 0.008969
| 0
| 0.03139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9e0c71df07f6cc03e495d11899558d7e577552a
| 3,803
|
py
|
Python
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 21
|
2018-11-20T15:58:39.000Z
|
2022-03-15T19:57:24.000Z
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 732
|
2018-11-21T18:33:26.000Z
|
2022-03-31T16:16:24.000Z
|
repos/system_upgrade/common/actors/systemfacts/tests/test_systemfacts_selinux.py
|
sm00th/leapp-repository
|
1c171ec3a5f9260a3c6f84a9b15cad78a875ac61
|
[
"Apache-2.0"
] | 85
|
2018-11-20T17:55:00.000Z
|
2022-03-29T09:40:31.000Z
|
import warnings
import pytest
from leapp.libraries.actor.systemfacts import get_selinux_status
from leapp.models import SELinuxFacts
no_selinux = False
try:
import selinux
except ImportError:
no_selinux = True
warnings.warn(
'Tests which use `selinux` will be skipped'
' due to library unavailability.', ImportWarning)
reason_to_skip_msg = "Selinux is not available"
# FIXME: create valid tests...
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_enabled_enforcing(monkeypatch):
"""
Test case SELinux is enabled in enforcing mode
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 1])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': True,
'enabled': True,
'runtime_mode': 'enforcing',
'static_mode': 'enforcing'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_enabled_permissive(monkeypatch):
"""
Test case SELinux is enabled in permissive mode
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 0])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': True,
'enabled': True,
'runtime_mode': 'permissive',
'static_mode': 'permissive'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_disabled(monkeypatch):
"""
Test case SELinux is disabled
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 0])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': False,
'enabled': False,
'runtime_mode': 'permissive',
'static_mode': 'permissive'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
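# Helper whose instantiation raises OSError, emulating selinux_getenforcemode failing
# when the SELinux config file cannot be read.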
class MockNoConfigFileOSError(object):
def __init__(self):
raise OSError
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_disabled_no_config_file(monkeypatch):
"""
Test case SELinux is disabled and the config file is missing
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', MockNoConfigFileOSError)
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': False,
'enabled': False,
'runtime_mode': 'permissive',
'static_mode': 'disabled'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
| 38.414141
| 83
| 0.674993
| 400
| 3,803
| 6.1675
| 0.1925
| 0.145926
| 0.202675
| 0.094852
| 0.80381
| 0.80381
| 0.803405
| 0.733279
| 0.733279
| 0.733279
| 0
| 0.007343
| 0.212201
| 3,803
| 98
| 84
| 38.806122
| 0.816088
| 0.048383
| 0
| 0.617647
| 0
| 0
| 0.23412
| 0.073075
| 0
| 0
| 0
| 0.010204
| 0.058824
| 1
| 0.073529
| false
| 0
| 0.102941
| 0
| 0.191176
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b9ffed8a41299969ab07da01999635758df5ba4f
| 11,469
|
py
|
Python
|
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
utils/data_loader.py
|
elieser1101/loglizer
|
985c5f582fbbe4d6365184086ac091134a5b5d07
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Shilin He'
import pandas as pd
import os
import numpy as np
def hdfs_data_loader(para):
""" load the log sequence matrix and labels from the file path.
Args:
--------
para: the parameters dictionary
Returns:
--------
raw_data: log sequences matrix
label_data: labels matrix
"""
file_path = para['path'] + para['log_seq_file_name']
label_path = para['path'] + para['label_file_name']
# load log sequence matrix
pre_df = pd.read_csv(file_path, nrows=1, header=None, delimiter=r'\s+')
columns = pre_df.columns.tolist()
# remove the last column of block name
use_cols = columns[:-1]
data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, usecols =use_cols, dtype =int)
raw_data = data_df.as_matrix()
# load labels
label_df = pd.read_csv(label_path, delimiter=r'\s+', header=None, usecols = [0], dtype =int) # usecols must be a list
label_data = label_df.as_matrix()
print("The raw data shape is {} and label shape is {}".format(raw_data.shape, label_data.shape))
assert raw_data.shape[0] == label_data.shape[0]
print('The number of anomaly instances is %d' % sum(label_data))
return raw_data, label_data
def bgl_data_loader(para):
""" load the logs and the log_event_mapping from the file path.
Args:
--------
para: the parameters dictionary
Returns:
--------
raw_data: list of (label, time)
event_mapping_data: a list of event index, where each row index indicates a corresponding log
"""
file_path = para['path'] + para['log_file_name']
event_mapping_path = para['path'] + para['log_event_mapping']
# load data
data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, names = ['label','time'], usecols = para['select_column']) #, parse_dates = [1], date_parser=dateparse)
# convert to date time format
data_df['time'] = pd.to_datetime(data_df['time'], format="%Y-%m-%d-%H.%M.%S.%f")
# calculate the time interval since the start time
data_df['seconds_since'] = (data_df['time']-data_df['time'][0]).dt.total_seconds().astype(int)
# get the label for each log
data_df['label'] = (data_df['label'] != '-').astype(int)
raw_data = data_df[['label','seconds_since']].as_matrix()
# load the event mapping list
event_mapping = pd.read_csv(event_mapping_path, delimiter=r'\s+', header=None, usecols = [0], dtype =int)
event_mapping_data = event_mapping.as_matrix()
print("The raw data shape is {} and label shape is {}".format(raw_data.shape, event_mapping_data.shape))
assert raw_data.shape[0] == event_mapping_data.shape[0]
print('The number of anomaly logs is %d, but it requires further processing' % sum(raw_data[:, 0]))
return raw_data, event_mapping_data
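# bgl_preprocess_data below slices the time-ordered logs into windows of window_size
# hours advanced by step_size hours, builds one event-count row per window and marks
# the window anomalous if any log inside it is labelled.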
def bgl_preprocess_data(para, raw_data, event_mapping_data):
""" split logs into sliding windows, built an event count matrix and get the corresponding label
Args:
--------
para: the parameters dictionary
raw_data: list of (label, time)
event_mapping_data: a list of event index, where each row index indicates a corresponding log
Returns:
--------
event_count_matrix: event count matrix, where each row is an instance (log sequence vector)
labels: a list of labels, 1 represents anomaly
"""
# create the directory for saving the sliding windows (start_index, end_index), which can be directly loaded in future running
if not os.path.exists(para['save_path']):
os.mkdir(para['save_path'])
log_size = raw_data.shape[0]
sliding_file_path = para['save_path']+'sliding_'+str(para['window_size'])+'h_'+str(para['step_size'])+'h.csv'
#=================divide into sliding windows=============#
start_end_index_list = []  # list of tuples; each tuple contains two numbers marking the start and end of a sliding time window
label_data, time_data = raw_data[:,0], raw_data[:, 1]
if not os.path.exists(sliding_file_path):
# split into sliding window
start_time = time_data[0]
start_index = 0
end_index = 0
# get the first start, end index, end time
for cur_time in time_data:
if cur_time < start_time + para['window_size']*3600:
end_index += 1
end_time = cur_time
else:
start_end_pair=tuple((start_index,end_index))
start_end_index_list.append(start_end_pair)
break
# move the start and end index until next sliding window
while end_index < log_size:
start_time = start_time + para['step_size']*3600
end_time = end_time + para['step_size']*3600
for i in range(start_index,end_index):
if time_data[i] < start_time:
i+=1
else:
break
for j in range(end_index, log_size):
if time_data[j] < end_time:
j+=1
else:
break
start_index = i
end_index = j
start_end_pair = tuple((start_index, end_index))
start_end_index_list.append(start_end_pair)
inst_number = len(start_end_index_list)
print('there are %d instances (sliding windows) in this dataset\n'%inst_number)
np.savetxt(sliding_file_path,start_end_index_list,delimiter=',',fmt='%d')
else:
print('Loading start_end_index_list from file')
start_end_index_list = pd.read_csv(sliding_file_path, header=None).as_matrix()
inst_number = len(start_end_index_list)
print('there are %d instances (sliding windows) in this dataset' % inst_number)
# get all the log indexes in each time window by ranging from start_index to end_index
expanded_indexes_list=[]
for t in range(inst_number):
index_list = []
expanded_indexes_list.append(index_list)
for i in range(inst_number):
start_index = start_end_index_list[i][0]
end_index = start_end_index_list[i][1]
for l in range(start_index, end_index):
expanded_indexes_list[i].append(l)
event_mapping_data = [row[0] for row in event_mapping_data]
event_num = len(list(set(event_mapping_data)))
print('There are %d log events'%event_num)
#=================get labels and event count of each sliding window =============#
labels = []
event_count_matrix = np.zeros((inst_number,event_num))
for j in range(inst_number):
label = 0  # 0 represents success, 1 represents failure
for k in expanded_indexes_list[j]:
event_index = event_mapping_data[k]
event_count_matrix[j, event_index] += 1
if label_data[k]:
label = 1
continue
labels.append(label)
assert inst_number == len(labels)
print("Among all instances, %d are anomalies"%sum(labels))
assert event_count_matrix.shape[0] == len(labels)
return event_count_matrix, labels
def deepia_data_loader(para):
""" load the logs and the log_event_mapping from the file path.
Args:
--------
para: the parameters dictionary
Returns:
--------
raw_data: list of (label, time)
event_mapping_data: a list of event index, where each row index indicates a corresponding log
"""
file_path = para['path'] + para['log_file_name']
event_mapping_path = para['path'] + para['log_event_mapping']
# load data
data_df = pd.read_csv(file_path, delimiter=r'\s+', header=None, names=['month', 'day', 'hour'],
usecols=para['select_column']) # , parse_dates = [1], date_parser=dateparse)
# convert to date time format
data_df = data_df[['month', 'day', 'hour']].apply(lambda x: list(map(str, x)))
data_df['time'] = data_df[['month', 'day', 'hour']].apply(lambda x: '-'.join(x), axis=1) #
data_df['time'] = pd.to_datetime(data_df['time'], format="%b-%d-%H:%M:%S")
# calculate the time interval since the start time
data_df['seconds_since'] = (data_df['time'] - data_df['time'][0]).dt.total_seconds().astype(int)
# get the label for each log
# data_df['label'] = (data_df['label'] != '-').astype(int)
raw_data = data_df[['seconds_since']].as_matrix()
# load the event mapping list
event_mapping = pd.read_csv(event_mapping_path, delimiter=r'\s+', header=None, usecols = [0], dtype =int)
event_mapping_data = event_mapping.as_matrix()
print("The raw data shape is {} and label shape is {}".format(raw_data.shape, event_mapping_data.shape))
assert raw_data.shape[0] == event_mapping_data.shape[0]
#print('The number of anomaly logs is %d, but it requires further processing' % sum(raw_data[:, 0]))
return raw_data, event_mapping_data
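# The deepia_* functions below mirror the BGL loaders, but these logs carry no
# per-line labels, so only the event count matrix is produced.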
def deepia_preprocess_data(para, raw_data, event_mapping_data):
""" split logs into sliding windows, built an event count matrix and get the corresponding label
Args:
--------
para: the parameters dictionary
raw_data: list of (label, time)
event_mapping_data: a list of event index, where each row index indicates a corresponding log
Returns:
--------
event_count_matrix: event count matrix, where each row is an instance (log sequence vector)
labels: a list of labels, 1 represents anomaly
"""
# create the directory for saving the sliding windows (start_index, end_index), which can be directly loaded in future running
if not os.path.exists(para['save_path']):
os.mkdir(para['save_path'])
log_size = raw_data.shape[0]
sliding_file_path = para['save_path']+'sliding_'+str(para['window_size'])+'h_'+str(para['step_size'])+'h.csv'
#=================divide into sliding windows=============#
start_end_index_list = []  # list of tuples; each tuple contains two numbers marking the start and end of a sliding time window
time_data = raw_data[:,0]
if not os.path.exists(sliding_file_path):
# split into sliding window
start_time = time_data[0]
start_index = 0
end_index = 0
# get the first start, end index, end time
for cur_time in time_data:
if cur_time < start_time + para['window_size']*3600:
end_index += 1
end_time = cur_time
else:
start_end_pair=tuple((start_index,end_index))
start_end_index_list.append(start_end_pair)
break
# move the start and end index until next sliding window
while end_index < log_size:
start_time = start_time + para['step_size']*3600
end_time = end_time + para['step_size']*3600
for i in range(start_index,end_index):
if time_data[i] < start_time:
i+=1
else:
break
for j in range(end_index, log_size):
if time_data[j] < end_time:
j+=1
else:
break
start_index = i
end_index = j
start_end_pair = tuple((start_index, end_index))
start_end_index_list.append(start_end_pair)
inst_number = len(start_end_index_list)
print('there are %d instances (sliding windows) in this dataset\n'%inst_number)
np.savetxt(sliding_file_path,start_end_index_list,delimiter=',',fmt='%d')
else:
print('Loading start_end_index_list from file')
start_end_index_list = pd.read_csv(sliding_file_path, header=None).as_matrix()
inst_number = len(start_end_index_list)
print('there are %d instances (sliding windows) in this dataset' % inst_number)
# get all the log indexes in each time window by ranging from start_index to end_index
expanded_indexes_list=[]
for t in range(inst_number):
index_list = []
expanded_indexes_list.append(index_list)
for i in range(inst_number):
start_index = start_end_index_list[i][0]
end_index = start_end_index_list[i][1]
for l in range(start_index, end_index):
expanded_indexes_list[i].append(l)
event_mapping_data = [row[0] for row in event_mapping_data]
event_num = len(list(set(event_mapping_data)))
print('There are %d log events'%event_num)
#=================get labels and event count of each sliding window =============#
event_count_matrix = np.zeros((inst_number,event_num))
for j in range(inst_number):
for k in expanded_indexes_list[j]:
event_index = event_mapping_data[k]
event_count_matrix[j, event_index] += 1
#print("Among all instances, %d are anomalies"%sum(labels))
return event_count_matrix
| 38.877966
| 168
| 0.717674
| 1,839
| 11,469
| 4.231104
| 0.114193
| 0.049351
| 0.045238
| 0.043696
| 0.909009
| 0.89243
| 0.887033
| 0.882534
| 0.870582
| 0.859273
| 0
| 0.007697
| 0.150405
| 11,469
| 294
| 169
| 39.010204
| 0.790846
| 0.300201
| 0
| 0.744444
| 0
| 0
| 0.148532
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 1
| 0.027778
| false
| 0
| 0.016667
| 0
| 0.072222
| 0.077778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a07ddc6734dd5ce8f0853fa4326c144429dfb84
| 5,214
|
py
|
Python
|
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | 1
|
2020-12-30T02:48:40.000Z
|
2020-12-30T02:48:40.000Z
|
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | null | null | null |
imgaug/augmenters/flip.py
|
pAoenix/image-Augmented
|
4acaa7dc48c6167c1716e39e9e78b1cea2067b4a
|
[
"MIT"
] | 2
|
2020-01-14T14:29:49.000Z
|
2021-02-20T07:47:02.000Z
|
"""
Augmenters that apply mirroring/flipping operations to images.
Do not import directly from this file, as the categorization is not final.
Use instead ::
from imgaug import augmenters as iaa
and then e.g. ::
seq = iaa.Sequential([
iaa.Fliplr((0.0, 1.0)),
iaa.Flipud((0.0, 1.0))
])
List of augmenters:
* Fliplr
* Flipud
"""
from __future__ import print_function, division, absolute_import
from .. import parameters as iap
import numpy as np
import six.moves as sm
from .meta import Augmenter
class Fliplr(Augmenter): # pylint: disable=locally-disabled, unused-variable, line-too-long
"""
Flip/mirror input images horizontally.
Parameters
----------
p : number or StochasticParameter, optional(default=0)
Probability of each image to get flipped.
name : string, optional(default=None)
See `Augmenter.__init__()`
deterministic : bool, optional(default=False)
See `Augmenter.__init__()`
random_state : int or np.random.RandomState or None, optional(default=None)
See `Augmenter.__init__()`
Examples
--------
>>> aug = iaa.Fliplr(0.5)
would horizontally flip/mirror 50 percent of all input images.
>>> aug = iaa.Fliplr(1.0)
would horizontally flip/mirror all input images.
"""
def __init__(self, p=0, name=None, deterministic=False, random_state=None):
super(Fliplr, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
self.p = iap.handle_probability_param(p, "p")
def _augment_images(self, images, random_state, parents, hooks):
nb_images = len(images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i in sm.xrange(nb_images):
if samples[i] == 1:
images[i] = np.fliplr(images[i])
return images
def _augment_heatmaps(self, heatmaps, random_state, parents, hooks):
arrs_flipped = self._augment_images(
[heatmaps_i.arr_0to1 for heatmaps_i in heatmaps],
random_state=random_state,
parents=parents,
hooks=hooks
)
for heatmaps_i, arr_flipped in zip(heatmaps, arrs_flipped):
heatmaps_i.arr_0to1 = arr_flipped
return heatmaps
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
nb_images = len(keypoints_on_images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i, keypoints_on_image in enumerate(keypoints_on_images):
if samples[i] == 1:
width = keypoints_on_image.shape[1]
for keypoint in keypoints_on_image.keypoints:
keypoint.x = (width - 1) - keypoint.x
return keypoints_on_images
def get_parameters(self):
return [self.p]
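# Flipud below is the vertical counterpart of Fliplr: it applies np.flipud to images
# and maps each keypoint y coordinate to (height - 1) - y.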
class Flipud(Augmenter): # pylint: disable=locally-disabled, unused-variable, line-too-long
"""
Flip/mirror input images vertically.
Parameters
----------
p : number or StochasticParameter, optional(default=0)
Probability of each image to get flipped.
name : string, optional(default=None)
See `Augmenter.__init__()`
deterministic : bool, optional(default=False)
See `Augmenter.__init__()`
random_state : int or np.random.RandomState or None, optional(default=None)
See `Augmenter.__init__()`
Examples
--------
>>> aug = iaa.Flipud(0.5)
would vertically flip/mirror 50 percent of all input images.
>>> aug = iaa.Flipud(1.0)
would vertically flip/mirror all input images.
"""
def __init__(self, p=0, name=None, deterministic=False, random_state=None):
super(Flipud, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
self.p = iap.handle_probability_param(p, "p")
def _augment_images(self, images, random_state, parents, hooks):
nb_images = len(images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i in sm.xrange(nb_images):
if samples[i] == 1:
images[i] = np.flipud(images[i])
return images
def _augment_heatmaps(self, heatmaps, random_state, parents, hooks):
arrs_flipped = self._augment_images(
[heatmaps_i.arr_0to1 for heatmaps_i in heatmaps],
random_state=random_state,
parents=parents,
hooks=hooks
)
for heatmaps_i, arr_flipped in zip(heatmaps, arrs_flipped):
heatmaps_i.arr_0to1 = arr_flipped
return heatmaps
def _augment_keypoints(self, keypoints_on_images, random_state, parents, hooks):
nb_images = len(keypoints_on_images)
samples = self.p.draw_samples((nb_images,), random_state=random_state)
for i, keypoints_on_image in enumerate(keypoints_on_images):
if samples[i] == 1:
height = keypoints_on_image.shape[0]
for keypoint in keypoints_on_image.keypoints:
keypoint.y = (height - 1) - keypoint.y
return keypoints_on_images
def get_parameters(self):
return [self.p]
| 32.185185
| 103
| 0.652091
| 653
| 5,214
| 4.967841
| 0.194487
| 0.088163
| 0.041924
| 0.054254
| 0.809494
| 0.809494
| 0.809494
| 0.809494
| 0.781134
| 0.781134
| 0
| 0.010194
| 0.247411
| 5,214
| 161
| 104
| 32.385093
| 0.816514
| 0.325086
| 0
| 0.753623
| 0
| 0
| 0.000597
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144928
| false
| 0
| 0.072464
| 0.028986
| 0.362319
| 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a0e0bcfcfbc438530da36eb95d62a35b14a3931
| 33,998
|
py
|
Python
|
modules/platforms/python/pyignite/api/key_value.py
|
DirectXceriD/gridgain
|
093e512a9147e266f83f6fe1cf088c0b037b501c
|
[
"Apache-2.0",
"CC0-1.0"
] | 1
|
2019-03-11T08:52:37.000Z
|
2019-03-11T08:52:37.000Z
|
modules/platforms/python/pyignite/api/key_value.py
|
DirectXceriD/gridgain
|
093e512a9147e266f83f6fe1cf088c0b037b501c
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
modules/platforms/python/pyignite/api/key_value.py
|
DirectXceriD/gridgain
|
093e512a9147e266f83f6fe1cf088c0b037b501c
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
# GridGain Community Edition Licensing
# Copyright 2019 GridGain Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License") modified with Commons Clause
# Restriction; you may not use this file except in compliance with the License. You may obtain a
# copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#
# Commons Clause Restriction
#
# The Software is provided to you by the Licensor under the License, as defined below, subject to
# the following condition.
#
# Without limiting other conditions in the License, the grant of rights under the License will not
# include, and the License does not grant to you, the right to Sell the Software.
# For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you
# under the License to provide to third parties, for a fee or other consideration (including without
# limitation fees for hosting or consulting/ support services related to the Software), a product or
# service whose value derives, entirely or substantially, from the functionality of the Software.
# Any license notice or attribution required by the License must also include this Commons Clause
# License Condition notice.
#
# For purposes of the clause above, the “Licensor” is Copyright 2019 GridGain Systems, Inc.,
# the “License” is the Apache License, Version 2.0, and the Software is the GridGain Community
# Edition software provided with this notice.
from typing import Iterable, Union
from pyignite.queries.op_codes import *
from pyignite.datatypes import (
Map, Bool, Byte, Int, Long, AnyDataArray, AnyDataObject,
)
from pyignite.datatypes.key_value import PeekModes
from pyignite.queries import Query, Response
from pyignite.utils import cache_id
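# Each helper below builds a Query describing one cache operation's request fields,
# performs it over the given connection and returns an APIResult whose status and
# value reflect the server response.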
def cache_put(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache (overwriting existing value if any).
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status if a value
is written, non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_PUT,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
return query_struct.perform(connection, {
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
})
def cache_get(
connection: 'Connection', cache: Union[str, int], key,
key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Retrieves a value from cache by key.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a value
retrieved on success, non-zero status and an error description on failure.
"""
query_struct = Query(
OP_CACHE_GET,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
},
response_config=[
('value', AnyDataObject),
],
)
if result.status != 0:
return result
result.value = result.value['value']
return result
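# Illustrative usage sketch (an addition, not part of the original module): a
# minimal put/get round trip through the two calls above. It assumes an Ignite
# node listening on 127.0.0.1:10800, a cache named 'my_cache' that already
# exists on the cluster, and that `Client` in this client version subclasses
# `Connection`, so the client object itself can be passed to these functions.
def _example_put_get():
    from pyignite import Client
    conn = Client()
    conn.connect('127.0.0.1', 10800)
    assert cache_put(conn, 'my_cache', 'key_1', 42).status == 0
    result = cache_get(conn, 'my_cache', 'key_1')
    print(result.value)  # expected: 42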
def cache_get_all(
connection: 'Connection', cache: Union[str, int], keys: Iterable,
binary=False, query_id=None,
) -> 'APIResult':
"""
Retrieves multiple key-value pairs from cache.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param keys: list of keys or tuples of (key, key_hint),
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a dict, made of
retrieved key-value pairs, non-zero status and an error description
on failure.
"""
query_struct = Query(
OP_CACHE_GET_ALL,
[
('hash_code', Int),
('flag', Byte),
('keys', AnyDataArray()),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'keys': keys,
},
response_config=[
('data', Map),
],
)
if result.status == 0:
result.value = dict(result.value)['data']
return result
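# Hypothetical sketch of a bulk read, following the connection assumptions of
# the example above: keys may be plain values or (key, hint) tuples, and
# IntObject is one of the standard pyignite data types.
def _example_get_all(conn):
    from pyignite.datatypes import IntObject
    result = cache_get_all(conn, 'my_cache', ['key_1', (2, IntObject)])
    if result.status == 0:
        print(result.value)  # dict of the entries that were found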
def cache_put_all(
connection: 'Connection', cache: Union[str, int], pairs: dict,
binary=False, query_id=None,
) -> 'APIResult':
"""
Puts multiple key-value pairs to cache (overwriting existing associations
if any).
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param pairs: dictionary of key-value pairs to save. Each key or value
can be an item of a representable Python type or a tuple of (item, hint),
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status if key-value pairs
are written, non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_PUT_ALL,
[
('hash_code', Int),
('flag', Byte),
('data', Map),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'data': pairs,
},
)
def cache_contains_key(
connection: 'Connection', cache: Union[str, int], key,
key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Returns a value indicating whether given key is present in cache.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a bool value
retrieved on success: `True` when key is present, `False` otherwise,
non-zero status and an error description on failure.
"""
query_struct = Query(
OP_CACHE_CONTAINS_KEY,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
},
response_config=[
('value', Bool),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_contains_keys(
connection: 'Connection', cache: Union[str, int], keys: Iterable,
binary=False, query_id=None,
) -> 'APIResult':
"""
Returns a value indicating whether all given keys are present in cache.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param keys: a list of keys or (key, type hint) tuples,
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a bool value
retrieved on success: `True` when all keys are present, `False` otherwise,
non-zero status and an error description on failure.
"""
query_struct = Query(
OP_CACHE_CONTAINS_KEYS,
[
('hash_code', Int),
('flag', Byte),
('keys', AnyDataArray()),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'keys': keys,
},
response_config=[
('value', Bool),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_get_and_put(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache, and returns the previous value
for that key, or None if there was no such key.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and an old value
or None if a value is written, non-zero status and an error description
in case of error.
"""
query_struct = Query(
OP_CACHE_GET_AND_PUT,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
},
response_config=[
('value', AnyDataObject),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_get_and_replace(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache, returning previous value
for that key, if and only if there is a value currently mapped
for that key.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and an old value
or None on success, non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_GET_AND_REPLACE, [
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
},
response_config=[
('value', AnyDataObject),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_get_and_remove(
connection: 'Connection', cache: Union[str, int], key,
key_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Removes the cache entry with specified key, returning the value.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and an old value
or None, non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_GET_AND_REMOVE, [
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
},
response_config=[
('value', AnyDataObject),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_put_if_absent(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache only if the key
does not already exist.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: (optional) pass True to keep the value in binary form. False
by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_PUT_IF_ABSENT,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
},
response_config=[
('success', Bool),
],
)
if result.status == 0:
result.value = result.value['success']
return result
def cache_get_and_put_if_absent(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache only if the key does not
already exist, returning the value that is already stored, if any.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: (optional) pass True to keep the value in binary form. False
by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and an old value
or None on success, non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_GET_AND_PUT_IF_ABSENT,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
},
response_config=[
('value', AnyDataObject),
],
)
if result.status == 0:
result.value = result.value['value']
return result
def cache_replace(
connection: 'Connection', cache: Union[str, int], key, value,
key_hint=None, value_hint=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache only if the key already exists.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry. Can be of any supported type,
:param value: value for the key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted.
:param binary: pass True to keep the value in binary form. False
by default,
:param query_id: a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a boolean
success code, or non-zero status and an error description if something
has gone wrong.
"""
query_struct = Query(
OP_CACHE_REPLACE,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'value': value,
},
response_config=[
('success', Bool),
],
)
if result.status == 0:
result.value = result.value['success']
return result
def cache_replace_if_equals(
connection: 'Connection', cache: Union[str, int], key, sample, value,
key_hint=None, sample_hint=None, value_hint=None,
binary=False, query_id=None,
) -> 'APIResult':
"""
Puts a value with a given key to cache only if the key already exists
and the stored value equals the provided sample.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry,
:param sample: a sample to compare the stored value with,
:param value: new value for the given key,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param sample_hint: (optional) Ignite data type, for which
the given sample should be converted,
:param value_hint: (optional) Ignite data type, for which the given value
should be converted,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned
as-is in response.query_id. When the parameter is omitted, a random
value is generated,
:return: API result data object. Contains zero status and a boolean
success code, or non-zero status and an error description if something
has gone wrong.
"""
query_struct = Query(
OP_CACHE_REPLACE_IF_EQUALS,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('sample', sample_hint or AnyDataObject),
('value', value_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'sample': sample,
'value': value,
},
response_config=[
('success', Bool),
],
)
if result.status == 0:
result.value = result.value['success']
return result
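# Hedged compare-and-set sketch using the call above: the value is replaced
# only when the stored value equals `sample`. Connection, cache name and the
# pre-existing entry are assumptions carried over from the earlier sketches.
def _example_replace_if_equals(conn):
    result = cache_replace_if_equals(conn, 'my_cache', 'key_1', sample=42, value=43)
    if result.status == 0:
        print(result.value)  # True only if the stored value was 42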
def cache_clear(
connection: 'Connection', cache: Union[str, int], binary=False,
query_id=None,
) -> 'APIResult':
"""
Clears the cache without notifying listeners or cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned
as-is in response.query_id. When the parameter is omitted, a random
value is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_CLEAR,
[
('hash_code', Int),
('flag', Byte),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
},
)
def cache_clear_key(
connection: 'Connection', cache: Union[str, int], key,
key_hint: object=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Clears the cache key without notifying listeners or cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned
as-is in response.query_id. When the parameter is omitted, a random
value is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_CLEAR_KEY,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
},
)
def cache_clear_keys(
connection: 'Connection', cache: Union[str, int], keys: list,
binary=False, query_id=None,
) -> 'APIResult':
"""
Clears the cache keys without notifying listeners or cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param keys: list of keys or tuples of (key, key_hint),
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_CLEAR_KEYS,
[
('hash_code', Int),
('flag', Byte),
('keys', AnyDataArray()),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'keys': keys,
},
)
def cache_remove_key(
connection: 'Connection', cache: Union[str, int], key,
key_hint: object=None, binary=False, query_id=None,
) -> 'APIResult':
"""
Removes the cache entry with the given key, notifying listeners and cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned
as-is in response.query_id. When the parameter is omitted, a random
value is generated,
:return: API result data object. Contains zero status and a boolean
success code, or non-zero status and an error description if something
has gone wrong.
"""
query_struct = Query(
OP_CACHE_REMOVE_KEY,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
},
response_config=[
('success', Bool),
],
)
if result.status == 0:
result.value = result.value['success']
return result
def cache_remove_if_equals(
connection: 'Connection', cache: Union[str, int], key, sample,
key_hint=None, sample_hint=None,
binary=False, query_id=None,
) -> 'APIResult':
"""
Removes an entry with a given key if provided value is equal to
actual value, notifying listeners and cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param key: key for the cache entry,
:param sample: a sample to compare the stored value with,
:param key_hint: (optional) Ignite data type, for which the given key
should be converted,
:param sample_hint: (optional) Ignite data type, for which
the given sample should be converted,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned
as-is in response.query_id. When the parameter is omitted, a random
value is generated,
:return: API result data object. Contains zero status and a boolean
success code, or non-zero status and an error description if something
has gone wrong.
"""
query_struct = Query(
OP_CACHE_REMOVE_IF_EQUALS,
[
('hash_code', Int),
('flag', Byte),
('key', key_hint or AnyDataObject),
('sample', sample_hint or AnyDataObject),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'key': key,
'sample': sample,
},
response_config=[
('success', Bool),
],
)
if result.status == 0:
result.value = result.value['success']
return result
def cache_remove_keys(
connection: 'Connection', cache: Union[str, int], keys: Iterable,
binary=False, query_id=None,
) -> 'APIResult':
"""
Removes entries with given keys, notifying listeners and cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param keys: list of keys or tuples of (key, key_hint),
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_REMOVE_KEYS,
[
('hash_code', Int),
('flag', Byte),
('keys', AnyDataArray()),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'keys': keys,
},
)
def cache_remove_all(
connection: 'Connection', cache: Union[str, int], binary=False,
query_id=None,
) -> 'APIResult':
"""
Removes all entries from cache, notifying listeners and cache writers.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status on success,
non-zero status and an error description otherwise.
"""
query_struct = Query(
OP_CACHE_REMOVE_ALL,
[
('hash_code', Int),
('flag', Byte),
],
query_id=query_id,
)
return query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
},
)
def cache_get_size(
connection: 'Connection', cache: Union[str, int], peek_modes=0,
binary=False, query_id=None,
) -> 'APIResult':
"""
Gets the number of entries in cache.
:param connection: connection to Ignite server,
:param cache: name or ID of the cache,
:param peek_modes: (optional) limit count to near cache partition
(PeekModes.NEAR), primary cache (PeekModes.PRIMARY), or backup cache
(PeekModes.BACKUP). Defaults to all cache partitions (PeekModes.ALL),
:param binary: (optional) pass True to keep the value in binary form.
False by default,
:param query_id: (optional) a value generated by client and returned as-is
in response.query_id. When the parameter is omitted, a random value
is generated,
:return: API result data object. Contains zero status and a number of
cache entries on success, non-zero status and an error description
otherwise.
"""
if not isinstance(peek_modes, (list, tuple)):
if peek_modes == 0:
peek_modes = []
else:
peek_modes = [peek_modes]
query_struct = Query(
OP_CACHE_GET_SIZE,
[
('hash_code', Int),
('flag', Byte),
('peek_modes', PeekModes),
],
query_id=query_id,
)
result = query_struct.perform(
connection,
query_params={
'hash_code': cache_id(cache),
'flag': 1 if binary else 0,
'peek_modes': peek_modes,
},
response_config=[
('count', Long),
],
)
if result.status == 0:
result.value = result.value['count']
return result
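# Closing sketch (an assumption-laden example, not original code): counting only
# the primary copies of the entries with the function above; PeekModes is
# already imported at the top of this module.
def _example_size(conn):
    result = cache_get_size(conn, 'my_cache', peek_modes=PeekModes.PRIMARY)
    if result.status == 0:
        print(result.value)  # number of primary entries in 'my_cache'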
| 33.561698
| 100
| 0.62789
| 4,460
| 33,998
| 4.687444
| 0.063453
| 0.035157
| 0.021142
| 0.017937
| 0.869846
| 0.862336
| 0.851287
| 0.845642
| 0.840381
| 0.834593
| 0
| 0.002955
| 0.283399
| 33,998
| 1,012
| 101
| 33.594862
| 0.855149
| 0.502265
| 0
| 0.725455
| 0
| 0
| 0.086419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038182
| false
| 0
| 0.010909
| 0
| 0.089091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a2c1076d5d797f1927b5d8d8d4594e8e5c92647
| 9,615
|
py
|
Python
|
fast_fine_tuna/fast_fine_tuna.py
|
vinid/fast_fine_tuna
|
2d128f58df0407448cdb2e179972573afa7ac636
|
[
"MIT"
] | null | null | null |
fast_fine_tuna/fast_fine_tuna.py
|
vinid/fast_fine_tuna
|
2d128f58df0407448cdb2e179972573afa7ac636
|
[
"MIT"
] | null | null | null |
fast_fine_tuna/fast_fine_tuna.py
|
vinid/fast_fine_tuna
|
2d128f58df0407448cdb2e179972573afa7ac636
|
[
"MIT"
] | null | null | null |
from transformers import AutoModel, AutoModelForSequenceClassification, AutoTokenizer, AutoConfig
from sklearn.model_selection import StratifiedKFold
import numpy as np
import torch
from fast_fine_tuna.dataset import MainDatasetDouble, MainDataset
from transformers import AdamW
from torch.utils.data import DataLoader
import os
from tqdm import tqdm
from fast_fine_tuna.models import MiniModel
from torch import nn
class FastFineTuna:
def __init__(self, model_name, tokenizer_name):
self.model_name = model_name
self.tokenizer_name = tokenizer_name
self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
def cross_validate_fit(self, texts, labels, splits=5, epochs=5, batch_size=16, learning_rate=5e-5):
config = AutoConfig.from_pretrained(self.model_name, num_labels=len(set(labels)),
finetuning_task="custom")
tokenizer = AutoTokenizer.from_pretrained(self.tokenizer_name)
texts = np.array(texts)
labels = np.array(labels)
skf = StratifiedKFold(n_splits=splits)
original = []
predicted = []
for train_index, test_index in skf.split(texts, labels):
model = AutoModelForSequenceClassification.from_pretrained(self.model_name, config=config)
X_train, X_test = texts[train_index].tolist(), texts[test_index].tolist()
y_train, y_test = labels[train_index].tolist(), labels[test_index].tolist()
# not the smartest way to do this, but faster to code up
tokenized_train = tokenizer(X_train, truncation=True, padding=True)
tokenized_test = tokenizer(X_test, truncation=True, padding=True)
train_dataset = MainDataset(tokenized_train, y_train)
test_dataset = MainDataset(tokenized_test, y_test)
model.to(self.device)
model.train()
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
optim = AdamW(model.parameters(), lr=learning_rate)
pbar = tqdm(total=epochs, position=0, leave=True)
for epoch in range(epochs):
pbar.update(1)
for batch in train_loader:
optim.zero_grad()
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
lab = batch['labels'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask, labels=lab)
loss = outputs[0]
loss.backward()
optim.step()
pbar.close()
model.eval()
loader = DataLoader(test_dataset, batch_size=batch_size)
original.extend(y_test)
with torch.no_grad():
for batch in loader:
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask)
predicted.extend(torch.argmax(outputs["logits"], axis=1).cpu().numpy().tolist())
del model
return original, predicted
def train_and_save(self, texts, labels, path, epochs=5, batch_size=16, learning_rate=5e-5):
config = AutoConfig.from_pretrained(self.model_name, num_labels=len(set(labels)),
finetuning_task="custom")
model = AutoModelForSequenceClassification.from_pretrained(self.model_name, config=config)
tokenizer = AutoTokenizer.from_pretrained(self.tokenizer_name)
tokenized_train = tokenizer(texts, truncation=True, padding=True)
train_dataset = MainDataset(tokenized_train, labels)
model.to(self.device)
model.train()
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
optim = AdamW(model.parameters(), lr=learning_rate)
pbar = tqdm(total=epochs, position=0, leave=True)
for epoch in range(epochs):
pbar.update(1)
for batch in train_loader:
optim.zero_grad()
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
lab = batch['labels'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask, labels=lab)
loss = outputs[0]
loss.backward()
optim.step()
pbar.close()
os.makedirs(path)
model.save_pretrained(path)
tokenizer.save_pretrained(path)
class DoubleFastFineTuna:
def __init__(self, model_name, tokenizer_name):
self.model_name = model_name
self.tokenizer_name = tokenizer_name
self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
def cross_validate_fit(self, texts, labels_A, labels_B, splits=5, epochs=5, batch_size=16, learning_rate=5e-5,
):
tokenizer = AutoTokenizer.from_pretrained(self.tokenizer_name)
texts = np.array(texts)
labels_A = np.array(labels_A)
labels_B = np.array(labels_B)
skf = StratifiedKFold(n_splits=splits)
original_A = []
original_B = []
predicted_A = []
predicted_B = []
for train_index, test_index in skf.split(texts, labels_A, labels_B):
model = MiniModel(self.model_name, len(set(labels_A)), len(set(labels_B)))
X_train, X_test = texts[train_index].tolist(), texts[test_index].tolist()
y_A_train, y_A_test = labels_A[train_index].tolist(), labels_A[test_index].tolist()
y_B_train, y_B_test = labels_B[train_index].tolist(), labels_B[test_index].tolist()
# not the smartest way to do this, but faster to code up
tokenized_train = tokenizer(X_train, truncation=True, padding=True)
tokenized_test = tokenizer(X_test, truncation=True, padding=True)
train_dataset = MainDatasetDouble(tokenized_train, y_A_train, y_B_train)
test_dataset = MainDatasetDouble(tokenized_test, y_A_test, y_B_test)
model.to(self.device)
model.train()
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
optim = AdamW(model.parameters(), lr=learning_rate)
pbar = tqdm(total=epochs, position=0, leave=True)
for epoch in range(epochs):
pbar.update(1)
for batch in train_loader:
optim.zero_grad()
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
lab_A = batch['labels_A'].to(self.device)
lab_B = batch['labels_B'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask)
loss = nn.CrossEntropyLoss()
loss_A = loss(outputs[0], lab_A)
loss_B = loss(outputs[1], lab_B)
loss = loss_A + loss_B
loss.backward()
optim.step()
pbar.close()
model.eval()
loader = DataLoader(test_dataset, batch_size=batch_size)
original_A.extend(y_A_test)
original_B.extend(y_B_test)
with torch.no_grad():
for batch in loader:
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask)
predicted_A.extend(torch.argmax(outputs[0], axis=1).cpu().numpy().tolist())
predicted_B.extend(torch.argmax(outputs[1], axis=1).cpu().numpy().tolist())
del model
return original_A, original_B, predicted_A, predicted_B
def train_and_save(self, texts, labels, path, epochs=5, batch_size=16, learning_rate=5e-5):
config = AutoConfig.from_pretrained(self.model_name, num_labels=len(set(labels)),
finetuning_task="custom")
model = AutoModelForSequenceClassification.from_pretrained(self.model_name, config=config)
tokenizer = AutoTokenizer.from_pretrained(self.tokenizer_name)
tokenized_train = tokenizer(texts, truncation=True, padding=True)
train_dataset = MainDataset(tokenized_train, labels)
model.to(self.device)
model.train()
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
optim = AdamW(model.parameters(), lr=learning_rate)
pbar = tqdm(total=epochs, position=0, leave=True)
for epoch in range(epochs):
pbar.update(1)
for batch in train_loader:
optim.zero_grad()
input_ids = batch['input_ids'].to(self.device)
attention_mask = batch['attention_mask'].to(self.device)
lab = batch['labels'].to(self.device)
outputs = model(input_ids, attention_mask=attention_mask, labels=lab)
loss = outputs[0]
loss.backward()
optim.step()
pbar.close()
os.makedirs(path)
model.save_pretrained(path)
tokenizer.save_pretrained(path)
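# Illustrative usage sketch (not part of the original file): cross-validating a
# tiny classifier with the class above. The model/tokenizer names and toy data
# are placeholders; a real run needs downloadable transformer weights and far
# more examples than shown here.
def _example_fast_fine_tuna():
    texts = ["good movie", "bad movie", "great plot", "poor acting"] * 10
    labels = [1, 0, 1, 0] * 10
    tuna = FastFineTuna("distilbert-base-uncased", "distilbert-base-uncased")
    original, predicted = tuna.cross_validate_fit(texts, labels, splits=2, epochs=1)
    accuracy = sum(o == p for o, p in zip(original, predicted)) / len(original)
    print(accuracy)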
| 40.914894
| 114
| 0.615081
| 1,121
| 9,615
| 5.048171
| 0.123104
| 0.055133
| 0.044531
| 0.024386
| 0.816399
| 0.810214
| 0.797844
| 0.797844
| 0.785828
| 0.753137
| 0
| 0.005835
| 0.287051
| 9,615
| 234
| 115
| 41.089744
| 0.819694
| 0.011336
| 0
| 0.718391
| 0
| 0
| 0.022098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.063218
| 0
| 0.12069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a4363b0709ea506c58a60f1aaca731beda241f8
| 5,631
|
py
|
Python
|
ktrain/graph/learner.py
|
husmen/ktrain
|
4147b0bd146deb513c6f94505908294a5163efac
|
[
"Apache-2.0"
] | 1,013
|
2019-06-04T14:25:24.000Z
|
2022-03-26T05:52:00.000Z
|
ktrain/graph/learner.py
|
husmen/ktrain
|
4147b0bd146deb513c6f94505908294a5163efac
|
[
"Apache-2.0"
] | 427
|
2019-06-17T13:45:50.000Z
|
2022-03-25T16:23:49.000Z
|
ktrain/graph/learner.py
|
husmen/ktrain
|
4147b0bd146deb513c6f94505908294a5163efac
|
[
"Apache-2.0"
] | 272
|
2019-06-05T03:19:07.000Z
|
2022-03-28T02:23:37.000Z
|
from ..imports import *
from .. import utils as U
from ..core import GenLearner
class NodeClassLearner(GenLearner):
"""
```
Main class used to tune and train Keras models for node classification
Main parameters are:
model (Model): A compiled instance of keras.engine.training.Model
train_data (Iterator): an Iterator instance for the training set
val_data (Iterator): an Iterator instance for the validation set
```
"""
def __init__(self, model, train_data=None, val_data=None,
batch_size=U.DEFAULT_BS, eval_batch_size=U.DEFAULT_BS,
workers=1, use_multiprocessing=False):
super().__init__(model, train_data=train_data, val_data=val_data,
batch_size=batch_size, eval_batch_size=eval_batch_size,
workers=workers, use_multiprocessing=use_multiprocessing)
return
def view_top_losses(self, n=4, preproc=None, val_data=None):
"""
```
Views observations with top losses in validation set.
Typically overridden by Learner subclasses.
Args:
n(int or tuple): a range to select in form of int or tuple
e.g., n=8 is treated as n=(0,8)
preproc (Preprocessor): A TextPreprocessor or ImagePreprocessor.
For some data like text data, a preprocessor
is required to undo the pre-processing
to correctly view raw data.
val_data: optional val_data to use instead of self.val_data
Returns:
list of n tuples where first element is either
filepath or id of validation example and second element
is loss.
```
"""
val = self._check_val(val_data)
# get top losses and associated data
tups = self.top_losses(n=n, val_data=val, preproc=preproc)
# get multilabel status and class names
classes = preproc.get_classes() if preproc is not None else None
# iterate through losses
for tup in tups:
# get data
idx = tup[0]
loss = tup[1]
truth = tup[2]
pred = tup[3]
print('----------')
print("id:%s | loss:%s | true:%s | pred:%s)\n" % (idx, round(loss,2), truth, pred))
#print(obs)
return
def layer_output(self, layer_id, example_id=0, batch_id=0, use_val=False):
"""
```
Prints output of layer with index <layer_id> to help debug models.
Uses first example (example_id=0) from training set, by default.
```
"""
raise Exception('currently_unsupported: layer_output method is not yet supported for ' +
'graph neural networks in ktrain')
class LinkPredLearner(GenLearner):
"""
```
Main class used to tune and train Keras models for link prediction
Main parameters are:
model (Model): A compiled instance of keras.engine.training.Model
train_data (Iterator): an Iterator instance for the training set
val_data (Iterator): an Iterator instance for the validation set
```
"""
def __init__(self, model, train_data=None, val_data=None,
batch_size=U.DEFAULT_BS, eval_batch_size=U.DEFAULT_BS,
workers=1, use_multiprocessing=False):
super().__init__(model, train_data=train_data, val_data=val_data,
batch_size=batch_size, eval_batch_size=eval_batch_size,
workers=workers, use_multiprocessing=use_multiprocessing)
return
def view_top_losses(self, n=4, preproc=None, val_data=None):
"""
```
Views observations with top losses in validation set.
Typically overridden by Learner subclasses.
Args:
n(int or tuple): a range to select in form of int or tuple
e.g., n=8 is treated as n=(0,8)
preproc (Preprocessor): A TextPreprocessor or ImagePreprocessor.
For some data like text data, a preprocessor
is required to undo the pre-processing
to correctly view raw data.
val_data: optional val_data to use instead of self.val_data
Returns:
list of n tuples where first element is either
filepath or id of validation example and second element
is loss.
```
"""
val = self._check_val(val_data)
# get top losses and associated data
tups = self.top_losses(n=n, val_data=val, preproc=preproc)
# get multilabel status and class names
classes = preproc.get_classes() if preproc is not None else None
# iterate through losses
for tup in tups:
# get data
idx = tup[0]
loss = tup[1]
truth = tup[2]
pred = tup[3]
print('----------')
print("id:%s | loss:%s | true:%s | pred:%s)\n" % (idx, round(loss,2), truth, pred))
#print(obs)
return
def layer_output(self, layer_id, example_id=0, batch_id=0, use_val=False):
"""
```
Prints output of layer with index <layer_id> to help debug models.
Uses first example (example_id=0) from training set, by default.
```
"""
raise Exception('currently_unsupported: layer_output method is not yet supported for ' +
'graph neural networks in ktrain')
| 35.19375
| 96
| 0.582845
| 704
| 5,631
| 4.517045
| 0.215909
| 0.044025
| 0.026415
| 0.026415
| 0.958491
| 0.958491
| 0.958491
| 0.958491
| 0.958491
| 0.958491
| 0
| 0.006954
| 0.335997
| 5,631
| 159
| 97
| 35.415094
| 0.843541
| 0.43154
| 0
| 0.897959
| 0
| 0
| 0.107653
| 0.016111
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.061224
| 0
| 0.306122
| 0.081633
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbe414ec11ed223b8f3b005ec5b7199d7a73066f
| 3,737
|
py
|
Python
|
Phase-1/Python Basic 1/Day-3.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 11
|
2020-05-11T08:41:21.000Z
|
2022-02-27T08:21:37.000Z
|
Phase-1/Python Basic 1/Day-3.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 9
|
2020-05-12T10:46:06.000Z
|
2020-05-28T17:37:19.000Z
|
Phase-1/Python Basic 1/Day-3.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 44
|
2020-05-10T20:53:32.000Z
|
2021-04-25T18:47:08.000Z
|
"""
1. Write a Python program to print the documentation (syntax, description etc.) of Python built-in function(s).
Sample function : abs()
Expected Result :
abs(number) -> number
Return the absolute value of the argument.
Tools: help function
2. Write a Python program to print the calendar of a given month and year.
Tools: Use 'calendar' module.
3. Write a Python program to print the following here document.
Sample string :
a string that you "don't" have to escape
This
is a ....... multi-line
heredoc string --------> example
Tools: string formatting
4. Write a Python program to calculate number of days between two dates.
Sample dates : (2014, 7, 2), (2014, 7, 11)
Expected output : 9 days
Tools: Datetime module, timedelta module
5. Write a Python program to get the volume of a sphere with radius 6.
Tools: input function, math
6. Write a Python program to get the difference between a given number and 17, if the number is greater than 17 return double the absolute difference.
Tools: abs function, input function, math
7. Write a Python program to test whether a number is within 100 of 1000 or 2000.
Tools: maths, input function
8. Write a Python program to calculate the sum of three given numbers, if the values are equal then return three times of their sum.
Tools: math, input function
9. Write a Python program to get a new string from a given string where "Is" has been added to the front. If the given string already begins with "Is" then return the string unchanged.
Tools: input function, string formatting
10. Write a Python program to get a string which is n (non-negative integer) copies of a given string.
Tools: input function, slicing
"""
| 35.932692
| 185
| 0.690393
| 570
| 3,737
| 4.526316
| 0.210526
| 0.046512
| 0.093023
| 0.147287
| 0.982946
| 0.982946
| 0.982946
| 0.982946
| 0.982946
| 0.982946
| 0
| 0.039174
| 0.248595
| 3,737
| 104
| 186
| 35.932692
| 0.87963
| 0
| 0
| 0
| 0
| 0
| 0.009091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.088235
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbfdaede95b2536399d16c62c421baf5bd420ceb
| 6,688
|
py
|
Python
|
chess_commentary_model/transformers_model/dataset_preprocessing.py
|
Rseiji/TCC-2020
|
da68a49da38adf1bcf590b3028894d7834a28157
|
[
"MIT"
] | null | null | null |
chess_commentary_model/transformers_model/dataset_preprocessing.py
|
Rseiji/TCC-2020
|
da68a49da38adf1bcf590b3028894d7834a28157
|
[
"MIT"
] | 2
|
2020-08-30T22:47:54.000Z
|
2021-03-31T19:58:11.000Z
|
chess_commentary_model/transformers_model/dataset_preprocessing.py
|
Rseiji/TCC-2020
|
da68a49da38adf1bcf590b3028894d7834a28157
|
[
"MIT"
] | null | null | null |
"""Métodos de preprocessamento de testes individuais
"""
import pandas as pd
import numpy as np
import math
def test_1(df, seed=0):
"""training: balanced; test: balanced
training: 80k (40k 0, 40k 1)
test: 20k (10k 0, 10k 1)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:40000]
df_zeros_training = df_zeros.loc[:40000]
df_ones_test = df_ones.loc[40000:50000]
df_zeros_test = df_zeros.loc[40000:50000]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
def test_2(df, seed=0):
"""training: balanced; test: unbalanced
training: 80k (40k 0, 40k 1)
test: 20k (4k 0, 16k 1)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:40000]
df_zeros_training = df_zeros.loc[:40000]
df_ones_test = df_ones.loc[40000:44000]
df_zeros_test = df_zeros.loc[40000:56000]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
def test_3(df, seed=0):
"""training: unbalanced; test: unbalanced
training: 80k (16k 1, 64k 0)
test: 20k (4k 1, 16k 0)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:16000]
df_zeros_training = df_zeros.loc[:64000]
df_ones_test = df_ones.loc[16000:20000]
df_zeros_test = df_zeros.loc[64000:80000]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
##################################
## Tests on old dataset
##################################
def test_4(df, seed=0):
""" training: balanced; test: balanced
training: 58k (29k 0, 29k 1)
test: 14.5k (7.25k 0, 7.25k 1)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:29000]
df_zeros_training = df_zeros.loc[:29000]
df_ones_test = df_ones.loc[29000:36250]
df_zeros_test = df_zeros.loc[29000:36250]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
def test_5(df, seed=0):
"""training: balanced; test: unbalanced
training: 58k (29000 0, 29000 1)
test: 14.5k (12905 0, 1595 1)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:29000]
df_zeros_training = df_zeros.loc[:29000]
df_ones_test = df_ones.loc[29000:30595]
df_zeros_test = df_zeros.loc[29000:41905]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
def test_6(df, seed=0):
"""training: unbalanced; test: unbalanced
training: 58k (6380 1, 51620 0)
test: 14.5k (1595 1, 12905 0)
"""
df_ones = df[df['label'] == 1]
df_zeros = df[df['label'] == 0]
df_ones = df_ones.sample(frac=1, random_state=seed).reset_index(drop=True)
df_zeros = df_zeros.sample(frac=1, random_state=seed).reset_index(drop=True)
df_ones_training = df_ones.loc[:6380]
df_zeros_training = df_zeros.loc[:51620]
df_ones_test = df_ones.loc[6380:7975]
df_zeros_test = df_zeros.loc[51620:64525]
df_training = pd.concat([df_ones_training, df_zeros_training])
df_training = df_training.sample(frac=1).reset_index(drop=True)
df_test = pd.concat([df_ones_test, df_zeros_test])
df_test = df_test.sample(frac=1).reset_index(drop=True)
sentences_train = df_training['comment'].tolist()
sentences_test = df_test['comment'].tolist()
labels_train = df_training['label'].tolist()
labels_test = df_test['label'].tolist()
return sentences_train, sentences_test, labels_train, labels_test
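# Hypothetical usage sketch (an addition): the split helpers above expect a
# DataFrame with 'comment' and 'label' columns and enough rows of each class to
# fill the requested slices. The CSV path below is a placeholder.
def _example_split():
    df = pd.read_csv("labeled_comments.csv")  # placeholder path
    sentences_train, sentences_test, labels_train, labels_test = test_1(df, seed=42)
    print(len(sentences_train), len(sentences_test))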
| 36.546448
| 80
| 0.689145
| 1,027
| 6,688
| 4.189873
| 0.077897
| 0.075296
| 0.061353
| 0.100395
| 0.918894
| 0.918894
| 0.888682
| 0.864513
| 0.7934
| 0.7934
| 0
| 0.061784
| 0.165072
| 6,688
| 182
| 81
| 36.747253
| 0.708811
| 0.095395
| 0
| 0.774775
| 0
| 0
| 0.035052
| 0
| 0
| 0
| 0
| 0.005495
| 0
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e01d041f8b5c1564d154529462e58e50b56f4910
| 5,264
|
py
|
Python
|
augment.py
|
docongminh/Text-Image-Augmentation-python
|
da27e8346ce2339f801335923faf7b14e026fd90
|
[
"Apache-2.0"
] | 217
|
2020-02-09T07:44:18.000Z
|
2022-03-24T03:52:51.000Z
|
ocraug/augment.py
|
lzmisscc/Text-Image-Augmentation-python
|
12f104452e939444eb0fd4ac96143b78d091845b
|
[
"Apache-2.0"
] | 5
|
2020-03-23T02:24:33.000Z
|
2022-03-13T07:02:04.000Z
|
ocraug/augment.py
|
lzmisscc/Text-Image-Augmentation-python
|
12f104452e939444eb0fd4ac96143b78d091845b
|
[
"Apache-2.0"
] | 42
|
2020-02-10T06:42:31.000Z
|
2022-03-13T11:54:18.000Z
|
# -*- coding:utf-8 -*-
# Author: RubanSeven
# import cv2
import numpy as np
# from transform import get_perspective_transform, warp_perspective
from warp_mls import WarpMLS
def distort(src, segment):
img_h, img_w = src.shape[:2]
cut = img_w // segment
thresh = cut // 3
# thresh = img_h // segment // 3
# thresh = img_h // 5
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([np.random.randint(thresh), np.random.randint(thresh)])
dst_pts.append([img_w - np.random.randint(thresh), np.random.randint(thresh)])
dst_pts.append([img_w - np.random.randint(thresh), img_h - np.random.randint(thresh)])
dst_pts.append([np.random.randint(thresh), img_h - np.random.randint(thresh)])
half_thresh = thresh * 0.5
for cut_idx in np.arange(1, segment, 1):
src_pts.append([cut * cut_idx, 0])
src_pts.append([cut * cut_idx, img_h])
dst_pts.append([cut * cut_idx + np.random.randint(thresh) - half_thresh,
np.random.randint(thresh) - half_thresh])
dst_pts.append([cut * cut_idx + np.random.randint(thresh) - half_thresh,
img_h + np.random.randint(thresh) - half_thresh])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
def stretch(src, segment):
img_h, img_w = src.shape[:2]
cut = img_w // segment
thresh = cut * 4 // 5
# thresh = img_h // segment // 3
# thresh = img_h // 5
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([0, 0])
dst_pts.append([img_w, 0])
dst_pts.append([img_w, img_h])
dst_pts.append([0, img_h])
half_thresh = thresh * 0.5
for cut_idx in np.arange(1, segment, 1):
move = np.random.randint(thresh) - half_thresh
src_pts.append([cut * cut_idx, 0])
src_pts.append([cut * cut_idx, img_h])
dst_pts.append([cut * cut_idx + move, 0])
dst_pts.append([cut * cut_idx + move, img_h])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
def perspective(src):
img_h, img_w = src.shape[:2]
thresh = img_h // 2
src_pts = list()
dst_pts = list()
src_pts.append([0, 0])
src_pts.append([img_w, 0])
src_pts.append([img_w, img_h])
src_pts.append([0, img_h])
dst_pts.append([0, np.random.randint(thresh)])
dst_pts.append([img_w, np.random.randint(thresh)])
dst_pts.append([img_w, img_h - np.random.randint(thresh)])
dst_pts.append([0, img_h - np.random.randint(thresh)])
trans = WarpMLS(src, src_pts, dst_pts, img_w, img_h)
dst = trans.generate()
return dst
# def distort(src, segment):
# img_h, img_w = src.shape[:2]
# dst = np.zeros_like(src, dtype=np.uint8)
#
# cut = img_w // segment
# thresh = img_h // 8
#
# src_pts = list()
# # dst_pts = list()
#
# src_pts.append([-np.random.randint(thresh), -np.random.randint(thresh)])
# src_pts.append([-np.random.randint(thresh), img_h + np.random.randint(thresh)])
#
# # dst_pts.append([0, 0])
# # dst_pts.append([0, img_h])
# dst_box = np.array([[0, 0], [0, img_h], [cut, 0], [cut, img_h]], dtype=np.float32)
#
# half_thresh = thresh * 0.5
#
# for cut_idx in np.arange(1, segment, 1):
# src_pts.append([cut * cut_idx + np.random.randint(thresh) - half_thresh,
# np.random.randint(thresh) - half_thresh])
# src_pts.append([cut * cut_idx + np.random.randint(thresh) - half_thresh,
# img_h + np.random.randint(thresh) - half_thresh])
#
# # dst_pts.append([cut * i, 0])
# # dst_pts.append([cut * i, img_h])
#
# src_box = np.array(src_pts[-4:-2] + src_pts[-2:-1] + src_pts[-1:], dtype=np.float32)
#
# # mat = cv2.getPerspectiveTransform(src_box, dst_box)
# # print(mat)
# # dst[:, cut * (cut_idx - 1):cut * cut_idx] = cv2.warpPerspective(src, mat, (cut, img_h))
#
# mat = get_perspective_transform(dst_box, src_box)
# dst[:, cut * (cut_idx - 1):cut * cut_idx] = warp_perspective(src, mat, (cut, img_h))
# # print(mat)
#
# src_pts.append([img_w + np.random.randint(thresh) - half_thresh,
# np.random.randint(thresh) - half_thresh])
# src_pts.append([img_w + np.random.randint(thresh) - half_thresh,
# img_h + np.random.randint(thresh) - half_thresh])
# src_box = np.array(src_pts[-4:-2] + src_pts[-2:-1] + src_pts[-1:], dtype=np.float32)
#
# # mat = cv2.getPerspectiveTransform(src_box, dst_box)
# # dst[:, cut * (segment - 1):] = cv2.warpPerspective(src, mat, (img_w - cut * (segment - 1), img_h))
# mat = get_perspective_transform(dst_box, src_box)
# dst[:, cut * (segment - 1):] = warp_perspective(src, mat, (img_w - cut * (segment - 1), img_h))
#
# return dst
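# Minimal usage sketch (an assumption, not original code): applies each of the
# three augmentations above to a dummy image; only numpy and the local
# warp_mls module imported at the top are required.
if __name__ == '__main__':
    img = (np.random.rand(32, 128, 3) * 255).astype(np.uint8)
    print(distort(img, segment=4).shape)
    print(stretch(img, segment=4).shape)
    print(perspective(img).shape)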
| 33.74359
| 107
| 0.586816
| 783
| 5,264
| 3.711367
| 0.077905
| 0.130076
| 0.14969
| 0.209566
| 0.88128
| 0.852719
| 0.835857
| 0.812113
| 0.790089
| 0.76084
| 0
| 0.021147
| 0.254369
| 5,264
| 155
| 108
| 33.96129
| 0.719236
| 0.43959
| 0
| 0.626866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044776
| false
| 0
| 0.029851
| 0
| 0.119403
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ebc4327dea5e082563be3e589c1e4f6b395a97a
| 7,146
|
py
|
Python
|
tests/component/test_grid_mixin.py
|
csdms/pymt
|
188222d7858cd3e8eb15564e56d9b7f0cb43cae5
|
[
"MIT"
] | 38
|
2017-06-30T17:10:53.000Z
|
2022-01-05T07:38:03.000Z
|
tests/component/test_grid_mixin.py
|
csdms/pymt
|
188222d7858cd3e8eb15564e56d9b7f0cb43cae5
|
[
"MIT"
] | 96
|
2017-04-04T18:52:41.000Z
|
2021-11-01T21:30:48.000Z
|
tests/component/test_grid_mixin.py
|
csdms/pymt
|
188222d7858cd3e8eb15564e56d9b7f0cb43cae5
|
[
"MIT"
] | 15
|
2017-05-23T15:40:16.000Z
|
2021-06-14T21:30:28.000Z
|
import numpy as np
import pytest
from pytest import approx
from pymt.component.grid import GridMixIn
class Port:
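# Minimal stand-in for a component port used by the tests below: exposes the component name and its input ("uses") / output ("provides") exchange items.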
def __init__(self, name, uses=None, provides=None):
self._name = name
self._uses = uses or []
self._provides = provides or []
def get_component_name(self):
return self._name
def get_input_item_count(self):
return len(self._uses)
def get_input_item_list(self):
return self._uses
def get_output_item_count(self):
return len(self._provides)
def get_output_item_list(self):
return self._provides
def test_exchange_items():
class Component(GridMixIn):
def __init__(self):
self._port = Port("test", uses=["invar"], provides=["outvar"])
super().__init__()
c = Component()
assert c.input_items == ["invar"]
assert c.output_items == ["outvar"]
def test_no_exchange_items():
class Component(GridMixIn):
def __init__(self):
self._port = Port("test")
super().__init__()
c = Component()
assert c.input_items == []
assert c.output_items == []
def test_raster_1d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (3,)
def get_grid_spacing(self, grid_id):
return (2.0,)
def get_grid_origin(self, grid_id):
return (3.0,)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_x("invar") == approx(np.array([3.0, 5.0, 7.0]))
def test_raster_2d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_spacing(self, grid_id):
return (2.0, 1.0)
def get_grid_origin(self, grid_id):
return (0.0, 0.0)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test-2d", uses=["invar"], provides=["outvar"])
super().__init__()
c = Component()
assert c.name == "test-2d"
assert c.get_grid_type(0) == "RASTER"
assert c.get_x(0) == approx(np.array([[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]))
assert c.get_y(0) == approx(np.array([[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]]))
assert np.all(c.get_connectivity(0) == np.array([0, 1, 4, 3, 1, 2, 5, 4]))
assert np.all(c.get_offset(0) == np.array([4, 8]))
def test_raster_3d():
class RasterPort(Port):
def get_grid_shape(self, grid_id):
return (2, 2, 3)
def get_grid_spacing(self, grid_id):
return (1.0, 2.0, 1.0)
def get_grid_origin(self, grid_id):
return (0.0, 0.0, 0.0)
class Component(GridMixIn):
def __init__(self):
self._port = RasterPort("test-3d", uses=["invar"])
super().__init__()
c = Component()
assert c.get_x(0) == approx(
np.array(
[[[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]], [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]]
)
)
assert c.get_y(0) == approx(
np.array(
[[[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]], [[0.0, 0.0, 0.0], [2.0, 2.0, 2.0]]]
)
)
assert c.get_z(0) == approx(
np.array(
[[[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]]
)
)
def test_rectilinear():
class RectilinearPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return (0.0, 3.0, 4)
def get_grid_y(self, grid_id):
return (2.0, 7.0)
class Component(GridMixIn):
def __init__(self):
self._port = RectilinearPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "RECTILINEAR"
assert c.get_x(0) == approx(np.array([[0.0, 3.0, 4.0], [0.0, 3.0, 4.0]]))
assert c.get_y(0) == approx(np.array([[2.0, 2.0, 2.0], [7.0, 7.0, 7.0]]))
def test_structured():
class StructuredPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0, 0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
return np.array([0.0, 1.0, 2.0, 1.0, 2.0, 3.0])
class Component(GridMixIn):
def __init__(self):
self._port = StructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "STRUCTURED"
assert c.get_x(0) == approx(np.array([0.0, 1.0, 2.0, 0.0, 1.0, 2.0]))
assert c.get_y(0) == approx(np.array([0.0, 1.0, 2.0, 1.0, 2.0, 3.0]))
def test_unstructured():
class UnstructuredPort(Port):
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
return np.array([0.0, 0.0, 1.0, 1.0, 0.0])
def get_grid_connectivity(self, grid_id):
return np.array([0, 1, 3, 2, 4, 3, 1])
def get_grid_offset(self, grid_id):
return np.array([4, 7])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
assert c.get_x(0) == approx(np.array([0.0, 1.0, 0.0, 1.0, 2.0]))
assert c.get_y(0) == approx(np.array([0.0, 0.0, 1.0, 1.0, 0.0]))
def test_get_grid_shape_is_none():
class UnstructuredPort(Port):
def get_grid_shape(self, grid_id):
return None
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
def test_get_grid_shape_raises():
class UnstructuredPort(Port):
def get_grid_shape(self, grid_id):
raise NotImplementedError("get_grid_shape")
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
class Component(GridMixIn):
def __init__(self):
self._port = UnstructuredPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "UNSTRUCTURED"
def test_structured_1d():
class RectilinearPort(Port):
def get_grid_shape(self, grid_id):
return (2, 3)
def get_grid_x(self, grid_id):
return np.array([0.0, 1.0, 2.0])
def get_grid_y(self, grid_id):
raise NotImplementedError("get_grid_y")
def get_grid_z(self, grid_id):
raise NotImplementedError("get_grid_z")
class Component(GridMixIn):
def __init__(self):
self._port = RectilinearPort("test", uses=["invar"])
super().__init__()
c = Component()
assert c.get_grid_type(0) == "RECTILINEAR"
with pytest.raises(IndexError):
c.get_z(0)
| 27.805447
| 84
| 0.558354
| 1,074
| 7,146
| 3.463687
| 0.069832
| 0.04086
| 0.035484
| 0.026882
| 0.816398
| 0.783602
| 0.744892
| 0.710753
| 0.691129
| 0.673656
| 0
| 0.062864
| 0.278757
| 7,146
| 256
| 85
| 27.914063
| 0.658906
| 0
| 0
| 0.510638
| 0
| 0
| 0.034005
| 0
| 0
| 0
| 0
| 0
| 0.138298
| 1
| 0.292553
| false
| 0
| 0.021277
| 0.154255
| 0.579787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
161f5fc0724b14420397243336670a4b9fb7062e
| 20,580
|
py
|
Python
|
aws_lambda/pytorch/source/caffe2/python/operator_test/elementwise_op_broadcast_test.py
|
YevhenVieskov/ML-DL-in-production
|
03839abcb93a49d4f05c43aa4e446a040027cdb0
|
[
"MIT"
] | 4
|
2020-09-17T11:50:17.000Z
|
2021-08-25T06:14:10.000Z
|
aws_lambda/pytorch/source/caffe2/python/operator_test/elementwise_op_broadcast_test.py
|
YevhenVieskov/ML-DL-in-production
|
03839abcb93a49d4f05c43aa4e446a040027cdb0
|
[
"MIT"
] | null | null | null |
aws_lambda/pytorch/source/caffe2/python/operator_test/elementwise_op_broadcast_test.py
|
YevhenVieskov/ML-DL-in-production
|
03839abcb93a49d4f05c43aa4e446a040027cdb0
|
[
"MIT"
] | 6
|
2020-10-16T13:28:31.000Z
|
2021-08-25T12:08:34.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from hypothesis import given
import numpy as np
from caffe2.proto import caffe2_pb2
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
# TODO(jiayq): make them hypothesis tests for better coverage.
class TestElementwiseBroadcast(serial.SerializedTestCase):
@given(**hu.gcs)
def test_broadcast_Add(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(4, 5).astype(np.float32)
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X + Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(3, 4).astype(np.float32)
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis])
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
# broadcasting the first dimension
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(2).astype(np.float32)
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1, axis=0)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X + Y[:, np.newaxis, np.newaxis, np.newaxis])
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
# broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(1, 4, 1).astype(np.float32)
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X + Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
@given(**hu.gcs)
def test_broadcast_Mul(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(4, 5).astype(np.float32)
op = core.CreateOperator("Mul", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X * Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(3, 4).astype(np.float32)
op = core.CreateOperator("Mul", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting the first dimension
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(2).astype(np.float32)
op = core.CreateOperator("Mul", ["X", "Y"], "out", broadcast=1, axis=0)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X * Y[:, np.newaxis, np.newaxis, np.newaxis])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(1, 4, 1).astype(np.float32)
op = core.CreateOperator("Mul", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X * Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
@given(**hu.gcs)
def test_broadcast_Sub(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(4, 5).astype(np.float32)
op = core.CreateOperator("Sub", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X - Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(3, 4).astype(np.float32)
op = core.CreateOperator("Sub", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting the first dimension
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(2).astype(np.float32)
op = core.CreateOperator("Sub", ["X", "Y"], "out", broadcast=1, axis=0)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X - Y[:, np.newaxis, np.newaxis, np.newaxis])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(1, 4, 1).astype(np.float32)
op = core.CreateOperator("Sub", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X - Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
self.assertGradientChecks(gc, op, [X, Y], 1, [0])
@serial.given(**hu.gcs)
def test_broadcast_powt(self, gc, dc):
np.random.seed(101)
#operator
def powt_op(X, Y):
return [np.power(X, Y)]
#two gradients Y*X^(Y-1) and X^Y * ln(X)
def powt_grad(g_out, outputs, fwd_inputs):
[X, Y] = fwd_inputs
Z = outputs[0]
return ([Y * np.power(X, Y - 1), Z * np.log(X)] * g_out)
#1. Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0
Y = np.random.rand(4, 5).astype(np.float32) + 2.0
#two gradients Y*X^(Y-1) and X^Y * ln(X)
#latter gradient is summed over 1 and 0 dims to account for broadcast
def powt_grad_broadcast(g_out, outputs, fwd_inputs):
[GX, GY] = powt_grad(g_out, outputs, fwd_inputs)
return ([GX, np.sum(np.sum(GY, 1), 0)])
op = core.CreateOperator("Pow", ["X", "Y"], "Z", broadcast=1)
self.assertReferenceChecks(device_option=gc,
op=op,
inputs=[X, Y],
reference=powt_op,
output_to_grad="Z",
grad_reference=powt_grad_broadcast)
#2. broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0
Y = np.random.rand(3, 4).astype(np.float32) + 2.0
#pow op with the latter array increased by one dim
def powt_op_axis1(X, Y):
return powt_op(X, Y[:, :, np.newaxis])
#two gradients Y*X^(Y-1) and X^Y * ln(X)
#latter gradient is summed over 3 and 0 dims to account for broadcast
def powt_grad_axis1(g_out, outputs, fwd_inputs):
[X, Y] = fwd_inputs
[GX, GY] = powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]])
return ([GX, np.sum(np.sum(GY, 3), 0)])
op = core.CreateOperator("Pow", ["X", "Y"], "Z", broadcast=1, axis=1)
self.assertReferenceChecks(device_option=gc,
op=op,
inputs=[X, Y],
reference=powt_op_axis1,
output_to_grad="Z",
grad_reference=powt_grad_axis1)
#3. broadcasting the first dimension
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0
Y = np.random.rand(2).astype(np.float32) + 2.0
#pow op with the latter array increased by one dim
def powt_op_axis0(X, Y):
return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis])
#two gradients Y*X^(Y-1) and X^Y * ln(X)
#latter gradient is summed over 3, 2 and 1 dims to account for broadcast
def powt_grad_axis0(g_out, outputs, fwd_inputs):
[X, Y] = fwd_inputs
[GX, GY] = powt_grad(g_out,
outputs,
[X, Y[:, np.newaxis, np.newaxis, np.newaxis]])
return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)])
op = core.CreateOperator("Pow", ["X", "Y"], "Z", broadcast=1, axis=0)
self.assertReferenceChecks(device_option=gc,
op=op,
inputs=[X, Y],
reference=powt_op_axis0,
output_to_grad="Z",
grad_reference=powt_grad_axis0)
#4. broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0
Y = np.random.rand(1, 4, 1).astype(np.float32) + 2.0
#pow op with the latter array increased by one dim
def powt_op_mixed(X, Y):
return powt_op(X, Y[np.newaxis, :, :, :])
#two gradients Y*X^(Y-1) and X^Y * ln(X)
#latter gradient is summed over 0 and 1 dims to account for broadcast
def powt_grad_mixed(g_out, outputs, fwd_inputs):
[X, Y] = fwd_inputs
[GX, GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :, :]])
return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0),
(1, 4, 1))])
op = core.CreateOperator("Pow", ["X", "Y"], "Z", broadcast=1, axis=1)
self.assertReferenceChecks(device_option=gc,
op=op,
inputs=[X, Y],
reference=powt_op_mixed,
output_to_grad="Z",
grad_reference=powt_grad_mixed)
@given(**hu.gcs)
def test_broadcast_scalar(self, gc, dc):
# broadcasting constant
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(1).astype(np.float32)
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X + Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting scalar
X = np.random.rand(1).astype(np.float32)
Y = np.random.rand(1).astype(np.float32).reshape([])
op = core.CreateOperator("Add", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X + Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
@given(**hu.gcs)
def test_semantic_broadcast(self, gc, dc):
# NCHW as default
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(3).astype(np.float32)
op = core.CreateOperator(
"Add", ["X", "Y"], "out", broadcast=1, axis_str="C")
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(
out, X + Y[:, np.newaxis, np.newaxis])
self.assertDeviceChecks(dc, op, [X, Y], [0])
# NHWC
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(5).astype(np.float32)
op = core.CreateOperator(
"Add", ["X", "Y"], "out", broadcast=1, axis_str="C", order="NHWC")
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
np.testing.assert_array_almost_equal(out, X + Y)
self.assertDeviceChecks(dc, op, [X, Y], [0])
@given(**hu.gcs)
def test_sum_reduce_empty_blob(self, gc, dc):
net = core.Net('test')
with core.DeviceScope(gc):
net.GivenTensorFill([], ["X"], values=[], shape=[2, 0, 5])
net.GivenTensorFill([], ["Y"], values=[], shape=[2, 0])
net.SumReduceLike(["X", "Y"], "out", axis=0)
workspace.RunNetOnce(net)
@given(**hu.gcs)
def test_sum_reduce(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(4, 5).astype(np.float32)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
res = np.sum(X, axis=0)
res = np.sum(res, axis=0)
np.testing.assert_array_almost_equal(out, res)
self.assertDeviceChecks(dc, op, [X, Y], [0])
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(2, 3).astype(np.float32)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1, axis=0)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
res = np.sum(X, axis=3)
res = np.sum(res, axis=2)
np.testing.assert_array_almost_equal(out, res, decimal=3)
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(3, 4).astype(np.float32)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1, axis=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
res = np.sum(X, axis=0)
res = np.sum(res, axis=2)
np.testing.assert_array_almost_equal(out, res)
self.assertDeviceChecks(dc, op, [X, Y], [0])
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 500).astype(np.float64)
Y = np.random.rand(1).astype(np.float64)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
res = np.array(np.sum(X))
np.testing.assert_array_almost_equal(out, res, decimal=0)
# broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float32)
Y = np.random.rand(1, 3, 4, 1).astype(np.float32)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1)
workspace.FeedBlob("X", X)
workspace.FeedBlob("Y", Y)
workspace.RunOperatorOnce(op)
out = workspace.FetchBlob("out")
res = np.sum(X, axis=0)
res = np.sum(res, axis=2).reshape(Y.shape)
np.testing.assert_array_almost_equal(out, res)
self.assertDeviceChecks(dc, op, [X, Y], [0])
# fp64 is not supported with the CUDA op
dc_cpu_only = [d for d in dc if d.device_type != caffe2_pb2.CUDA]
self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0])
@unittest.skipIf(not workspace.has_gpu_support, "No gpu support")
@given(**hu.gcs_gpu_only)
def test_sum_reduce_fp16(self, gc, dc):
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float16)
Y = np.random.rand(4, 5).astype(np.float16)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1, device_option=gc)
def ref_op(X, Y):
res = np.sum(X, axis=0)
res = np.sum(res, axis=0)
return [res]
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=[X, Y],
reference=ref_op,
threshold=1e-3)
# Set broadcast and no axis, i.e. broadcasting last dimensions.
X = np.random.rand(2, 3, 4, 5).astype(np.float16)
Y = np.random.rand(2, 3).astype(np.float16)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1, axis=0)
def ref_op(X, Y):
res = np.sum(X, axis=3)
res = np.sum(res, axis=2)
return [res]
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=[X, Y],
reference=ref_op,
threshold=1e-3)
# broadcasting intermediate dimensions
X = np.random.rand(2, 3, 4, 5).astype(np.float16)
Y = np.random.rand(3, 4).astype(np.float16)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1, axis=1)
def ref_op(X, Y):
res = np.sum(X, axis=0)
res = np.sum(res, axis=2)
return [res]
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=[X, Y],
reference=ref_op,
threshold=1e-3)
# broadcasting with single elem dimensions at both ends
X = np.random.rand(2, 3, 4, 5).astype(np.float16)
Y = np.random.rand(1, 3, 4, 1).astype(np.float16)
op = core.CreateOperator(
"SumReduceLike", ["X", "Y"], "out", broadcast=1)
def ref_op(X, Y):
res = np.sum(X, axis=0)
res = np.sum(res, axis=2)
return [res.reshape(Y.shape)]
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=[X, Y],
reference=ref_op,
threshold=1e-3)
if __name__ == "__main__":
unittest.main()
| 42
| 79
| 0.56035
| 2,745
| 20,580
| 4.122404
| 0.064117
| 0.020679
| 0.061506
| 0.03906
| 0.893072
| 0.891923
| 0.884942
| 0.875309
| 0.854542
| 0.84058
| 0
| 0.032921
| 0.288581
| 20,580
| 489
| 80
| 42.08589
| 0.739977
| 0.097279
| 0
| 0.720207
| 0
| 0
| 0.024929
| 0
| 0
| 0
| 0
| 0.002045
| 0.160622
| 1
| 0.056995
| false
| 0
| 0.028497
| 0.010363
| 0.121762
| 0.002591
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
161fe3f007696be8bbc024b9cad0f629ab8008f8
| 28,143
|
py
|
Python
|
kayobe/tests/unit/cli/test_commands.py
|
jovial/kayobe
|
49e61fef4a221ee9fcfcee2b7bac02b6acc5bd0c
|
[
"Apache-2.0"
] | null | null | null |
kayobe/tests/unit/cli/test_commands.py
|
jovial/kayobe
|
49e61fef4a221ee9fcfcee2b7bac02b6acc5bd0c
|
[
"Apache-2.0"
] | null | null | null |
kayobe/tests/unit/cli/test_commands.py
|
jovial/kayobe
|
49e61fef4a221ee9fcfcee2b7bac02b6acc5bd0c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2017 StackHPC Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import cliff.app
import cliff.commandmanager
import mock
from kayobe.cli import commands
from kayobe import utils
class TestApp(cliff.app.App):
def __init__(self):
super(TestApp, self).__init__(
description='Test app',
version='0.1',
command_manager=cliff.commandmanager.CommandManager('kayobe.cli'))
class TestCase(unittest.TestCase):
@mock.patch.object(utils, "galaxy_install", spec=True)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_control_host_bootstrap(self, mock_run, mock_install):
command = commands.ControlHostBootstrap(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
mock_install.assert_called_once_with("requirements.yml",
"ansible/roles")
expected_calls = [
mock.call(mock.ANY, ["ansible/bootstrap.yml"]),
mock.call(mock.ANY, ["ansible/kolla-ansible.yml"],
tags="install"),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(utils, "galaxy_install", spec=True)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_control_host_upgrade(self, mock_run, mock_install):
command = commands.ControlHostUpgrade(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
mock_install.assert_called_once_with("requirements.yml",
"ansible/roles", force=True)
expected_calls = [
mock.call(mock.ANY, ["ansible/bootstrap.yml"]),
mock.call(mock.ANY, ["ansible/kolla-ansible.yml"],
tags="install"),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_network_connectivity_check(self, mock_run):
command = commands.NetworkConnectivityCheck(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(mock.ANY, ["ansible/network-connectivity.yml"]),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_seed_hypervisor_host_configure(self, mock_run, mock_dump):
command = commands.SeedHypervisorHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = "stack"
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(mock.ANY, host="seed-hypervisor",
var_name="kayobe_ansible_user", tags="dump-config")
]
self.assertEqual(expected_calls, mock_dump.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/ip-allocation.yml",
"ansible/ssh-known-host.yml",
"ansible/kayobe-ansible-user.yml",
"ansible/kayobe-target-venv.yml",
"ansible/users.yml",
"ansible/yum.yml",
"ansible/dev-tools.yml",
"ansible/network.yml",
"ansible/sysctl.yml",
"ansible/ntp.yml",
"ansible/seed-hypervisor-libvirt-host.yml",
],
limit="seed-hypervisor",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_seed_hypervisor_host_upgrade(self, mock_run):
command = commands.SeedHypervisorHostUpgrade(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/kayobe-target-venv.yml",
"ansible/kolla-target-venv.yml",
],
limit="seed-hypervisor",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_seed")
def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump):
command = commands.SeedHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"seed": {"kayobe_ansible_user": "stack"}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(mock.ANY, hosts="seed", tags="dump-config")
]
self.assertEqual(expected_calls, mock_dump.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/ip-allocation.yml",
"ansible/ssh-known-host.yml",
"ansible/kayobe-ansible-user.yml",
"ansible/kayobe-target-venv.yml",
"ansible/users.yml",
"ansible/yum.yml",
"ansible/dev-tools.yml",
"ansible/disable-selinux.yml",
"ansible/network.yml",
"ansible/sysctl.yml",
"ansible/ip-routing.yml",
"ansible/snat.yml",
"ansible/disable-glean.yml",
"ansible/ntp.yml",
"ansible/lvm.yml",
],
limit="seed",
),
mock.call(
mock.ANY,
["ansible/kolla-ansible.yml"],
tags="config",
),
mock.call(
mock.ANY,
[
"ansible/kolla-target-venv.yml",
"ansible/kolla-host.yml",
"ansible/docker.yml",
],
limit="seed",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={"ansible_user": "stack"},
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_seed")
def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run,
mock_dump):
command = commands.SeedHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"seed": {
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"kayobe_ansible_user": "stack",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"ansible_user": "stack",
},
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_seed")
def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run,
mock_dump):
command = commands.SeedHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"seed": {
"kayobe_ansible_user": "stack",
"kolla_ansible_target_venv": "/kolla/venv/bin/python",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/usr/bin/python",
"ansible_user": "stack",
"virtualenv": "/kolla/venv/bin/python",
},
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_seed")
def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run,
mock_dump):
command = commands.SeedHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"seed": {
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"kayobe_ansible_user": "stack",
"kolla_ansible_target_venv": "/kolla/venv/bin/python",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"ansible_user": "stack",
"virtualenv": "/kolla/venv/bin/python",
},
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_seed_host_upgrade(self, mock_run):
command = commands.SeedHostUpgrade(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/kayobe-target-venv.yml",
"ansible/kolla-target-venv.yml",
],
limit="seed",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_seed_container_image_build(self, mock_run):
command = commands.SeedContainerImageBuild(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/container-image-builders-check.yml",
"ansible/kolla-build.yml",
"ansible/container-image-build.yml"
],
extra_vars={
"container_image_sets": (
"{{ seed_container_image_sets }}"),
"push_images": False,
}
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_seed_container_image_build_with_regex(self, mock_run):
command = commands.SeedContainerImageBuild(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args(["--push", "^regex1$", "^regex2$"])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/container-image-builders-check.yml",
"ansible/kolla-build.yml",
"ansible/container-image-build.yml"
],
extra_vars={
"container_image_regexes": "'^regex1$ ^regex2$'",
"push_images": True,
}
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_seed")
def test_service_deploy(self, mock_kolla_run, mock_run):
command = commands.SeedServiceDeploy(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
["ansible/kolla-ansible.yml"],
tags="config",
),
mock.call(
mock.ANY,
["ansible/kolla-bifrost.yml"],
),
mock.call(
mock.ANY,
[
"ansible/overcloud-host-image-workaround-resolv.yml",
"ansible/seed-introspection-rules.yml",
"ansible/dell-switch-bmp.yml",
],
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
"deploy-bifrost",
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_overcloud")
def test_overcloud_host_configure(self, mock_kolla_run, mock_run,
mock_dump):
command = commands.OvercloudHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"controller0": {"kayobe_ansible_user": "stack"}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(mock.ANY, hosts="overcloud", tags="dump-config")
]
self.assertEqual(expected_calls, mock_dump.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/ip-allocation.yml",
"ansible/ssh-known-host.yml",
"ansible/kayobe-ansible-user.yml",
"ansible/kayobe-target-venv.yml",
"ansible/users.yml",
"ansible/yum.yml",
"ansible/dev-tools.yml",
"ansible/disable-selinux.yml",
"ansible/network.yml",
"ansible/sysctl.yml",
"ansible/disable-glean.yml",
"ansible/disable-cloud-init.yml",
"ansible/ntp.yml",
"ansible/lvm.yml",
],
limit="overcloud",
),
mock.call(
mock.ANY,
["ansible/kolla-ansible.yml"],
tags="config",
),
mock.call(
mock.ANY,
[
"ansible/kolla-target-venv.yml",
"ansible/kolla-host.yml",
"ansible/docker.yml",
"ansible/ceph-block-devices.yml",
],
limit="overcloud",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={"ansible_user": "stack"},
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_overcloud")
def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run,
mock_run, mock_dump):
command = commands.OvercloudHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"controller0": {
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"kayobe_ansible_user": "stack",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"ansible_user": "stack",
}
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_overcloud")
def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run,
mock_run, mock_dump):
command = commands.OvercloudHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"controller0": {
"kayobe_ansible_user": "stack",
"kolla_ansible_target_venv": "/kolla/venv/bin/python",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/usr/bin/python",
"ansible_user": "stack",
"virtualenv": "/kolla/venv/bin/python",
}
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_config_dump")
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
@mock.patch.object(commands.KollaAnsibleMixin,
"run_kolla_ansible_overcloud")
def test_overcloud_host_configure_both_venvs(self, mock_kolla_run,
mock_run, mock_dump):
command = commands.OvercloudHostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
mock_dump.return_value = {
"controller0": {
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"kayobe_ansible_user": "stack",
"kolla_ansible_target_venv": "/kolla/venv/bin/python",
}
}
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
"bootstrap-servers",
extra_vars={
"ansible_python_interpreter": "/kayobe/venv/bin/python",
"ansible_user": "stack",
"virtualenv": "/kolla/venv/bin/python",
}
),
]
self.assertEqual(expected_calls, mock_kolla_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_overcloud_host_upgrade(self, mock_run):
command = commands.OvercloudHostUpgrade(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/kayobe-target-venv.yml",
"ansible/kolla-target-venv.yml",
"ansible/overcloud-docker-sdk-upgrade.yml",
"ansible/overcloud-etc-hosts-fixup.yml",
],
limit="overcloud",
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_overcloud_container_image_build(self, mock_run):
command = commands.OvercloudContainerImageBuild(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/container-image-builders-check.yml",
"ansible/kolla-build.yml",
"ansible/container-image-build.yml"
],
extra_vars={
"container_image_sets": (
"{{ overcloud_container_image_sets }}"),
"push_images": False,
}
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_overcloud_container_image_build_with_regex(self, mock_run):
command = commands.OvercloudContainerImageBuild(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args(["--push", "^regex1$", "^regex2$"])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/container-image-builders-check.yml",
"ansible/kolla-build.yml",
"ansible/container-image-build.yml"
],
extra_vars={
"container_image_regexes": "'^regex1$ ^regex2$'",
"push_images": True,
}
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_overcloud_post_configure(self, mock_run):
command = commands.OvercloudPostConfigure(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
'ansible/overcloud-ipa-images.yml',
'ansible/overcloud-introspection-rules.yml',
'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa
'ansible/provision-net.yml',
'ansible/overcloud-grafana-configure.yml'
],
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_baremetal_compute_inspect(self, mock_run):
command = commands.BaremetalComputeInspect(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/baremetal-compute-inspect.yml",
],
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_baremetal_compute_manage(self, mock_run):
command = commands.BaremetalComputeManage(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/baremetal-compute-manage.yml",
],
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
@mock.patch.object(commands.KayobeAnsibleMixin,
"run_kayobe_playbooks")
def test_baremetal_compute_provide(self, mock_run):
command = commands.BaremetalComputeProvide(TestApp(), [])
parser = command.get_parser("test")
parsed_args = parser.parse_args([])
result = command.run(parsed_args)
self.assertEqual(0, result)
expected_calls = [
mock.call(
mock.ANY,
[
"ansible/baremetal-compute-provide.yml",
],
),
]
self.assertEqual(expected_calls, mock_run.call_args_list)
| 37.324934
| 93
| 0.538855
| 2,569
| 28,143
| 5.658233
| 0.088361
| 0.042653
| 0.070171
| 0.066456
| 0.893919
| 0.878509
| 0.868396
| 0.855944
| 0.848996
| 0.848996
| 0
| 0.002534
| 0.355079
| 28,143
| 753
| 94
| 37.374502
| 0.798347
| 0.019898
| 0
| 0.774481
| 0
| 0
| 0.190257
| 0.118108
| 0
| 0
| 0
| 0
| 0.083086
| 1
| 0.037092
| false
| 0
| 0.008902
| 0
| 0.048961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
163549f9139dc6999e9e0ca088584cc51b142caa
| 12,432
|
py
|
Python
|
tests/test_selections.py
|
swimmio/sqlalchemy_swimm
|
d24accb7792743cf586bd7062531d108e7063eba
|
[
"MIT"
] | null | null | null |
tests/test_selections.py
|
swimmio/sqlalchemy_swimm
|
d24accb7792743cf586bd7062531d108e7063eba
|
[
"MIT"
] | null | null | null |
tests/test_selections.py
|
swimmio/sqlalchemy_swimm
|
d24accb7792743cf586bd7062531d108e7063eba
|
[
"MIT"
] | null | null | null |
import typing
import pytest
from src import selections
@pytest.mark.parametrize(
'min_time, min_bytes, expected_result',
[
(
10 * 60 * 1000,
500 * 1024 * 1024,
[
(2820,),
(2827,),
(2832,),
(2834,),
(2842,),
(2844,),
(2851,),
(2852,),
(2859,),
(2862,),
(2872,),
(2878,),
(2881,),
(2890,),
(2897,),
(2899,),
(2902,),
(2903,),
(2907,),
(2910,),
(2918,),
(2920,),
(3166,),
(3167,),
(3224,),
(3226,),
(3228,),
(3229,),
(3230,),
(3231,),
(3233,),
(3234,),
(3235,),
(3236,),
(3239,),
(3242,),
(3243,),
(3244,),
(3245,),
(3246,),
(3247,),
(3249,),
(3251,),
(3338,),
],
),
(
5 * 60 * 1000,
50 * 1024 * 1024,
[
(1666,),
(2819,),
(2820,),
(2821,),
(2822,),
(2823,),
(2824,),
(2825,),
(2826,),
(2827,),
(2828,),
(2829,),
(2830,),
(2831,),
(2832,),
(2833,),
(2834,),
(2835,),
(2836,),
(2837,),
(2838,),
(2839,),
(2840,),
(2841,),
(2842,),
(2843,),
(2844,),
(2845,),
(2846,),
(2847,),
(2848,),
(2849,),
(2850,),
(2851,),
(2852,),
(2853,),
(2854,),
(2855,),
(2856,),
(2857,),
(2858,),
(2859,),
(2860,),
(2861,),
(2862,),
(2863,),
(2864,),
(2865,),
(2866,),
(2867,),
(2868,),
(2869,),
(2870,),
(2871,),
(2872,),
(2873,),
(2874,),
(2875,),
(2876,),
(2877,),
(2878,),
(2879,),
(2880,),
(2881,),
(2882,),
(2883,),
(2884,),
(2885,),
(2886,),
(2887,),
(2888,),
(2889,),
(2890,),
(2891,),
(2892,),
(2893,),
(2894,),
(2895,),
(2896,),
(2897,),
(2898,),
(2899,),
(2900,),
(2901,),
(2902,),
(2903,),
(2904,),
(2905,),
(2906,),
(2907,),
(2908,),
(2909,),
(2910,),
(2911,),
(2912,),
(2913,),
(2914,),
(2915,),
(2916,),
(2917,),
(2918,),
(2919,),
(2920,),
(2921,),
(2922,),
(2923,),
(2924,),
(2925,),
(3165,),
(3166,),
(3167,),
(3168,),
(3169,),
(3170,),
(3171,),
(3172,),
(3173,),
(3174,),
(3175,),
(3176,),
(3177,),
(3178,),
(3179,),
(3180,),
(3181,),
(3182,),
(3183,),
(3184,),
(3185,),
(3186,),
(3187,),
(3188,),
(3189,),
(3190,),
(3191,),
(3192,),
(3193,),
(3194,),
(3195,),
(3196,),
(3197,),
(3198,),
(3199,),
(3200,),
(3201,),
(3202,),
(3203,),
(3204,),
(3205,),
(3206,),
(3207,),
(3208,),
(3209,),
(3210,),
(3211,),
(3212,),
(3213,),
(3214,),
(3215,),
(3216,),
(3217,),
(3218,),
(3219,),
(3220,),
(3221,),
(3222,),
(3223,),
(3224,),
(3226,),
(3227,),
(3228,),
(3229,),
(3230,),
(3231,),
(3232,),
(3233,),
(3234,),
(3235,),
(3236,),
(3237,),
(3238,),
(3239,),
(3240,),
(3241,),
(3242,),
(3243,),
(3244,),
(3245,),
(3246,),
(3247,),
(3248,),
(3249,),
(3250,),
(3251,),
(3252,),
(3337,),
(3338,),
(3340,),
(3341,),
(3342,),
(3343,),
(3344,),
(3345,),
(3346,),
(3347,),
(3348,),
(3360,),
(3361,),
(3362,),
(3363,),
(3364,),
(3428,),
(3429,),
],
),
(
2 * 60 * 1000,
100 * 1024 * 1024,
[
(2819,),
(2820,),
(2821,),
(2822,),
(2823,),
(2824,),
(2825,),
(2826,),
(2827,),
(2828,),
(2829,),
(2830,),
(2831,),
(2832,),
(2833,),
(2834,),
(2835,),
(2836,),
(2837,),
(2838,),
(2839,),
(2840,),
(2841,),
(2842,),
(2843,),
(2844,),
(2845,),
(2846,),
(2847,),
(2848,),
(2849,),
(2850,),
(2851,),
(2852,),
(2853,),
(2854,),
(2855,),
(2856,),
(2857,),
(2858,),
(2859,),
(2860,),
(2861,),
(2862,),
(2863,),
(2864,),
(2865,),
(2866,),
(2867,),
(2868,),
(2869,),
(2870,),
(2871,),
(2872,),
(2873,),
(2874,),
(2875,),
(2876,),
(2877,),
(2878,),
(2879,),
(2880,),
(2881,),
(2882,),
(2883,),
(2884,),
(2885,),
(2886,),
(2887,),
(2888,),
(2889,),
(2890,),
(2891,),
(2892,),
(2893,),
(2894,),
(2895,),
(2896,),
(2897,),
(2898,),
(2899,),
(2900,),
(2901,),
(2902,),
(2903,),
(2904,),
(2905,),
(2906,),
(2907,),
(2908,),
(2909,),
(2910,),
(2911,),
(2912,),
(2913,),
(2914,),
(2915,),
(2916,),
(2917,),
(2918,),
(2919,),
(2920,),
(2921,),
(2922,),
(2923,),
(2924,),
(2925,),
(3165,),
(3166,),
(3167,),
(3168,),
(3169,),
(3170,),
(3171,),
(3172,),
(3173,),
(3174,),
(3175,),
(3176,),
(3177,),
(3178,),
(3179,),
(3180,),
(3181,),
(3182,),
(3183,),
(3184,),
(3185,),
(3186,),
(3187,),
(3188,),
(3189,),
(3190,),
(3191,),
(3192,),
(3193,),
(3194,),
(3195,),
(3196,),
(3197,),
(3198,),
(3199,),
(3200,),
(3201,),
(3202,),
(3203,),
(3204,),
(3205,),
(3206,),
(3207,),
(3208,),
(3209,),
(3210,),
(3211,),
(3212,),
(3213,),
(3214,),
(3215,),
(3216,),
(3217,),
(3218,),
(3219,),
(3220,),
(3221,),
(3222,),
(3223,),
(3224,),
(3226,),
(3227,),
(3228,),
(3229,),
(3230,),
(3231,),
(3232,),
(3233,),
(3234,),
(3235,),
(3236,),
(3237,),
(3238,),
(3239,),
(3240,),
(3241,),
(3242,),
(3243,),
(3244,),
(3245,),
(3246,),
(3247,),
(3248,),
(3249,),
(3250,),
(3251,),
(3252,),
(3337,),
(3338,),
(3341,),
(3342,),
(3343,),
(3344,),
(3345,),
(3346,),
(3347,),
(3348,),
(3360,),
(3361,),
(3362,),
(3363,),
(3364,),
(3428,),
(3429,),
],
),
],
)
def test_selections(
min_time: int,
min_bytes: int,
expected_result: typing.List[typing.Tuple[int]],
) -> None:
code_returned_rows = [tuple(row) for row in selections.selections(min_time, min_bytes)]
assert code_returned_rows == expected_result
| 24.617822
| 91
| 0.178571
| 540
| 12,432
| 4.085185
| 0.459259
| 0.009519
| 0.016319
| 0.021759
| 0.776065
| 0.776065
| 0.765186
| 0.765186
| 0.765186
| 0.765186
| 0
| 0.461981
| 0.664656
| 12,432
| 504
| 92
| 24.666667
| 0.067162
| 0
| 0
| 0.944112
| 0
| 0
| 0.002896
| 0
| 0
| 0
| 0
| 0
| 0.001996
| 1
| 0.001996
| false
| 0
| 0.005988
| 0
| 0.007984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
167cfaccf65c4a217ee921178f5ab5094fc6d8a6
| 241
|
py
|
Python
|
iris_sdk/models/data/ord/rate_center_search_order.py
|
NumberAI/python-bandwidth-iris
|
0e05f79d68b244812afb97e00fd65b3f46d00aa3
|
[
"MIT"
] | 2
|
2020-04-13T13:47:59.000Z
|
2022-02-23T20:32:41.000Z
|
iris_sdk/models/data/ord/rate_center_search_order.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2020-09-18T20:59:24.000Z
|
2021-08-25T16:51:42.000Z
|
iris_sdk/models/data/ord/rate_center_search_order.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2018-12-12T14:39:50.000Z
|
2020-11-17T21:42:29.000Z
|
#!/usr/bin/env python
from iris_sdk.models.base_resource import BaseData
from iris_sdk.models.maps.ord.rate_center_search_order import \
RateCenterSearchOrderMap
class RateCenterSearchOrder(RateCenterSearchOrderMap, BaseData):
pass
| 30.125
| 64
| 0.834025
| 29
| 241
| 6.724138
| 0.758621
| 0.082051
| 0.112821
| 0.174359
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099585
| 241
| 8
| 65
| 30.125
| 0.898618
| 0.082988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
16e4af35a62847ccd702cb32c6b8a27f27bee59d
| 129
|
py
|
Python
|
app/admin/views/__init__.py
|
CAUCHY2932/Northern_Hemisphere
|
06e5b3e3f0b47940d5b4549899d062373b019579
|
[
"BSD-3-Clause"
] | null | null | null |
app/admin/views/__init__.py
|
CAUCHY2932/Northern_Hemisphere
|
06e5b3e3f0b47940d5b4549899d062373b019579
|
[
"BSD-3-Clause"
] | 8
|
2021-03-19T03:28:32.000Z
|
2022-03-11T23:59:00.000Z
|
app/admin/views/__init__.py
|
CAUCHY2932/Northern_Hemisphere
|
06e5b3e3f0b47940d5b4549899d062373b019579
|
[
"BSD-3-Clause"
] | null | null | null |
# coding:utf-8
import app.admin.views.start
import app.admin.views.book
import app.admin.views.user
import app.admin.views.site
| 18.428571
| 28
| 0.79845
| 23
| 129
| 4.478261
| 0.478261
| 0.349515
| 0.543689
| 0.737864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.085271
| 129
| 6
| 29
| 21.5
| 0.864407
| 0.093023
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bc783a7352b8476e222dafa470f894420847e079
| 22,670
|
py
|
Python
|
sdk/python/pulumi_gcp/securitycenter/notification_config.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/securitycenter/notification_config.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/securitycenter/notification_config.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NotificationConfigArgs', 'NotificationConfig']
@pulumi.input_type
class NotificationConfigArgs:
def __init__(__self__, *,
config_id: pulumi.Input[str],
organization: pulumi.Input[str],
pubsub_topic: pulumi.Input[str],
streaming_config: pulumi.Input['NotificationConfigStreamingConfigArgs'],
description: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a NotificationConfig resource.
:param pulumi.Input[str] config_id: This must be unique within the organization.
:param pulumi.Input[str] organization: The organization whose Cloud Security Command Center the Notification
Config lives in.
:param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications.
Structure is documented below.
:param pulumi.Input[str] description: The description of the notification config (max of 1024 characters).
"""
pulumi.set(__self__, "config_id", config_id)
pulumi.set(__self__, "organization", organization)
pulumi.set(__self__, "pubsub_topic", pubsub_topic)
pulumi.set(__self__, "streaming_config", streaming_config)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter(name="configId")
def config_id(self) -> pulumi.Input[str]:
"""
This must be unique within the organization.
"""
return pulumi.get(self, "config_id")
@config_id.setter
def config_id(self, value: pulumi.Input[str]):
pulumi.set(self, "config_id", value)
@property
@pulumi.getter
def organization(self) -> pulumi.Input[str]:
"""
The organization whose Cloud Security Command Center the Notification
Config lives in.
"""
return pulumi.get(self, "organization")
@organization.setter
def organization(self, value: pulumi.Input[str]):
pulumi.set(self, "organization", value)
@property
@pulumi.getter(name="pubsubTopic")
def pubsub_topic(self) -> pulumi.Input[str]:
"""
The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
"""
return pulumi.get(self, "pubsub_topic")
@pubsub_topic.setter
def pubsub_topic(self, value: pulumi.Input[str]):
pulumi.set(self, "pubsub_topic", value)
@property
@pulumi.getter(name="streamingConfig")
def streaming_config(self) -> pulumi.Input['NotificationConfigStreamingConfigArgs']:
"""
The config for triggering streaming-based notifications.
Structure is documented below.
"""
return pulumi.get(self, "streaming_config")
@streaming_config.setter
def streaming_config(self, value: pulumi.Input['NotificationConfigStreamingConfigArgs']):
pulumi.set(self, "streaming_config", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the notification config (max of 1024 characters).
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@pulumi.input_type
class _NotificationConfigState:
def __init__(__self__, *,
config_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
organization: Optional[pulumi.Input[str]] = None,
pubsub_topic: Optional[pulumi.Input[str]] = None,
service_account: Optional[pulumi.Input[str]] = None,
streaming_config: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']] = None):
"""
Input properties used for looking up and filtering NotificationConfig resources.
:param pulumi.Input[str] config_id: This must be unique within the organization.
:param pulumi.Input[str] description: The description of the notification config (max of 1024 characters).
:param pulumi.Input[str] name: The resource name of this notification config, in the format
'organizations/{{organization}}/notificationConfigs/{{config_id}}'.
:param pulumi.Input[str] organization: The organization whose Cloud Security Command Center the Notification
Config lives in.
:param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
:param pulumi.Input[str] service_account: The service account that needs "pubsub.topics.publish" permission to publish to the Pub/Sub topic.
:param pulumi.Input['NotificationConfigStreamingConfigArgs'] streaming_config: The config for triggering streaming-based notifications.
Structure is documented below.
"""
if config_id is not None:
pulumi.set(__self__, "config_id", config_id)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if organization is not None:
pulumi.set(__self__, "organization", organization)
if pubsub_topic is not None:
pulumi.set(__self__, "pubsub_topic", pubsub_topic)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if streaming_config is not None:
pulumi.set(__self__, "streaming_config", streaming_config)
@property
@pulumi.getter(name="configId")
def config_id(self) -> Optional[pulumi.Input[str]]:
"""
This must be unique within the organization.
"""
return pulumi.get(self, "config_id")
@config_id.setter
def config_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the notification config (max of 1024 characters).
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The resource name of this notification config, in the format
'organizations/{{organization}}/notificationConfigs/{{config_id}}'.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def organization(self) -> Optional[pulumi.Input[str]]:
"""
The organization whose Cloud Security Command Center the Notification
Config lives in.
"""
return pulumi.get(self, "organization")
@organization.setter
def organization(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "organization", value)
@property
@pulumi.getter(name="pubsubTopic")
def pubsub_topic(self) -> Optional[pulumi.Input[str]]:
"""
The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
"""
return pulumi.get(self, "pubsub_topic")
@pubsub_topic.setter
def pubsub_topic(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pubsub_topic", value)
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[pulumi.Input[str]]:
"""
The service account that needs "pubsub.topics.publish" permission to publish to the Pub/Sub topic.
"""
return pulumi.get(self, "service_account")
@service_account.setter
def service_account(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_account", value)
@property
@pulumi.getter(name="streamingConfig")
def streaming_config(self) -> Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]:
"""
The config for triggering streaming-based notifications.
Structure is documented below.
"""
return pulumi.get(self, "streaming_config")
@streaming_config.setter
def streaming_config(self, value: Optional[pulumi.Input['NotificationConfigStreamingConfigArgs']]):
pulumi.set(self, "streaming_config", value)
class NotificationConfig(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
organization: Optional[pulumi.Input[str]] = None,
pubsub_topic: Optional[pulumi.Input[str]] = None,
streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None,
__props__=None):
"""
A Cloud Security Command Center (Cloud SCC) notification config. A
notification config is a Cloud SCC resource that contains the
configuration to send notifications for create/update events of
findings, assets, and so on.
> **Note:** In order to use Cloud SCC resources, your organization must be enrolled
in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center).
Without doing so, you may run into errors during resource creation.
To get more information about NotificationConfig, see:
* [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs)
* How-to Guides
* [Official Documentation](https://cloud.google.com/security-command-center/docs)
## Example Usage
### Scc Notification Config Basic
```python
import pulumi
import pulumi_gcp as gcp
scc_notification = gcp.pubsub.Topic("sccNotification")
custom_notification_config = gcp.securitycenter.NotificationConfig("customNotificationConfig",
config_id="my-config",
organization="123456789",
description="My custom Cloud Security Command Center Finding Notification Configuration",
pubsub_topic=scc_notification.id,
streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs(
filter="category = \"OPEN_FIREWALL\" AND state = \"ACTIVE\"",
))
```
## Import
NotificationConfig can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}}
```
```sh
$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] config_id: This must be unique within the organization.
:param pulumi.Input[str] description: The description of the notification config (max of 1024 characters).
:param pulumi.Input[str] organization: The organization whose Cloud Security Command Center the Notification
Config lives in.
:param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
:param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications.
Structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NotificationConfigArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A Cloud Security Command Center (Cloud SCC) notification config. A
notification config is a Cloud SCC resource that contains the
configuration to send notifications for create/update events of
findings, assets, and so on.
> **Note:** In order to use Cloud SCC resources, your organization must be enrolled
in [SCC Standard/Premium](https://cloud.google.com/security-command-center/docs/quickstart-security-command-center).
Without doing so, you may run into errors during resource creation.
To get more information about NotificationConfig, see:
* [API documentation](https://cloud.google.com/security-command-center/docs/reference/rest/v1/organizations.notificationConfigs)
* How-to Guides
* [Official Documentation](https://cloud.google.com/security-command-center/docs)
## Example Usage
### Scc Notification Config Basic
```python
import pulumi
import pulumi_gcp as gcp
scc_notification = gcp.pubsub.Topic("sccNotification")
custom_notification_config = gcp.securitycenter.NotificationConfig("customNotificationConfig",
config_id="my-config",
organization="123456789",
description="My custom Cloud Security Command Center Finding Notification Configuration",
pubsub_topic=scc_notification.id,
streaming_config=gcp.securitycenter.NotificationConfigStreamingConfigArgs(
filter="category = \"OPEN_FIREWALL\" AND state = \"ACTIVE\"",
))
```
## Import
NotificationConfig can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default organizations/{{organization}}/notificationConfigs/{{name}}
```
```sh
$ pulumi import gcp:securitycenter/notificationConfig:NotificationConfig default {{organization}}/{{name}}
```
:param str resource_name: The name of the resource.
:param NotificationConfigArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NotificationConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
organization: Optional[pulumi.Input[str]] = None,
pubsub_topic: Optional[pulumi.Input[str]] = None,
streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NotificationConfigArgs.__new__(NotificationConfigArgs)
if config_id is None and not opts.urn:
raise TypeError("Missing required property 'config_id'")
__props__.__dict__["config_id"] = config_id
__props__.__dict__["description"] = description
if organization is None and not opts.urn:
raise TypeError("Missing required property 'organization'")
__props__.__dict__["organization"] = organization
if pubsub_topic is None and not opts.urn:
raise TypeError("Missing required property 'pubsub_topic'")
__props__.__dict__["pubsub_topic"] = pubsub_topic
if streaming_config is None and not opts.urn:
raise TypeError("Missing required property 'streaming_config'")
__props__.__dict__["streaming_config"] = streaming_config
__props__.__dict__["name"] = None
__props__.__dict__["service_account"] = None
super(NotificationConfig, __self__).__init__(
'gcp:securitycenter/notificationConfig:NotificationConfig',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
organization: Optional[pulumi.Input[str]] = None,
pubsub_topic: Optional[pulumi.Input[str]] = None,
service_account: Optional[pulumi.Input[str]] = None,
streaming_config: Optional[pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']]] = None) -> 'NotificationConfig':
"""
Get an existing NotificationConfig resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] config_id: This must be unique within the organization.
:param pulumi.Input[str] description: The description of the notification config (max of 1024 characters).
:param pulumi.Input[str] name: The resource name of this notification config, in the format
'organizations/{{organization}}/notificationConfigs/{{config_id}}'.
:param pulumi.Input[str] organization: The organization whose Cloud Security Command Center the Notification
Config lives in.
:param pulumi.Input[str] pubsub_topic: The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
:param pulumi.Input[str] service_account: The service account that needs "pubsub.topics.publish" permission to publish to the Pub/Sub topic.
:param pulumi.Input[pulumi.InputType['NotificationConfigStreamingConfigArgs']] streaming_config: The config for triggering streaming-based notifications.
Structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NotificationConfigState.__new__(_NotificationConfigState)
__props__.__dict__["config_id"] = config_id
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["organization"] = organization
__props__.__dict__["pubsub_topic"] = pubsub_topic
__props__.__dict__["service_account"] = service_account
__props__.__dict__["streaming_config"] = streaming_config
return NotificationConfig(resource_name, opts=opts, __props__=__props__)
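# Usage sketch (illustrative, not emitted by tfgen): adopting an existing
# config into a program via get() and exporting the computed service account.
# The resource name and ID below are assumptions following the docstring's
# import format.
#
#   existing = NotificationConfig.get(
#       "imported-config",
#       id="organizations/123456789/notificationConfigs/my-config")
#   pulumi.export("sccServiceAccount", existing.service_account)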
@property
@pulumi.getter(name="configId")
def config_id(self) -> pulumi.Output[str]:
"""
This must be unique within the organization.
"""
return pulumi.get(self, "config_id")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the notification config (max of 1024 characters).
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The resource name of this notification config, in the format
'organizations/{{organization}}/notificationConfigs/{{config_id}}'.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def organization(self) -> pulumi.Output[str]:
"""
The organization whose Cloud Security Command Center the Notification
Config lives in.
"""
return pulumi.get(self, "organization")
@property
@pulumi.getter(name="pubsubTopic")
def pubsub_topic(self) -> pulumi.Output[str]:
"""
The Pub/Sub topic to send notifications to. Its format is
"projects/[project_id]/topics/[topic]".
"""
return pulumi.get(self, "pubsub_topic")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> pulumi.Output[str]:
"""
The service account that needs "pubsub.topics.publish" permission to publish to the Pub/Sub topic.
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="streamingConfig")
def streaming_config(self) -> pulumi.Output['outputs.NotificationConfigStreamingConfig']:
"""
The config for triggering streaming-based notifications.
Structure is documented below.
"""
return pulumi.get(self, "streaming_config")
| 44.714004
| 161
| 0.661226
| 2,413
| 22,670
| 6.026523
| 0.098632
| 0.061271
| 0.06354
| 0.05295
| 0.843213
| 0.819626
| 0.78538
| 0.758011
| 0.754504
| 0.72631
| 0
| 0.002851
| 0.241861
| 22,670
| 506
| 162
| 44.802372
| 0.843254
| 0.409616
| 0
| 0.590909
| 1
| 0
| 0.134427
| 0.038214
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157025
| false
| 0.004132
| 0.028926
| 0
| 0.280992
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc7aed95070ea2718e44219b9db81ddfb927929e
| 5,036
|
py
|
Python
|
musket_core/tests/coders_test.py
|
dreamflyer/musket_core
|
1bdf1b4715a3b5c63bf687799d7b977fdf49053f
|
[
"MIT"
] | 16
|
2019-09-25T14:58:45.000Z
|
2020-04-04T22:03:27.000Z
|
musket_core/tests/coders_test.py
|
dreamflyer/musket_core
|
1bdf1b4715a3b5c63bf687799d7b977fdf49053f
|
[
"MIT"
] | 17
|
2019-06-28T06:46:31.000Z
|
2020-01-23T10:01:12.000Z
|
musket_core/tests/coders_test.py
|
dreamflyer/musket_core
|
1bdf1b4715a3b5c63bf687799d7b977fdf49053f
|
[
"MIT"
] | 2
|
2019-11-22T15:09:18.000Z
|
2019-12-17T03:17:25.000Z
|
import unittest
from musket_core import coders
import numpy as np
import pandas as pd
import os
import math
fl=__file__
fl=os.path.dirname(fl)
class TestCoders(unittest.TestCase):
def test_binary_num(self):
a=np.array([0,1,0,1])
bc=coders.get_coder("binary",a, None)
self.assertEqual(bc[0], 0, "should be zero")
self.assertEqual(bc[1], 1, "should be one")
v=bc._decode(np.array([0.6]))
self.assertEqual(v, 1, "should be one")
v=bc._decode(np.array([0.2]))
self.assertEqual(v, 0, "should be zero")
pass
def test_binary_str(self):
a=np.array(["0","1","0","1"])
bc=coders.get_coder("binary",a, None)
self.assertEqual(bc[0], 0, "should be zero")
self.assertEqual(bc[1], 1, "should be one")
v=bc._decode(np.array([0.6]))
self.assertEqual(v, "1", "should be one")
v=bc._decode(np.array([0.2]))
self.assertEqual(v, "0", "should be zero")
pass
def test_binary_str2(self):
a=np.array(["","1","","1"])
bc=coders.get_coder("binary",a, None)
self.assertEqual(bc[0], 0, "should be zero")
self.assertEqual(bc[1], 1, "should be one")
v=bc._decode(np.array([0.6]))
self.assertEqual(v, "1", "should be one")
v=bc._decode(np.array([0.2]))
self.assertEqual(v, "", "should be zero")
pass
def test_binary_bool(self):
a=np.array([True,False,True,False])
bc=coders.get_coder("binary",a, None)
self.assertEqual(bc[0], 1, "should be zero")
self.assertEqual(bc[1], 0, "should be one")
v=bc._decode(np.array([0.6]))
self.assertEqual(v, True, "should be one")
v=bc._decode(np.array([0.2]))
self.assertEqual(v, False, "should be zero")
pass
def test_categorical_num(self):
a=np.array([0,1,2,1])
bc=coders.get_coder("categorical_one_hot",a, None)
self.assertEqual(bc[0][0], True, "should be zero")
self.assertEqual(bc[0][1], False, "should be one")
v=bc._decode(np.array([0.3,0.4,0.45]))
self.assertEqual(v, 2, "should be one")
v=bc._decode(np.array([0.2,0.1,0.1]))
self.assertEqual(v, 0, "should be zero")
pass
def test_categorical_str(self):
a=np.array(["a","b","c","b"])
bc=coders.get_coder("categorical_one_hot",a, None)
self.assertEqual(bc[0][0], True, "should be zero")
self.assertEqual(bc[0][1], False, "should be one")
v=bc._decode(np.array([0.3,0.4,0.45]))
self.assertEqual(v, "c", "should be one")
v=bc._decode(np.array([0.2,0.1,0.1]))
self.assertEqual(v, "a", "should be zero")
pass
def test_categorical_str2(self):
a=np.array(["","b","c","b"])
bc=coders.get_coder("categorical_one_hot",a, None)
self.assertEqual(bc[0][0], True, "should be zero")
self.assertEqual(bc[0][1], False, "should be one")
v=bc._decode(np.array([0.3,0.4,0.45]))
self.assertEqual(v, "c", "should be one")
v=bc._decode(np.array([0.2,0.1,0.1]))
self.assertEqual(v, "", "should be zero")
pass
def test_categorical_pd(self):
a=np.array([math.nan,1,2,1])
bc=coders.get_coder("categorical_one_hot",a, None)
self.assertEqual(bc[0][2], True, "should be zero")
self.assertEqual(bc[0][1], False, "should be one")
v=bc._decode(np.array([0.3,0.4,0.45]))
self.assertEqual(math.isnan(v),True, "should be one")
v=bc._decode(np.array([0.2,0.1,0.1]))
self.assertEqual(v, 1, "should be zero")
pass
def test_multiclass(self):
a=np.array(["1 2","0 2","0",""])
bc=coders.get_coder("multi_class",a, None)
val=bc[0]
self.assertEqual((val==np.array([False,True,True])).sum(), 3,"Fixing format")
for i in range(len(a)):
val=bc[i]
r=bc._decode(val)
self.assertEqual(r, a[i], "Decoding should work also")
pass
def test_multiclass1(self):
a=np.array(["1_2","0_2","0",""])
bc=coders.get_coder("multi_class",a, None)
val=bc[0]
self.assertEqual((val==np.array([False,True,True])).sum(), 3,"Fixing format")
for i in range(len(a)):
val=bc[i]
r=bc._decode(val)
self.assertEqual(r, a[i], "Decoding should work also")
pass
def test_multiclass2(self):
a=np.array(["1","","",""])
bc=coders.get_coder("multi_class",a, None)
val=bc[0]
self.assertEqual((val==np.array([True])).sum(), 1,"Fixing format")
for i in range(len(a)):
val=bc[i]
r=bc._decode(val)
self.assertEqual(r, a[i], "Decoding should work also")
pass
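# Quick reference for the coder API exercised above (a sketch inferred from
# these tests, not authoritative documentation): coders.get_coder(name, values,
# None) returns a coder that encodes samples by index and decodes prediction
# vectors back to the original label space.
#
#   bc = coders.get_coder("categorical_one_hot", np.array(["a", "b", "c", "b"]), None)
#   bc[0]                                   # one-hot encoding of the first sample
#   bc._decode(np.array([0.1, 0.7, 0.2]))   # -> "b"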
| 37.029412
| 86
| 0.538721
| 756
| 5,036
| 3.5
| 0.097884
| 0.21542
| 0.057445
| 0.072562
| 0.897959
| 0.873394
| 0.864324
| 0.809524
| 0.809524
| 0.792139
| 0
| 0.040376
| 0.28197
| 5,036
| 136
| 87
| 37.029412
| 0.691372
| 0
| 0
| 0.638655
| 0
| 0
| 0.145451
| 0
| 0
| 0
| 0
| 0
| 0.319328
| 1
| 0.092437
| false
| 0.092437
| 0.05042
| 0
| 0.151261
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
bc8acb8ede34bacdf376a2fc95f5b2c7c78ede61
| 141,721
|
py
|
Python
|
src/test/python/test_scc_pacs.py
|
xchange11/ttconv-1
|
6e67172af126fa0e90690044848f300c0173715c
|
[
"BSD-2-Clause"
] | 66
|
2020-09-25T11:38:28.000Z
|
2022-03-23T15:15:34.000Z
|
src/test/python/test_scc_pacs.py
|
xchange11/ttconv-1
|
6e67172af126fa0e90690044848f300c0173715c
|
[
"BSD-2-Clause"
] | 217
|
2020-09-22T22:45:22.000Z
|
2022-03-31T23:02:15.000Z
|
src/test/python/test_scc_pacs.py
|
xchange11/ttconv-1
|
6e67172af126fa0e90690044848f300c0173715c
|
[
"BSD-2-Clause"
] | 5
|
2020-09-25T09:24:17.000Z
|
2021-08-08T20:52:26.000Z
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2020, Sandflow Consulting LLC
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for the SCC PACs"""
# pylint: disable=R0201,C0115,C0116
import unittest
from ttconv.scc.codes.preambles_address_codes import SccPreambleAddressCode
from ttconv.style_properties import TextDecorationType, NamedColors, FontStyleType
class SCCPreambleAddressCodesTest(unittest.TestCase):
def test_scc_pac_values(self):
channel_1_byte_1 = [0x11, 0x12, 0x15, 0x16, 0x17, 0x10, 0x13, 0x14]
channel_2_byte_1 = [0x19, 0x1A, 0x1D, 0x1E, 0x1F, 0x18, 0x1B, 0x1C]
all_range = list(range(0x00, 0XFF))
byte_2_range = range(0x40, 0x80)
other_bytes_1 = [item for item in all_range
if item not in channel_1_byte_1 and item not in channel_2_byte_1]
other_bytes_2 = [item for item in all_range if item not in list(byte_2_range)]
for b1 in channel_1_byte_1:
for b2 in byte_2_range:
pac = SccPreambleAddressCode.find(b1, b2)
if b2 > 0x5F and b1 % 0x08 == 0: # row 11 case
self.assertIsNone(pac)
else:
self.assertIsNotNone(pac)
for b2 in other_bytes_2:
self.assertIsNone(SccPreambleAddressCode.find(b1, b2))
for b1 in channel_2_byte_1:
for b2 in byte_2_range:
pac = SccPreambleAddressCode.find(b1, b2)
if b2 > 0x5F and b1 % 0x08 == 0: # row 11 case
self.assertIsNone(pac)
else:
self.assertIsNotNone(pac)
for b2 in other_bytes_2:
self.assertIsNone(SccPreambleAddressCode.find(b1, b2))
for b1 in other_bytes_1:
for b2 in range(0x00, 0xFF):
self.assertIsNone(SccPreambleAddressCode.find(b1, b2))
def check_scc_pac_attributes(self, pac, channel, row, indent, color, font_style, text_decoration):
self.assertEqual(channel, pac.get_channel())
self.assertEqual(row, pac.get_row())
self.assertEqual(indent, pac.get_indent())
self.assertEqual(color, pac.get_color())
self.assertEqual(font_style, pac.get_font_style())
self.assertEqual(text_decoration, pac.get_text_decoration())
def test_scc_pac_white(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x40), 1, 1, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x60), 1, 2, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x40), 1, 3, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x60), 1, 4, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x40), 1, 5, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x60), 1, 6, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x40), 1, 7, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x60), 1, 8, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x40), 1, 9, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x60), 1, 10, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x40), 1, 11, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x40), 1, 12, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x60), 1, 13, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x40), 1, 14, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x60), 1, 15, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x40), 2, 1, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x60), 2, 2, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x40), 2, 3, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x60), 2, 4, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x40), 2, 5, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x60), 2, 6, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x40), 2, 7, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x60), 2, 8, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x40), 2, 9, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x60), 2, 10, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x40), 2, 11, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x40), 2, 12, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x60), 2, 13, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x40), 2, 14, None, NamedColors.white.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x60), 2, 15, None, NamedColors.white.value, None, None)
def test_scc_pac_white_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x41), 1, 1, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x61), 1, 2, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x41), 1, 3, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x61), 1, 4, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x41), 1, 5, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x61), 1, 6, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x41), 1, 7, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x61), 1, 8, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x41), 1, 9, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x61), 1, 10, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x41), 1, 11, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x41), 1, 12, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x61), 1, 13, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x41), 1, 14, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x61), 1, 15, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x41), 2, 1, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x61), 2, 2, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x41), 2, 3, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x61), 2, 4, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x41), 2, 5, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x61), 2, 6, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x41), 2, 7, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x61), 2, 8, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x41), 2, 9, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x61), 2, 10, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x41), 2, 11, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x41), 2, 12, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x61), 2, 13, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x41), 2, 14, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x61), 2, 15, None, NamedColors.white.value, None,
TextDecorationType(underline=True))
def test_scc_pac_green(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x42), 1, 1, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x62), 1, 2, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x42), 1, 3, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x62), 1, 4, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x42), 1, 5, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x62), 1, 6, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x42), 1, 7, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x62), 1, 8, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x42), 1, 9, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x62), 1, 10, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x42), 1, 11, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x42), 1, 12, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x62), 1, 13, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x42), 1, 14, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x62), 1, 15, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x42), 2, 1, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x62), 2, 2, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x42), 2, 3, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x62), 2, 4, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x42), 2, 5, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x62), 2, 6, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x42), 2, 7, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x62), 2, 8, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x42), 2, 9, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x62), 2, 10, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x42), 2, 11, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x42), 2, 12, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x62), 2, 13, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x42), 2, 14, None, NamedColors.green.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x62), 2, 15, None, NamedColors.green.value, None, None)
def test_scc_pac_green_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x43), 1, 1, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x63), 1, 2, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x43), 1, 3, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x63), 1, 4, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x43), 1, 5, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x63), 1, 6, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x43), 1, 7, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x63), 1, 8, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x43), 1, 9, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x63), 1, 10, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x43), 1, 11, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x43), 1, 12, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x63), 1, 13, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x43), 1, 14, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x63), 1, 15, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x43), 2, 1, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x63), 2, 2, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x43), 2, 3, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x63), 2, 4, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x43), 2, 5, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x63), 2, 6, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x43), 2, 7, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x63), 2, 8, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x43), 2, 9, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x63), 2, 10, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x43), 2, 11, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x43), 2, 12, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x63), 2, 13, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x43), 2, 14, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x63), 2, 15, None, NamedColors.green.value, None,
TextDecorationType(underline=True))
def test_scc_pac_blue(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x44), 1, 1, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x64), 1, 2, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x44), 1, 3, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x64), 1, 4, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x44), 1, 5, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x64), 1, 6, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x44), 1, 7, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x64), 1, 8, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x44), 1, 9, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x64), 1, 10, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x44), 1, 11, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x44), 1, 12, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x64), 1, 13, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x44), 1, 14, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x64), 1, 15, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x44), 2, 1, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x64), 2, 2, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x44), 2, 3, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x64), 2, 4, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x44), 2, 5, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x64), 2, 6, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x44), 2, 7, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x64), 2, 8, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x44), 2, 9, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x64), 2, 10, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x44), 2, 11, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x44), 2, 12, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x64), 2, 13, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x44), 2, 14, None, NamedColors.blue.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x64), 2, 15, None, NamedColors.blue.value, None, None)
def test_scc_pac_blue_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x45), 1, 1, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x65), 1, 2, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x45), 1, 3, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x65), 1, 4, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x45), 1, 5, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x65), 1, 6, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x45), 1, 7, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x65), 1, 8, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x45), 1, 9, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x65), 1, 10, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x45), 1, 11, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x45), 1, 12, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x65), 1, 13, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x45), 1, 14, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x65), 1, 15, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x45), 2, 1, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x65), 2, 2, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x45), 2, 3, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x65), 2, 4, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x45), 2, 5, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x65), 2, 6, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x45), 2, 7, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x65), 2, 8, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x45), 2, 9, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x65), 2, 10, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x45), 2, 11, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x45), 2, 12, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x65), 2, 13, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x45), 2, 14, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x65), 2, 15, None, NamedColors.blue.value, None,
TextDecorationType(underline=True))
def test_scc_pac_cyan(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x46), 1, 1, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x66), 1, 2, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x46), 1, 3, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x66), 1, 4, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x46), 1, 5, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x66), 1, 6, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x46), 1, 7, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x66), 1, 8, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x46), 1, 9, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x66), 1, 10, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x46), 1, 11, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x46), 1, 12, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x66), 1, 13, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x46), 1, 14, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x66), 1, 15, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x46), 2, 1, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x66), 2, 2, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x46), 2, 3, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x66), 2, 4, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x46), 2, 5, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x66), 2, 6, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x46), 2, 7, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x66), 2, 8, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x46), 2, 9, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x66), 2, 10, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x46), 2, 11, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x46), 2, 12, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x66), 2, 13, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x46), 2, 14, None, NamedColors.cyan.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x66), 2, 15, None, NamedColors.cyan.value, None, None)
def test_scc_pac_cyan_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x47), 1, 1, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x67), 1, 2, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x47), 1, 3, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x67), 1, 4, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x47), 1, 5, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x67), 1, 6, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x47), 1, 7, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x67), 1, 8, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x47), 1, 9, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x67), 1, 10, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x47), 1, 11, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x47), 1, 12, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x67), 1, 13, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x47), 1, 14, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x67), 1, 15, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x47), 2, 1, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x67), 2, 2, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x47), 2, 3, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x67), 2, 4, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x47), 2, 5, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x67), 2, 6, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x47), 2, 7, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x67), 2, 8, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x47), 2, 9, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x67), 2, 10, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x47), 2, 11, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x47), 2, 12, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x67), 2, 13, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x47), 2, 14, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x67), 2, 15, None, NamedColors.cyan.value, None,
TextDecorationType(underline=True))
def test_scc_pac_red(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x48), 1, 1, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x68), 1, 2, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x48), 1, 3, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x68), 1, 4, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x48), 1, 5, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x68), 1, 6, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x48), 1, 7, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x68), 1, 8, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x48), 1, 9, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x68), 1, 10, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x48), 1, 11, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x48), 1, 12, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x68), 1, 13, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x48), 1, 14, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x68), 1, 15, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x48), 2, 1, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x68), 2, 2, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x48), 2, 3, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x68), 2, 4, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x48), 2, 5, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x68), 2, 6, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x48), 2, 7, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x68), 2, 8, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x48), 2, 9, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x68), 2, 10, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x48), 2, 11, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x48), 2, 12, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x68), 2, 13, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x48), 2, 14, None, NamedColors.red.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x68), 2, 15, None, NamedColors.red.value, None, None)
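
# PAC data bytes 0x49/0x69 select a red foreground with underline.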
def test_scc_pac_red_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x49), 1, 1, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x69), 1, 2, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x49), 1, 3, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x69), 1, 4, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x49), 1, 5, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x69), 1, 6, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x49), 1, 7, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x69), 1, 8, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x49), 1, 9, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x69), 1, 10, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x49), 1, 11, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x49), 1, 12, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x69), 1, 13, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x49), 1, 14, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x69), 1, 15, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x49), 2, 1, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x69), 2, 2, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x49), 2, 3, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x69), 2, 4, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x49), 2, 5, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x69), 2, 6, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x49), 2, 7, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x69), 2, 8, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x49), 2, 9, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x69), 2, 10, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x49), 2, 11, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x49), 2, 12, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x69), 2, 13, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x49), 2, 14, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x69), 2, 15, None, NamedColors.red.value, None,
TextDecorationType(underline=True))
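
# PAC data bytes 0x4A/0x6A select a yellow foreground with no underline.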
def test_scc_pac_yellow(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4A), 1, 1, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6A), 1, 2, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4A), 1, 3, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6A), 1, 4, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4A), 1, 5, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6A), 1, 6, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4A), 1, 7, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6A), 1, 8, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4A), 1, 9, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6A), 1, 10, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4A), 1, 11, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4A), 1, 12, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6A), 1, 13, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4A), 1, 14, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6A), 1, 15, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4A), 2, 1, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6A), 2, 2, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4A), 2, 3, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6A), 2, 4, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4A), 2, 5, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6A), 2, 6, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4A), 2, 7, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6A), 2, 8, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4A), 2, 9, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6A), 2, 10, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4A), 2, 11, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4A), 2, 12, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6A), 2, 13, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4A), 2, 14, None, NamedColors.yellow.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6A), 2, 15, None, NamedColors.yellow.value, None, None)
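
# PAC data bytes 0x4B/0x6B select a yellow foreground with underline.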
def test_scc_pac_yellow_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4B), 1, 1, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6B), 1, 2, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4B), 1, 3, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6B), 1, 4, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4B), 1, 5, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6B), 1, 6, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4B), 1, 7, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6B), 1, 8, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4B), 1, 9, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6B), 1, 10, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4B), 1, 11, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4B), 1, 12, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6B), 1, 13, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4B), 1, 14, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6B), 1, 15, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4B), 2, 1, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6B), 2, 2, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4B), 2, 3, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6B), 2, 4, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4B), 2, 5, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6B), 2, 6, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4B), 2, 7, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6B), 2, 8, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4B), 2, 9, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6B), 2, 10, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4B), 2, 11, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4B), 2, 12, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6B), 2, 13, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4B), 2, 14, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6B), 2, 15, None, NamedColors.yellow.value, None,
TextDecorationType(underline=True))
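
# PAC data bytes 0x4C/0x6C select a magenta foreground with no underline.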
def test_scc_pac_magenta(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4C), 1, 1, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6C), 1, 2, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4C), 1, 3, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6C), 1, 4, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4C), 1, 5, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6C), 1, 6, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4C), 1, 7, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6C), 1, 8, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4C), 1, 9, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6C), 1, 10, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4C), 1, 11, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4C), 1, 12, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6C), 1, 13, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4C), 1, 14, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6C), 1, 15, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4C), 2, 1, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6C), 2, 2, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4C), 2, 3, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6C), 2, 4, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4C), 2, 5, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6C), 2, 6, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4C), 2, 7, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6C), 2, 8, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4C), 2, 9, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6C), 2, 10, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4C), 2, 11, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4C), 2, 12, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6C), 2, 13, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4C), 2, 14, None, NamedColors.magenta.value, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6C), 2, 15, None, NamedColors.magenta.value, None, None)
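
# PAC data bytes 0x4D/0x6D select a magenta foreground with underline.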
def test_scc_pac_magenta_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4D), 1, 1, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6D), 1, 2, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4D), 1, 3, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6D), 1, 4, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4D), 1, 5, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6D), 1, 6, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4D), 1, 7, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6D), 1, 8, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4D), 1, 9, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6D), 1, 10, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4D), 1, 11, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4D), 1, 12, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6D), 1, 13, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4D), 1, 14, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6D), 1, 15, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4D), 2, 1, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6D), 2, 2, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4D), 2, 3, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6D), 2, 4, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4D), 2, 5, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6D), 2, 6, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4D), 2, 7, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6D), 2, 8, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4D), 2, 9, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6D), 2, 10, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4D), 2, 11, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4D), 2, 12, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6D), 2, 13, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4D), 2, 14, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6D), 2, 15, None, NamedColors.magenta.value, None,
TextDecorationType(underline=True))
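
# PAC data bytes 0x4E/0x6E select white italics with no underline.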
def test_scc_pac_white_italics(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4E), 1, 1, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6E), 1, 2, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4E), 1, 3, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6E), 1, 4, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4E), 1, 5, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6E), 1, 6, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4E), 1, 7, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6E), 1, 8, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4E), 1, 9, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6E), 1, 10, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4E), 1, 11, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4E), 1, 12, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6E), 1, 13, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4E), 1, 14, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6E), 1, 15, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4E), 2, 1, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6E), 2, 2, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4E), 2, 3, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6E), 2, 4, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4E), 2, 5, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6E), 2, 6, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4E), 2, 7, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6E), 2, 8, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4E), 2, 9, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6E), 2, 10, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4E), 2, 11, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4E), 2, 12, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6E), 2, 13, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4E), 2, 14, None, NamedColors.white.value, FontStyleType.italic,
None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6E), 2, 15, None, NamedColors.white.value, FontStyleType.italic,
None)
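
# PAC data bytes 0x4F/0x6F select white italics with underline.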
def test_scc_pac_white_italics_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x4F), 1, 1, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x6F), 1, 2, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x4F), 1, 3, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x6F), 1, 4, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x4F), 1, 5, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x6F), 1, 6, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x4F), 1, 7, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x6F), 1, 8, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x4F), 1, 9, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x6F), 1, 10, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x4F), 1, 11, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x4F), 1, 12, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x6F), 1, 13, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x4F), 1, 14, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x6F), 1, 15, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x4F), 2, 1, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x6F), 2, 2, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x4F), 2, 3, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x6F), 2, 4, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x4F), 2, 5, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x6F), 2, 6, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x4F), 2, 7, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x6F), 2, 8, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x4F), 2, 9, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x6F), 2, 10, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x4F), 2, 11, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x4F), 2, 12, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x6F), 2, 13, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x4F), 2, 14, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x6F), 2, 15, None, NamedColors.white.value, FontStyleType.italic,
TextDecorationType(underline=True))
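
# PAC data bytes 0x50/0x70 select an indent of 0 with no color, style or underline attributes.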
def test_scc_pac_indent_0(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x50), 1, 1, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x70), 1, 2, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x50), 1, 3, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x70), 1, 4, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x50), 1, 5, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x70), 1, 6, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x50), 1, 7, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x70), 1, 8, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x50), 1, 9, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x70), 1, 10, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x50), 1, 11, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x50), 1, 12, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x70), 1, 13, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x50), 1, 14, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x70), 1, 15, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x50), 2, 1, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x70), 2, 2, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x50), 2, 3, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x70), 2, 4, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x50), 2, 5, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x70), 2, 6, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x50), 2, 7, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x70), 2, 8, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x50), 2, 9, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x70), 2, 10, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x50), 2, 11, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x50), 2, 12, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x70), 2, 13, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x50), 2, 14, 0, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x70), 2, 15, 0, None, None, None)
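
# PAC data bytes 0x51/0x71 select an indent of 0 with underline.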
def test_scc_pac_indent_0_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x51), 1, 1, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x71), 1, 2, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x51), 1, 3, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x71), 1, 4, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x51), 1, 5, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x71), 1, 6, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x51), 1, 7, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x71), 1, 8, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x51), 1, 9, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x71), 1, 10, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x51), 1, 11, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x51), 1, 12, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x71), 1, 13, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x51), 1, 14, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x71), 1, 15, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x51), 2, 1, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x71), 2, 2, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x51), 2, 3, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x71), 2, 4, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x51), 2, 5, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x71), 2, 6, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x51), 2, 7, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x71), 2, 8, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x51), 2, 9, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x71), 2, 10, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x51), 2, 11, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x51), 2, 12, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x71), 2, 13, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x51), 2, 14, 0, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x71), 2, 15, 0, None, None,
TextDecorationType(underline=True))
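
# PAC data bytes 0x52/0x72 select an indent of 4 with no underline.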
def test_scc_pac_indent_4(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x52), 1, 1, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x72), 1, 2, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x52), 1, 3, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x72), 1, 4, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x52), 1, 5, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x72), 1, 6, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x52), 1, 7, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x72), 1, 8, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x52), 1, 9, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x72), 1, 10, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x52), 1, 11, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x52), 1, 12, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x72), 1, 13, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x52), 1, 14, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x72), 1, 15, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x52), 2, 1, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x72), 2, 2, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x52), 2, 3, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x72), 2, 4, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x52), 2, 5, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x72), 2, 6, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x52), 2, 7, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x72), 2, 8, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x52), 2, 9, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x72), 2, 10, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x52), 2, 11, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x52), 2, 12, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x72), 2, 13, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x52), 2, 14, 4, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x72), 2, 15, 4, None, None, None)
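
# PAC data bytes 0x53/0x73 select an indent of 4 with underline.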
def test_scc_pac_indent_4_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x53), 1, 1, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x73), 1, 2, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x53), 1, 3, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x73), 1, 4, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x53), 1, 5, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x73), 1, 6, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x53), 1, 7, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x73), 1, 8, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x53), 1, 9, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x73), 1, 10, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x53), 1, 11, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x53), 1, 12, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x73), 1, 13, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x53), 1, 14, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x73), 1, 15, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x53), 2, 1, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x73), 2, 2, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x53), 2, 3, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x73), 2, 4, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x53), 2, 5, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x73), 2, 6, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x53), 2, 7, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x73), 2, 8, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x53), 2, 9, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x73), 2, 10, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x53), 2, 11, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x53), 2, 12, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x73), 2, 13, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x53), 2, 14, 4, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x73), 2, 15, 4, None, None,
TextDecorationType(underline=True))
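
# PAC data bytes 0x54/0x74 select an indent of 8 with no underline.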
def test_scc_pac_indent_8(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x54), 1, 1, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x74), 1, 2, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x54), 1, 3, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x74), 1, 4, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x54), 1, 5, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x74), 1, 6, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x54), 1, 7, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x74), 1, 8, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x54), 1, 9, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x74), 1, 10, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x54), 1, 11, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x54), 1, 12, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x74), 1, 13, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x54), 1, 14, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x74), 1, 15, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x54), 2, 1, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x74), 2, 2, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x54), 2, 3, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x74), 2, 4, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x54), 2, 5, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x74), 2, 6, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x54), 2, 7, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x74), 2, 8, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x54), 2, 9, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x74), 2, 10, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x54), 2, 11, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x54), 2, 12, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x74), 2, 13, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x54), 2, 14, 8, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x74), 2, 15, 8, None, None, None)
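
# PAC data bytes 0x55/0x75 select an indent of 8 with underline.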
def test_scc_pac_indent_8_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x55), 1, 1, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x75), 1, 2, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x55), 1, 3, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x75), 1, 4, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x55), 1, 5, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x75), 1, 6, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x55), 1, 7, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x75), 1, 8, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x55), 1, 9, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x75), 1, 10, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x55), 1, 11, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x55), 1, 12, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x75), 1, 13, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x55), 1, 14, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x75), 1, 15, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x55), 2, 1, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x75), 2, 2, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x55), 2, 3, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x75), 2, 4, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x55), 2, 5, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x75), 2, 6, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x55), 2, 7, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x75), 2, 8, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x55), 2, 9, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x75), 2, 10, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x55), 2, 11, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x55), 2, 12, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x75), 2, 13, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x55), 2, 14, 8, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x75), 2, 15, 8, None, None,
TextDecorationType(underline=True))
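
# PAC data bytes 0x56/0x76 select an indent of 12 with no underline.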
def test_scc_pac_indent_12(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x56), 1, 1, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x76), 1, 2, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x56), 1, 3, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x76), 1, 4, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x56), 1, 5, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x76), 1, 6, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x56), 1, 7, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x76), 1, 8, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x56), 1, 9, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x76), 1, 10, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x56), 1, 11, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x56), 1, 12, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x76), 1, 13, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x56), 1, 14, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x76), 1, 15, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x56), 2, 1, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x76), 2, 2, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x56), 2, 3, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x76), 2, 4, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x56), 2, 5, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x76), 2, 6, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x56), 2, 7, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x76), 2, 8, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x56), 2, 9, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x76), 2, 10, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x56), 2, 11, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x56), 2, 12, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x76), 2, 13, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x56), 2, 14, 12, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x76), 2, 15, 12, None, None, None)
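
# PAC data bytes 0x57/0x77 select an indent of 12 with underline.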
def test_scc_pac_indent_12_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x57), 1, 1, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x77), 1, 2, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x57), 1, 3, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x77), 1, 4, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x57), 1, 5, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x77), 1, 6, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x57), 1, 7, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x77), 1, 8, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x57), 1, 9, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x77), 1, 10, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x57), 1, 11, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x57), 1, 12, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x77), 1, 13, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x57), 1, 14, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x77), 1, 15, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x57), 2, 1, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x77), 2, 2, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x57), 2, 3, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x77), 2, 4, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x57), 2, 5, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x77), 2, 6, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x57), 2, 7, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x77), 2, 8, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x57), 2, 9, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x77), 2, 10, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x57), 2, 11, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x57), 2, 12, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x77), 2, 13, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x57), 2, 14, 12, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x77), 2, 15, 12, None, None,
TextDecorationType(underline=True))
def test_scc_pac_indent_16(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x58), 1, 1, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x78), 1, 2, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x58), 1, 3, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x78), 1, 4, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x58), 1, 5, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x78), 1, 6, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x58), 1, 7, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x78), 1, 8, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x58), 1, 9, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x78), 1, 10, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x58), 1, 11, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x58), 1, 12, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x78), 1, 13, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x58), 1, 14, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x78), 1, 15, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x58), 2, 1, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x78), 2, 2, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x58), 2, 3, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x78), 2, 4, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x58), 2, 5, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x78), 2, 6, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x58), 2, 7, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x78), 2, 8, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x58), 2, 9, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x78), 2, 10, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x58), 2, 11, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x58), 2, 12, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x78), 2, 13, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x58), 2, 14, 16, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x78), 2, 15, 16, None, None, None)
def test_scc_pac_indent_16_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x59), 1, 1, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x79), 1, 2, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x59), 1, 3, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x79), 1, 4, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x59), 1, 5, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x79), 1, 6, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x59), 1, 7, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x79), 1, 8, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x59), 1, 9, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x79), 1, 10, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x59), 1, 11, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x59), 1, 12, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x79), 1, 13, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x59), 1, 14, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x79), 1, 15, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x59), 2, 1, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x79), 2, 2, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x59), 2, 3, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x79), 2, 4, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x59), 2, 5, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x79), 2, 6, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x59), 2, 7, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x79), 2, 8, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x59), 2, 9, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x79), 2, 10, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x59), 2, 11, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x59), 2, 12, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x79), 2, 13, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x59), 2, 14, 16, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x79), 2, 15, 16, None, None,
TextDecorationType(underline=True))
def test_scc_pac_indent_20(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5A), 1, 1, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7A), 1, 2, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5A), 1, 3, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7A), 1, 4, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5A), 1, 5, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7A), 1, 6, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5A), 1, 7, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7A), 1, 8, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5A), 1, 9, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7A), 1, 10, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5A), 1, 11, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5A), 1, 12, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7A), 1, 13, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5A), 1, 14, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7A), 1, 15, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5A), 2, 1, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7A), 2, 2, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5A), 2, 3, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7A), 2, 4, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5A), 2, 5, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7A), 2, 6, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5A), 2, 7, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7A), 2, 8, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5A), 2, 9, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7A), 2, 10, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5A), 2, 11, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5A), 2, 12, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7A), 2, 13, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5A), 2, 14, 20, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7A), 2, 15, 20, None, None, None)
def test_scc_pac_indent_20_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5B), 1, 1, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7B), 1, 2, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5B), 1, 3, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7B), 1, 4, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5B), 1, 5, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7B), 1, 6, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5B), 1, 7, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7B), 1, 8, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5B), 1, 9, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7B), 1, 10, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5B), 1, 11, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5B), 1, 12, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7B), 1, 13, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5B), 1, 14, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7B), 1, 15, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5B), 2, 1, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7B), 2, 2, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5B), 2, 3, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7B), 2, 4, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5B), 2, 5, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7B), 2, 6, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5B), 2, 7, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7B), 2, 8, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5B), 2, 9, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7B), 2, 10, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5B), 2, 11, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5B), 2, 12, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7B), 2, 13, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5B), 2, 14, 20, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7B), 2, 15, 20, None, None,
TextDecorationType(underline=True))
def test_scc_pac_indent_24(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5C), 1, 1, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7C), 1, 2, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5C), 1, 3, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7C), 1, 4, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5C), 1, 5, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7C), 1, 6, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5C), 1, 7, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7C), 1, 8, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5C), 1, 9, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7C), 1, 10, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5C), 1, 11, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5C), 1, 12, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7C), 1, 13, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5C), 1, 14, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7C), 1, 15, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5C), 2, 1, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7C), 2, 2, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5C), 2, 3, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7C), 2, 4, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5C), 2, 5, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7C), 2, 6, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5C), 2, 7, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7C), 2, 8, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5C), 2, 9, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7C), 2, 10, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5C), 2, 11, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5C), 2, 12, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7C), 2, 13, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5C), 2, 14, 24, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7C), 2, 15, 24, None, None, None)
def test_scc_pac_indent_24_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5D), 1, 1, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7D), 1, 2, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5D), 1, 3, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7D), 1, 4, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5D), 1, 5, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7D), 1, 6, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5D), 1, 7, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7D), 1, 8, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5D), 1, 9, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7D), 1, 10, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5D), 1, 11, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5D), 1, 12, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7D), 1, 13, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5D), 1, 14, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7D), 1, 15, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5D), 2, 1, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7D), 2, 2, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5D), 2, 3, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7D), 2, 4, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5D), 2, 5, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7D), 2, 6, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5D), 2, 7, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7D), 2, 8, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5D), 2, 9, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7D), 2, 10, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5D), 2, 11, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5D), 2, 12, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7D), 2, 13, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5D), 2, 14, 24, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7D), 2, 15, 24, None, None,
TextDecorationType(underline=True))
def test_scc_pac_indent_28(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5E), 1, 1, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7E), 1, 2, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5E), 1, 3, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7E), 1, 4, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5E), 1, 5, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7E), 1, 6, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5E), 1, 7, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7E), 1, 8, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5E), 1, 9, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7E), 1, 10, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5E), 1, 11, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5E), 1, 12, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7E), 1, 13, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5E), 1, 14, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7E), 1, 15, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5E), 2, 1, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7E), 2, 2, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5E), 2, 3, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7E), 2, 4, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5E), 2, 5, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7E), 2, 6, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5E), 2, 7, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7E), 2, 8, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5E), 2, 9, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7E), 2, 10, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5E), 2, 11, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5E), 2, 12, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7E), 2, 13, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5E), 2, 14, 28, None, None, None)
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7E), 2, 15, 28, None, None, None)
def test_scc_pac_indent_28_underline(self):
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x5F), 1, 1, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x11, 0x7F), 1, 2, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x5F), 1, 3, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x12, 0x7F), 1, 4, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x5F), 1, 5, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x15, 0x7F), 1, 6, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x5F), 1, 7, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x16, 0x7F), 1, 8, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x5F), 1, 9, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x17, 0x7F), 1, 10, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x10, 0x5F), 1, 11, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x5F), 1, 12, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x13, 0x7F), 1, 13, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x5F), 1, 14, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x14, 0x7F), 1, 15, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x5F), 2, 1, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x19, 0x7F), 2, 2, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x5F), 2, 3, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1A, 0x7F), 2, 4, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x5F), 2, 5, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1D, 0x7F), 2, 6, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x5F), 2, 7, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1E, 0x7F), 2, 8, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x5F), 2, 9, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1F, 0x7F), 2, 10, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x18, 0x5F), 2, 11, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x5F), 2, 12, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1B, 0x7F), 2, 13, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x5F), 2, 14, 28, None, None,
TextDecorationType(underline=True))
self.check_scc_pac_attributes(SccPreambleAddressCode(0x1C, 0x7F), 2, 15, 28, None, None,
TextDecorationType(underline=True))
if __name__ == '__main__':
unittest.main()
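The indent PAC cases above follow a regular byte layout. Below is a minimal, hedged sketch of a decoder derived only from the (byte pair) -> (channel, row, indent, underline) values listed in these tests; it is not ttconv's implementation, the function and table names are made up for illustration, and it ignores the colour/style PACs covered elsewhere in the suite.
# Illustration only: reproduces the mapping visible in the test data above.
PAC_BASE_ROWS = {0x11: 1, 0x12: 3, 0x15: 5, 0x16: 7, 0x17: 9, 0x10: 11, 0x13: 12, 0x14: 14}

def decode_indent_pac(byte_1: int, byte_2: int):
    channel = 2 if byte_1 & 0x08 else 1               # channel-2 PACs set bit 3 of the first byte
    row = PAC_BASE_ROWS[byte_1 & ~0x08]
    if byte_2 & 0x20 and (byte_1 & ~0x08) != 0x10:    # second bytes with bit 0x20 set select the next row
        row += 1
    indent = ((byte_2 & 0x0E) >> 1) * 4               # 0x.6 -> 12, 0x.8 -> 16, ..., 0x.E -> 28
    underline = bool(byte_2 & 0x01)                   # odd second bytes add underline
    return channel, row, indent, underline

assert decode_indent_pac(0x17, 0x58) == (1, 9, 16, False)
assert decode_indent_pac(0x1A, 0x7B) == (2, 4, 20, True)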
| 87.374229
| 129
| 0.689333
| 16,135
| 141,721
| 5.86396
| 0.01754
| 0.063034
| 0.111726
| 0.213296
| 0.941521
| 0.934175
| 0.93297
| 0.923955
| 0.910522
| 0.894013
| 0
| 0.073728
| 0.209482
| 141,721
| 1,621
| 130
| 87.428131
| 0.7708
| 0.00997
| 0
| 0.341321
| 0
| 0
| 0.000057
| 0
| 0
| 0
| 0.05549
| 0
| 0.00842
| 1
| 0.022021
| false
| 0
| 0.001943
| 0
| 0.024611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcab57fb16b16bdb97e645f2dba9e5a2f1d7fa1f
| 10,293
|
py
|
Python
|
tests/test_observable/test_skip.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_skip.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_skip.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
import unittest
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestSkip(unittest.TestCase):
def test_skip_complete_after(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
)
def create():
return xs.pipe(ops.skip(20))
results = scheduler.start(create)
assert results.messages == [on_completed(690)]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_complete_same(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
)
def create():
return xs.pipe(ops.skip(17))
results = scheduler.start(create)
assert results.messages == [on_completed(690)]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_complete_before(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
)
def create():
return xs.pipe(ops.skip(10))
results = scheduler.start(create)
assert results.messages == [
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_complete_zero(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
)
def create():
return xs.pipe(ops.skip(0))
results = scheduler.start(create)
assert results.messages == [
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_completed(690),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_error_after(self):
ex = "ex"
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_error(690, ex),
)
def create():
return xs.pipe(ops.skip(20))
results = scheduler.start(create)
assert results.messages == [on_error(690, ex)]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_error_same(self):
ex = "ex"
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_error(690, ex),
)
def create():
return xs.pipe(ops.skip(17))
results = scheduler.start(create)
assert results.messages == [on_error(690, ex)]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_error_before(self):
ex = "ex"
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_error(690, ex),
)
def create():
return xs.pipe(ops.skip(3))
results = scheduler.start(create)
assert results.messages == [
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
on_error(690, ex),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_skip_dispose_before(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
)
def create():
return xs.pipe(ops.skip(3))
results = scheduler.start(create, disposed=250)
assert results.messages == []
assert xs.subscriptions == [subscribe(200, 250)]
def test_skip_dispose_after(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, 6),
on_next(150, 4),
on_next(210, 9),
on_next(230, 13),
on_next(270, 7),
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
on_next(410, 15),
on_next(415, 16),
on_next(460, 72),
on_next(510, 76),
on_next(560, 32),
on_next(570, -100),
on_next(580, -3),
on_next(590, 5),
on_next(630, 10),
)
def create():
return xs.pipe(ops.skip(3))
results = scheduler.start(create, disposed=400)
assert results.messages == [
on_next(280, 1),
on_next(300, -1),
on_next(310, 3),
on_next(340, 8),
on_next(370, 11),
]
assert xs.subscriptions == [subscribe(200, 400)]
if __name__ == "__main__":
unittest.main()
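A minimal usage sketch of the operator under test, assuming the reactivex 4.x API that the imports above target; it is not part of this test module.
import reactivex
from reactivex import operators as ops

# skip(2) drops the first two notifications and re-emits the rest unchanged.
reactivex.of(1, 2, 3, 4, 5).pipe(
    ops.skip(2),
).subscribe(on_next=print)  # prints 3, 4, 5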
| 28.2
| 57
| 0.469445
| 1,270
| 10,293
| 3.57874
| 0.070866
| 0.285149
| 0.036964
| 0.026403
| 0.90055
| 0.886029
| 0.886029
| 0.886029
| 0.886029
| 0.864906
| 0
| 0.175933
| 0.404158
| 10,293
| 364
| 58
| 28.277473
| 0.565139
| 0
| 0
| 0.870769
| 0
| 0
| 0.00136
| 0
| 0
| 0
| 0
| 0
| 0.055385
| 1
| 0.055385
| false
| 0
| 0.009231
| 0.027692
| 0.095385
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bcb9144fdddbbf32bc78ac12f77acb144b544d93
| 142
|
py
|
Python
|
python/package/geo/test/__init__.py
|
fiomenankiti/playground
|
7c3139ffe5db4b18cf042b8027c9f670860371e0
|
[
"MIT"
] | null | null | null |
python/package/geo/test/__init__.py
|
fiomenankiti/playground
|
7c3139ffe5db4b18cf042b8027c9f670860371e0
|
[
"MIT"
] | null | null | null |
python/package/geo/test/__init__.py
|
fiomenankiti/playground
|
7c3139ffe5db4b18cf042b8027c9f670860371e0
|
[
"MIT"
] | null | null | null |
from geo.calc import Calc
from geo.calc import Distance
from geo.geosp import Wt
from geo.geosp import Gh
from geo.files.csv_file import check
| 28.4
| 36
| 0.823944
| 27
| 142
| 4.296296
| 0.444444
| 0.301724
| 0.189655
| 0.293103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133803
| 142
| 5
| 36
| 28.4
| 0.943089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
bcea70bd02cca28f65bc7151eb0b0e69448cc1e4
| 7,566
|
py
|
Python
|
lattedb/project/formfac/migrations/0009_auto_20200528_0907.py
|
callat-qcd/lattedb
|
75c06748f3d59332a84ec1b5794c215c5974a46f
|
[
"BSD-3-Clause"
] | 1
|
2019-12-11T02:33:23.000Z
|
2019-12-11T02:33:23.000Z
|
lattedb/project/formfac/migrations/0009_auto_20200528_0907.py
|
callat-qcd/lattedb
|
75c06748f3d59332a84ec1b5794c215c5974a46f
|
[
"BSD-3-Clause"
] | 10
|
2020-01-29T17:06:01.000Z
|
2021-05-31T14:41:19.000Z
|
lattedb/project/formfac/migrations/0009_auto_20200528_0907.py
|
callat-qcd/lattedb
|
75c06748f3d59332a84ec1b5794c215c5974a46f
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.0.6 on 2020-05-28 09:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('project_formfac', '0008_auto_20200408_0823'),
]
operations = [
migrations.AlterField(
model_name='concatenatedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='correlatormeta',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='diskconcatenatedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='diskcorrelatorh5dset',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='diskformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='diskspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='disktslicedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='disktslicedsaveragedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='disktslicedsaveragedspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='disktslicedspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='formfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='spectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tapeconcatenatedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tapecorrelatorh5dset',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tapetslicedsaveragedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tapetslicedsaveragedspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tslicedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tslicedsaveragedformfactor4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tslicedsaveragedspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='tslicedspectrum4dfile',
name='user',
field=models.ForeignKey(blank=True, help_text='User who updated this object. Set on save by connection to database. Anonymous if not found.', null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 64.666667
| 239
| 0.690193
| 927
| 7,566
| 5.517799
| 0.085221
| 0.034409
| 0.057478
| 0.090323
| 0.842815
| 0.842815
| 0.842815
| 0.842815
| 0.842815
| 0.842815
| 0
| 0.008365
| 0.210019
| 7,566
| 116
| 240
| 65.224138
| 0.847415
| 0.005948
| 0
| 0.727273
| 1
| 0
| 0.328501
| 0.054928
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027273
| 0
| 0.054545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4c573a085ee0bd360c33de2b14ef3c06c724afc8
| 2,572
|
py
|
Python
|
Platforms/Web/Processing/Api/Discord/Configs/Quotedisabledchannels/errors.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 2
|
2017-09-14T08:07:55.000Z
|
2021-05-18T05:05:05.000Z
|
Platforms/Web/Processing/Api/Discord/Configs/Quotedisabledchannels/errors.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 111
|
2018-04-15T14:32:14.000Z
|
2021-03-28T21:06:29.000Z
|
Platforms/Web/Processing/Api/Discord/Configs/Quotedisabledchannels/errors.py
|
The-CJ/Phaazebot
|
83a9563d210718071d4e2cdcca3b212c87abaf51
|
[
"MIT"
] | 1
|
2018-04-15T13:24:44.000Z
|
2018-04-15T13:24:44.000Z
|
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from Platforms.Web.main_web import PhaazebotWeb
import json
from aiohttp.web import Response
from Utils.Classes.extendedrequest import ExtendedRequest
async def apiDiscordConfigsQuoteDisabledChannelExists(cls:"PhaazebotWeb", WebRequest:ExtendedRequest, **kwargs) -> Response:
"""
Optional keywords:
------------------
* msg `str` : (Default: None) * [Overwrites default]
* channel_id `str` *
* channel_name `str` *
Default message (*gets altered by optional keywords):
----------------------------------------------------
Disabled quote channel already exists
"""
res:dict = dict(status=400, error="discord_disabled_regularchannel_exists")
channel_id:str = kwargs.get("channel_id", "")
if channel_id:
res["channel_id"] = str(channel_id)
channel_name:str = kwargs.get("channel_name", "")
if channel_name:
res["channel_name"] = str(channel_name)
# build message
default_msg:str = "Disabled quote channel already exists"
if channel_name:
default_msg += f" for '{channel_name}'"
if channel_id:
default_msg += f" (Channel ID:{channel_id})"
msg:str = kwargs.get("msg", default_msg)
res["msg"] = msg
cls.BASE.Logger.debug(f"(API/Discord) 400 Channel exists: {WebRequest.path}", require="api:400")
return cls.response(
text=json.dumps(res),
content_type="application/json",
status=400
)
async def apiDiscordConfigsQuoteDisabledChannelNotExists(cls:"PhaazebotWeb", WebRequest:ExtendedRequest, **kwargs) -> Response:
"""
Optional keywords:
------------------
* msg `str` : (Default: None) * [Overwrites default]
* channel_id `str` *
* channel_name `str` *
Default message (*gets altered by optional keywords):
----------------------------------------------------
Disabled quote channel does not exist
"""
res:dict = dict(status=400, error="discord_disabled_regularchannel_not_exists")
channel_id:str = kwargs.get("channel_id", "")
if channel_id:
res["channel_id"] = str(channel_id)
channel_name:str = kwargs.get("channel_name", "")
if channel_name:
res["channel_name"] = str(channel_name)
# build message
default_msg:str = "Disabled quote channel does not exist"
if channel_name:
default_msg += f" for '{channel_name}'"
if channel_id:
default_msg += f" (Channel ID:{channel_id})"
msg:str = kwargs.get("msg", default_msg)
res["msg"] = msg
cls.BASE.Logger.debug(f"(API/Discord) 400 Channel does not exist: {WebRequest.path}", require="api:400")
return cls.response(
text=json.dumps(res),
content_type="application/json",
status=400
)
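A hedged, standalone sketch of just the message-building branches above, so the effect of the optional keywords is easy to see; build_msg is a hypothetical helper written for this illustration, not part of Phaazebot.
def build_msg(channel_id: str = "", channel_name: str = "", msg: str = "") -> str:
    # Mirrors the branches above: an explicit msg wins, otherwise the default
    # message is extended with whichever identifiers were supplied.
    default_msg = "Disabled quote channel already exists"
    if channel_name:
        default_msg += f" for '{channel_name}'"
    if channel_id:
        default_msg += f" (Channel ID:{channel_id})"
    return msg or default_msg

assert build_msg(channel_id="1234", channel_name="general") == \
    "Disabled quote channel already exists for 'general' (Channel ID:1234)"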
| 28.577778
| 127
| 0.691291
| 321
| 2,572
| 5.376947
| 0.202492
| 0.093859
| 0.041715
| 0.044032
| 0.83314
| 0.825029
| 0.816918
| 0.816918
| 0.816918
| 0.816918
| 0
| 0.010724
| 0.12986
| 2,572
| 89
| 128
| 28.898876
| 0.7605
| 0.010498
| 0
| 0.666667
| 0
| 0
| 0.272214
| 0.041089
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.104167
| 0
| 0.145833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c624ee7a6d344a15a579b043c3cb6fef1c9aa3b
| 1,035
|
py
|
Python
|
polymatch/matchers/standard.py
|
linuxdaemon/poly-match
|
66d967999de982d5ee9463c46b0ff8040d91dc67
|
[
"MIT"
] | null | null | null |
polymatch/matchers/standard.py
|
linuxdaemon/poly-match
|
66d967999de982d5ee9463c46b0ff8040d91dc67
|
[
"MIT"
] | 26
|
2020-05-13T17:46:45.000Z
|
2022-03-18T16:07:14.000Z
|
polymatch/matchers/standard.py
|
TotallyNotRobots/poly-match
|
66d967999de982d5ee9463c46b0ff8040d91dc67
|
[
"MIT"
] | null | null | null |
from polymatch import PolymorphicMatcher
class ExactMatcher(PolymorphicMatcher):
def compile_pattern(self, raw_pattern):
return raw_pattern
def compile_pattern_cs(self, raw_pattern):
return raw_pattern
def compile_pattern_ci(self, raw_pattern):
return raw_pattern.lower()
def compile_pattern_cf(self, raw_pattern):
return raw_pattern.casefold()
def match_text(self, pattern, text):
return text == pattern
@classmethod
def get_type(cls):
return "exact"
class ContainsMatcher(PolymorphicMatcher):
def compile_pattern(self, raw_pattern):
return raw_pattern
def compile_pattern_cs(self, raw_pattern):
return raw_pattern
def compile_pattern_ci(self, raw_pattern):
return raw_pattern.lower()
def compile_pattern_cf(self, raw_pattern):
return raw_pattern.casefold()
def match_text(self, pattern, text):
return pattern in text
@classmethod
def get_type(cls):
return "contains"
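A hedged, standalone illustration of the case-folding behaviour that the compile_pattern_cf/match_text pairs above provide; it deliberately bypasses PolymorphicMatcher, whose constructor is not shown in this file, and the helper name is made up.
def contains_casefold(pattern: str, text: str) -> bool:
    # Same idea as ContainsMatcher in case-folded mode: fold both sides, then test membership.
    return pattern.casefold() in text.casefold()

assert contains_casefold("Spam", "LOVELY SPAMMY spam")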
| 23.522727
| 46
| 0.696618
| 125
| 1,035
| 5.496
| 0.216
| 0.232897
| 0.197962
| 0.232897
| 0.838428
| 0.838428
| 0.751092
| 0.751092
| 0.751092
| 0.751092
| 0
| 0
| 0.231884
| 1,035
| 43
| 47
| 24.069767
| 0.864151
| 0
| 0
| 0.758621
| 0
| 0
| 0.01256
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.413793
| false
| 0
| 0.034483
| 0.413793
| 0.931034
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 13
|
d5c64f687d9f59ed689fc14b8df6d5ee61f23931
| 23,742
|
py
|
Python
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
nkhetia31/stix-shifter
|
ace07581cb227fd35e450b2f8871475227a041d0
|
[
"Apache-2.0"
] | 33
|
2018-05-25T17:07:28.000Z
|
2019-09-30T10:08:53.000Z
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
nkhetia31/stix-shifter
|
ace07581cb227fd35e450b2f8871475227a041d0
|
[
"Apache-2.0"
] | 54
|
2018-06-01T18:17:24.000Z
|
2019-09-30T18:36:15.000Z
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
subbyte/stix-shifter
|
36d71c172a5fc5b97d872e623753b0dd1bf4fe6c
|
[
"Apache-2.0"
] | 37
|
2018-07-24T13:29:46.000Z
|
2019-09-29T19:06:27.000Z
|
from stix_shifter_utils.stix_translation.src.json_to_stix import json_to_stix_translator
from stix_shifter_utils.stix_translation.src.utils.transformer_utils import get_module_transformers
from stix_shifter_modules.aws_athena.entry_point import EntryPoint
import unittest
MODULE = "aws_athena"
entry_point = EntryPoint()
map_data = entry_point.get_results_translator().map_data
data_source = {
"type": "identity",
"id": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"name": "aws_athena",
"identity_class": "events"
}
options = {}
class TestAwsResultsToStix(unittest.TestCase):
"""
Unit test cases for translating AWS Athena log results into STIX objects
"""
@staticmethod
def get_first(itr, constraint):
"""
return the first object in itr for which constraint(obj) is true, or None
"""
return next(
(obj for obj in itr if constraint(obj)),
None
)
@staticmethod
def get_first_of_type(itr, typ):
"""
return the first object in itr whose STIX type matches typ
"""
return TestAwsResultsToStix.get_first(itr, lambda o: isinstance(o, dict) and o.get('type') == typ)
def test_common_prop(self):
"""
to test the common stix object properties
"""
data = {
"guardduty": {
"accountid": 979326520502,
"region": "us-east-1",
"type": "UnauthorizedAccess:EC2/SSHBruteForce",
"resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal",
"resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104",
"resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4",
"resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1."
"amazonaws.com",
"resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a",
"resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128",
"resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13",
"resource_instancedetails_imageid": "ami-0015fcaa5516c75ed",
"resource_instancedetails_instanceid": "i-031cb81e1f32a36e1",
"resource_instancedetails_availabilityzone": "us-east-1f",
"service_eventfirstseen": "2020-07-31T06:19:09Z",
"service_action_networkconnectionaction_protocol": "TCP",
"service_action_networkconnectionaction_remoteportdetails_port": "38420",
"service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden",
"service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94",
"service_action_networkconnectionaction_remoteipdetails_city_cityname": "\u00d6rebro",
"service_action_networkconnectionaction_localportdetails_port": "22",
"service_eventlastseen": "2020-09-12T09:19:40Z",
"severity": 2,
"title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.",
"arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding/"
"7ab9d1cb6248e05a0e419a79528761cb",
"createdat": "2020-07-31T06:37:13.745Z",
"description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1. "
"Brute force attacks are used to gain unauthorized access to your instance by "
"guessing the SSH password.",
"finding_id": "7ab9d1cb6248e05a0e419a79528761cb",
"partition": "aws",
"resource": {
"instancedetails": {
"imagedescription": "Provided by Red Hat, Inc.",
"instancestate": "running",
"instancetype": "t2.large",
"launchtime": "2020-09-11T23:16:03Z",
"tags": {
"0": {
"key": "Name",
"value": "ArcSight Logger"
}
}
},
"resourcetype": "Instance"
},
"schemaversion": 2.0,
"service": {
"action": {
"actiontype": "NETWORK_CONNECTION",
"networkconnectionaction": {
"connectiondirection": "INBOUND",
"localportdetails": {
"portname": "SSH"
},
"remoteipdetails": {
"geolocation": {
"lat": "59.2741",
"lon": "15.2066"
},
"organization": {
"asn": "2119",
"asnorg": "Telenor Norge AS",
"isp": "Telenor Sverige AB",
"org": "Telenor Sverige AB"
}
},
"remoteportdetails": {
"portname": "Unknown"
}
}
},
"count": "20",
"detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df",
"resourcerole": "TARGET",
"servicename": "guardduty"
},
"updatedat": "2020-09-12T09:25:34.086Z"
}
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
assert result_bundle['type'] == 'bundle'
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
assert result_bundle_identity['id'] == data_source['id']
assert result_bundle_identity['name'] == data_source['name']
assert result_bundle_identity['identity_class'] == data_source['identity_class']
observed_data = result_bundle_objects[1]
assert observed_data['id'] is not None
assert observed_data['type'] == "observed-data"
assert observed_data['created_by_ref'] == result_bundle_identity['id']
assert observed_data['created'] is not None
assert observed_data['modified'] is not None
assert observed_data['number_observed'] is not None
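# The assertions in the tests below rely on a class helper, get_first_of_type, whose
# definition is not shown in this excerpt. A minimal sketch of what such a helper could
# look like (as a @staticmethod on the test class) is given here for orientation only;
# the real implementation may differ.
def get_first_of_type(itr, typ):
    """Return the first observed object in itr whose 'type' equals typ, or None."""
    return next((obj for obj in itr if isinstance(obj, dict) and obj.get('type') == typ), None)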
def test_vpc_flow_network_json_to_stix(self):
"""to test network stix object properties"""
data = {
"vpcflow": {
"account": 979326520502,
"interfaceid": "eni-04b762de832716892",
"sourceaddress": "89.248.172.85",
"destinationaddress": "172.31.62.249",
"sourceport": 58387,
"destinationport": 51289,
"protocol": "tcp",
"starttime": 1592547796,
"endtime": 1592547798,
"action": "REJECT",
"date": "2020-06-19",
"logstatus": "OK",
"numbytes": 40,
"region": "us-east-1",
"version": 2
}
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic')
assert network_obj is not None, 'network-traffic object type not found'
assert network_obj.keys() == {'type', 'src_ref', 'dst_ref', 'src_port', 'dst_port', 'protocols', 'start', 'end'}
assert network_obj['type'] == 'network-traffic'
assert network_obj['src_ref'] == '1'
assert network_obj['dst_ref'] == '4'
assert network_obj['src_port'] == 58387
assert network_obj['dst_port'] == 51289
assert network_obj['protocols'] == ['tcp']
assert network_obj['start'] == '2020-06-19T06:23:16.000Z'
assert network_obj['end'] == '2020-06-19T06:23:18.000Z'
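# Side note on the two assertions above (not part of the original test): 'start' and 'end'
# are the vpcflow epoch seconds (starttime=1592547796, endtime=1592547798) rendered as UTC
# ISO-8601 with millisecond precision. A quick way to check that by hand:
from datetime import datetime, timezone

def _epoch_to_stix_timestamp(epoch_seconds):
    # 1592547796 -> '2020-06-19T06:23:16.000Z'
    utc = datetime.fromtimestamp(epoch_seconds, tz=timezone.utc)
    return utc.strftime('%Y-%m-%dT%H:%M:%S.') + '{:03d}Z'.format(utc.microsecond // 1000)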
def test_vpc_flow_custom_attr_json_to_stix(self):
"""to test network stix object properties"""
data = {
"vpcflow": {
"account": 979326520502,
"interfaceid": "eni-04b762de832716892",
"sourceaddress": "89.248.172.85",
"destinationaddress": "172.31.62.249",
"sourceport": 58387,
"destinationport": 51289,
"protocol": "tcp",
"starttime": 1592547796,
"endtime": 1592547798,
"action": "REJECT",
"date": "2020-06-19",
"logstatus": "OK",
"numbytes": 40,
"region": "us-east-1",
"version": 2
}
}
options = {"unmapped_fallback": True}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena')
assert custom_object.keys() == {'type', 'interfaceid', 'date', 'logstatus', 'numbytes', 'region', 'version'}
assert custom_object['date'] == '2020-06-19'
assert custom_object['logstatus'] == 'OK'
assert custom_object['numbytes'] == 40
assert custom_object['region'] == 'us-east-1'
assert custom_object['version'] == 2
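# Note on the option exercised above (illustrative, not part of the original test): with
# "unmapped_fallback" enabled the translator keeps fields that have no STIX mapping
# (interfaceid, date, logstatus, numbytes, region, version here) and carries them on the
# custom x-aws-athena object instead of dropping them. Roughly, the fallback amounts to
# something like this hypothetical helper:
def _carry_unmapped_fields(custom_obj, record, mapped_keys):
    # Copy every record field that was not consumed by the mapping onto the custom object.
    for key, value in record.items():
        if key not in mapped_keys:
            custom_obj[key] = value
    return custom_obj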
def test_guardduty_network_json_to_stix(self):
"""to test network stix object properties"""
data = {
"guardduty": {
"accountid": 979326520502,
"region": "us-east-1",
"type": "UnauthorizedAccess:EC2/SSHBruteForce",
"resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal",
"resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104",
"resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4",
"resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1."
"amazonaws.com",
"resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a",
"resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128",
"resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13",
"resource_instancedetails_imageid": "ami-0015fcaa5516c75ed",
"resource_instancedetails_instanceid": "i-031cb81e1f32a36e1",
"resource_instancedetails_availabilityzone": "us-east-1f",
"service_eventfirstseen": "2020-07-31T06:19:09Z",
"service_action_networkconnectionaction_protocol": "TCP",
"service_action_networkconnectionaction_remoteportdetails_port": "38420",
"service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden",
"service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94",
"service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro",
"service_action_networkconnectionaction_localportdetails_port": "22",
"service_eventlastseen": "2020-09-12T09:19:40Z",
"severity": 2,
"title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.",
"arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding"
"/7ab9d1cb6248e05a0e419a79528761cb",
"createdat": "2020-07-31T06:37:13.745Z",
"description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1. "
"Brute force attacks are used to gain unauthorized access to your instance by "
"guessing the SSH password.",
"finding_id": "7ab9d1cb6248e05a0e419a79528761cb",
"partition": "aws",
"resource": {
"instancedetails": {
"imagedescription": "Provided by Red Hat, Inc.",
"instancestate": "running",
"instancetype": "t2.large",
"launchtime": "2020-09-11T23:16:03Z",
"tags": {
"0": {
"key": "Name",
"value": "ArcSight Logger"
}
}
},
"resourcetype": "Instance"
},
"schemaversion": 2.0,
"service": {
"action": {
"actiontype": "NETWORK_CONNECTION",
"networkconnectionaction": {
"connectiondirection": "INBOUND",
"localportdetails": {
"portname": "SSH"
},
"remoteipdetails": {
"geolocation": {
"lat": "59.2741",
"lon": "15.2066"
},
"organization": {
"asn": "2119",
"asnorg": "Telenor Norge AS",
"isp": "Telenor Sverige AB",
"org": "Telenor Sverige AB"
}
},
"remoteportdetails": {
"portname": "Unknown"
}
}
},
"count": "20",
"detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df",
"resourcerole": "TARGET",
"servicename": "guardduty"
},
"updatedat": "2020-09-12T09:25:34.086Z"
}
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic')
assert network_obj is not None, 'network-traffic object type not found'
assert network_obj.keys() == {'type', 'dst_port', 'src_ref', 'dst_ref', 'src_port', 'protocols'}
assert network_obj['type'] == 'network-traffic'
assert network_obj['dst_port'] == 38420
assert network_obj['src_ref'] == '3'
assert network_obj['dst_ref'] == '9'
assert network_obj['src_port'] == 22
assert network_obj['protocols'] == ['tcp']
def test_guardduty_custom_attr_json_to_stix(self):
"""to test network stix object properties"""
data = {
"guardduty": {
"accountid": 979326520502,
"region": "us-east-1",
"type": "UnauthorizedAccess:EC2/SSHBruteForce",
"resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal",
"resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104",
"resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4",
"resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1."
"amazonaws.com",
"resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a",
"resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128",
"resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13",
"resource_instancedetails_imageid": "ami-0015fcaa5516c75ed",
"resource_instancedetails_instanceid": "i-031cb81e1f32a36e1",
"resource_instancedetails_availabilityzone": "us-east-1f",
"service_eventfirstseen": "2020-07-31T06:19:09Z",
"service_action_networkconnectionaction_protocol": "TCP",
"service_action_networkconnectionaction_remoteportdetails_port": "38420",
"service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden",
"service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94",
"service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro",
"service_action_networkconnectionaction_localportdetails_port": "22",
"service_eventlastseen": "2020-09-12T09:19:40Z",
"severity": 2,
"title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.",
"arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding/"
"7ab9d1cb6248e05a0e419a79528761cb",
"createdat": "2020-07-31T06:37:13.745Z",
"description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1."
" Brute force attacks are used to gain unauthorized access to your instance by guessing "
"the SSH password.",
"finding_id": "7ab9d1cb6248e05a0e419a79528761cb",
"partition": "aws",
"resource": {
"instancedetails": {
"imagedescription": "Provided by Red Hat, Inc.",
"instancestate": "running",
"instancetype": "t2.large",
"launchtime": "2020-09-11T23:16:03Z",
"tags": {
"0": {
"key": "Name",
"value": "ArcSight Logger"
}
}
},
"resourcetype": "Instance"
},
"schemaversion": 2.0,
"service": {
"action": {
"actiontype": "NETWORK_CONNECTION",
"networkconnectionaction": {
"connectiondirection": "INBOUND",
"localportdetails": {
"portname": "SSH"
},
"remoteipdetails": {
"geolocation": {
"lat": "59.2741",
"lon": "15.2066"
},
"organization": {
"asn": "2119",
"asnorg": "Telenor Norge AS",
"isp": "Telenor Sverige AB",
"org": "Telenor Sverige AB"
}
},
"remoteportdetails": {
"portname": "Unknown"
}
}
},
"count": "20",
"detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df",
"resourcerole": "TARGET",
"servicename": "guardduty"
},
"updatedat": "2020-09-12T09:25:34.086Z"
}
}
options = {"unmapped_fallback": True}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena')
assert custom_object.keys() == {'type', 'service_action_networkconnectionaction_remoteipdetails_country_countryname',
'finding_id', 'arn', 'createdat', 'partition', 'resource',
'schemaversion', 'service', 'updatedat'}
assert custom_object['arn'] == 'arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed' \
'494f3b7ca56acdc74df/finding/7ab9d1cb6248e05a0e419a79528761cb'
assert custom_object['finding_id'] == '7ab9d1cb6248e05a0e419a79528761cb'
assert custom_object['createdat'] == '2020-07-31T06:37:13.745Z'
assert custom_object['partition'] == 'aws'
assert custom_object['schemaversion'] == 2.0
assert custom_object['updatedat'] == '2020-09-12T09:25:34.086Z'
| 52.18022
| 126
| 0.539087
| 1,921
| 23,742
| 6.401353
| 0.160333
| 0.039034
| 0.087826
| 0.090022
| 0.855818
| 0.839229
| 0.823941
| 0.810116
| 0.810116
| 0.798325
| 0
| 0.097047
| 0.3581
| 23,742
| 454
| 127
| 52.295154
| 0.709843
| 0.015879
| 0
| 0.717073
| 0
| 0
| 0.402752
| 0.205675
| 0
| 0
| 0
| 0
| 0.121951
| 1
| 0.017073
| false
| 0.007317
| 0.009756
| 0
| 0.034146
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
d5c9c3dcfd93144a733bdffa2a7d7a7dc364d51d | 2,807 | py | Python | tests/test_html_escaping.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | ["CC0-1.0"] | 43 | 2017-12-27T05:57:00.000Z | 2022-03-18T10:07:28.000Z |
tests/test_html_escaping.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | ["CC0-1.0"] | 10 | 2018-02-07T11:20:37.000Z | 2021-04-22T21:44:19.000Z |
tests/test_html_escaping.py | copart/pandoc-mustache | f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa | ["CC0-1.0"] | 8 | 2018-11-05T13:10:35.000Z | 2021-08-30T18:14:02.000Z |
"""
Test that escaping characters for HTML is disabled.
"""
import os, subprocess
def test_escape_singlequote(tmpdir):
# Define empty dictionaries
doc = {}
template = {}
# Prepare file names
doc['path'] = tmpdir.join("document.md")
template['path'] = tmpdir.join("template.yaml")
# Prepare file contents
doc['metadata'] = '''---
mustache: {mustachefile}
---
'''
doc['mfiles'] = { "mustachefile": template['path'] }
doc['text'] = 'Hello {{place}}'
template['content'] = "place: world ' universe"
# Write contents to files
with open(doc['path'].strpath, "a") as myfile:
myfile.write(doc['metadata'].format(**doc['mfiles']))
myfile.write(doc['text'])
template['path'].write(template['content'])
# Run pandoc
output = subprocess.check_output(["pandoc", doc['path'].strpath, "--filter", "pandoc-mustache", "--to=plain"], universal_newlines=True)
# Test output
assert output == "Hello world ' universe\n"
def test_escape_gt(tmpdir):
# Define empty dictionaries
doc = {}
template = {}
# Prepare file names
doc['path'] = tmpdir.join("document.md")
template['path'] = tmpdir.join("template.yaml")
# Prepare file contents
doc['metadata'] = '''---
mustache: {mustachefile}
---
'''
doc['mfiles'] = { "mustachefile": template['path'] }
doc['text'] = 'Hello {{place}}'
template['content'] = "place: world > universe"
# Write contents to files
with open(doc['path'].strpath, "a") as myfile:
myfile.write(doc['metadata'].format(**doc['mfiles']))
myfile.write(doc['text'])
template['path'].write(template['content'])
# Run pandoc
output = subprocess.check_output(["pandoc", doc['path'].strpath, "--filter", "pandoc-mustache", "--to=plain"], universal_newlines=True)
# Test output
assert output == "Hello world > universe\n"
def test_escape_ampersand(tmpdir):
# Define empty dictionaries
doc = {}
template = {}
# Prepare file names
doc['path'] = tmpdir.join("document.md")
template['path'] = tmpdir.join("template.yaml")
# Prepare file contents
doc['metadata'] = '''---
mustache: {mustachefile}
---
'''
doc['mfiles'] = { "mustachefile": template['path'] }
doc['text'] = 'Hello {{place}}'
template['content'] = "place: world & universe"
# Write contents to files
with open(doc['path'].strpath, "a") as myfile:
myfile.write(doc['metadata'].format(**doc['mfiles']))
myfile.write(doc['text'])
template['path'].write(template['content'])
# Run pandoc
output = subprocess.check_output(["pandoc", doc['path'].strpath, "--filter", "pandoc-mustache", "--to=plain"], universal_newlines=True)
# Test output
assert output == "Hello world & universe\n"
| 28.642857
| 139
| 0.617385
| 311
| 2,807
| 5.533762
| 0.199357
| 0.036607
| 0.048809
| 0.050552
| 0.944218
| 0.944218
| 0.944218
| 0.944218
| 0.944218
| 0.944218
| 0
| 0
| 0.19487
| 2,807
| 97
| 140
| 28.938144
| 0.761504
| 0.140363
| 0
| 0.818182
| 0
| 0
| 0.307789
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 1
| 0.054545
| false
| 0
| 0.018182
| 0
| 0.072727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
9116473055c5bd072ad59a444dc826781f8a2c35 | 2,387 | py | Python | tests/test_integration_partition.py | themoodymann/piChain | 4de9e8da3994901371713b68bc05295fe6676571 | ["MIT"] | 8 | 2018-02-22T08:52:26.000Z | 2022-02-01T01:28:29.000Z |
tests/test_integration_partition.py | themoodymann/piChain | 4de9e8da3994901371713b68bc05295fe6676571 | ["MIT"] | 3 | 2018-03-07T18:13:53.000Z | 2019-12-03T23:42:42.000Z |
tests/test_integration_partition.py | florianmorath/piChain | df498021cb7c2df26a7980fb85b795f4a0105faf | ["MIT"] | 7 | 2018-02-26T12:28:34.000Z | 2021-01-01T11:33:59.000Z |
"""Integration test: Test partition of piChain nodes.
Note: run tests with default setting values in config.py.
"""
import time
from tests.util import MultiNodeTest
class MultiNodeTestPartition(MultiNodeTest):
def test_scenario30_partition(self):
self.start_processes_with_test_scenario(30, 5)
time.sleep(8)
self.terminate_processes()
node0_blocks = self.extract_committed_blocks_single_process(0)
node1_blocks = self.extract_committed_blocks_single_process(1)
node2_blocks = self.extract_committed_blocks_single_process(2)
node3_blocks = self.extract_committed_blocks_single_process(3)
node4_blocks = self.extract_committed_blocks_single_process(4)
assert len(node0_blocks) > 0
assert node0_blocks == node1_blocks
assert node2_blocks == node1_blocks
assert node3_blocks == node1_blocks
assert node4_blocks == node1_blocks
def test_scenario31_partition(self):
self.start_processes_with_test_scenario(31, 5)
time.sleep(8)
self.terminate_processes()
node0_blocks = self.extract_committed_blocks_single_process(0)
node1_blocks = self.extract_committed_blocks_single_process(1)
node2_blocks = self.extract_committed_blocks_single_process(2)
node3_blocks = self.extract_committed_blocks_single_process(3)
node4_blocks = self.extract_committed_blocks_single_process(4)
assert len(node0_blocks) > 0
assert node0_blocks == node1_blocks
assert node2_blocks == node1_blocks
assert node3_blocks == node1_blocks
assert node4_blocks == node1_blocks
def test_scenario32_partition(self):
self.start_processes_with_test_scenario(32, 5)
time.sleep(15)
self.terminate_processes()
node0_blocks = self.extract_committed_blocks_single_process(0)
node1_blocks = self.extract_committed_blocks_single_process(1)
node2_blocks = self.extract_committed_blocks_single_process(2)
node3_blocks = self.extract_committed_blocks_single_process(3)
node4_blocks = self.extract_committed_blocks_single_process(4)
assert len(node0_blocks) > 0
assert node0_blocks == node1_blocks
assert node2_blocks == node1_blocks
assert node3_blocks == node1_blocks
assert node4_blocks == node1_blocks
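# The three scenarios above repeat the same start/sleep/terminate/compare sequence; only
# the scenario id and the sleep interval differ. A possible helper that would express that
# shared shape (sketch only, not part of the original file):
def _assert_consensus_after_partition(self, scenario_id, sleep_seconds, n_nodes=5):
    self.start_processes_with_test_scenario(scenario_id, n_nodes)
    time.sleep(sleep_seconds)
    self.terminate_processes()
    blocks = [self.extract_committed_blocks_single_process(i) for i in range(n_nodes)]
    # Every node must have committed at least one block and all chains must agree.
    assert len(blocks[0]) > 0
    assert all(b == blocks[0] for b in blocks)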
| 38.5
| 70
| 0.735233
| 294
| 2,387
| 5.55102
| 0.190476
| 0.091912
| 0.15625
| 0.238971
| 0.862132
| 0.862132
| 0.862132
| 0.862132
| 0.775735
| 0.775735
| 0
| 0.041535
| 0.203184
| 2,387
| 61
| 71
| 39.131148
| 0.816509
| 0.045664
| 0
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.066667
| false
| 0
| 0.044444
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 |
913f16898807024d65f74b71e35760e3bc3c6dbb | 149,429 | py | Python | test/vanilla/version-tolerant/Expected/AcceptanceTests/UrlVersionTolerant/urlversiontolerant/operations/_operations.py | msyyc/autorest.python | 91aa86f51d5c43c10ead5d51ac102618d23e3a21 | ["MIT"] | null | null | null |
test/vanilla/version-tolerant/Expected/AcceptanceTests/UrlVersionTolerant/urlversiontolerant/operations/_operations.py | msyyc/autorest.python | 91aa86f51d5c43c10ead5d51ac102618d23e3a21 | ["MIT"] | null | null | null |
test/vanilla/version-tolerant/Expected/AcceptanceTests/UrlVersionTolerant/urlversiontolerant/operations/_operations.py | msyyc/autorest.python | 91aa86f51d5c43c10ead5d51ac102618d23e3a21 | ["MIT"] | 1 | 2022-03-28T08:58:03.000Z | 2022-03-28T08:58:03.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .._vendor import _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
def build_paths_get_boolean_true_request(**kwargs: Any) -> HttpRequest:
bool_path = kwargs.pop("bool_path", True) # type: bool
accept = "application/json"
# Construct URL
url = "/paths/bool/true/{boolPath}"
path_format_arguments = {
"boolPath": _SERIALIZER.url("bool_path", bool_path, "bool"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_boolean_false_request(**kwargs: Any) -> HttpRequest:
bool_path = kwargs.pop("bool_path", False) # type: bool
accept = "application/json"
# Construct URL
url = "/paths/bool/false/{boolPath}"
path_format_arguments = {
"boolPath": _SERIALIZER.url("bool_path", bool_path, "bool"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_int_one_million_request(**kwargs: Any) -> HttpRequest:
int_path = kwargs.pop("int_path", 1000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/int/1000000/{intPath}"
path_format_arguments = {
"intPath": _SERIALIZER.url("int_path", int_path, "int"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_int_negative_one_million_request(**kwargs: Any) -> HttpRequest:
int_path = kwargs.pop("int_path", -1000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/int/-1000000/{intPath}"
path_format_arguments = {
"intPath": _SERIALIZER.url("int_path", int_path, "int"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_path = kwargs.pop("long_path", 10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/long/10000000000/{longPath}"
path_format_arguments = {
"longPath": _SERIALIZER.url("long_path", long_path, "long"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_negative_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_path = kwargs.pop("long_path", -10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/long/-10000000000/{longPath}"
path_format_arguments = {
"longPath": _SERIALIZER.url("long_path", long_path, "long"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_float_scientific_positive_request(**kwargs: Any) -> HttpRequest:
float_path = kwargs.pop("float_path", 103400000000000000000) # type: float
accept = "application/json"
# Construct URL
url = "/paths/float/1.034E+20/{floatPath}"
path_format_arguments = {
"floatPath": _SERIALIZER.url("float_path", float_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_float_scientific_negative_request(**kwargs: Any) -> HttpRequest:
float_path = kwargs.pop("float_path", -1.034e-20) # type: float
accept = "application/json"
# Construct URL
url = "/paths/float/-1.034E-20/{floatPath}"
path_format_arguments = {
"floatPath": _SERIALIZER.url("float_path", float_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_double_decimal_positive_request(**kwargs: Any) -> HttpRequest:
double_path = kwargs.pop("double_path", 9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/paths/double/9999999.999/{doublePath}"
path_format_arguments = {
"doublePath": _SERIALIZER.url("double_path", double_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_double_decimal_negative_request(**kwargs: Any) -> HttpRequest:
double_path = kwargs.pop("double_path", -9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/paths/double/-9999999.999/{doublePath}"
path_format_arguments = {
"doublePath": _SERIALIZER.url("double_path", double_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_unicode_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "啊齄丂狛狜隣郎隣兀﨩") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/unicode/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_url_encoded_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "begin!*'();:@ &=+$,/?#[]end") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_url_non_encoded_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "begin!*'();:@&=+$,end") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/begin!*'();:@&=+$,end/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str", skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_empty_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/empty/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_null_request(string_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/null/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_enum_valid_request(enum_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/enum/green%20color/{enumPath}"
path_format_arguments = {
"enumPath": _SERIALIZER.url("enum_path", enum_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_enum_null_request(enum_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/null/{enumPath}"
path_format_arguments = {
"enumPath": _SERIALIZER.url("enum_path", enum_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_multi_byte_request(byte_path: bytearray, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/byte/multibyte/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_empty_request(**kwargs: Any) -> HttpRequest:
byte_path = kwargs.pop("byte_path", bytearray("", encoding="utf-8")) # type: bytearray
accept = "application/json"
# Construct URL
url = "/paths/byte/empty/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_null_request(byte_path: bytearray, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/byte/null/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_valid_request(**kwargs: Any) -> HttpRequest:
date_path = kwargs.pop("date_path", "2012-01-01") # type: datetime.date
accept = "application/json"
# Construct URL
url = "/paths/date/2012-01-01/{datePath}"
path_format_arguments = {
"datePath": _SERIALIZER.url("date_path", date_path, "date"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_null_request(date_path: datetime.date, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/date/null/{datePath}"
path_format_arguments = {
"datePath": _SERIALIZER.url("date_path", date_path, "date"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_time_valid_request(**kwargs: Any) -> HttpRequest:
date_time_path = kwargs.pop("date_time_path", "2012-01-01T01:01:01Z") # type: datetime.datetime
accept = "application/json"
# Construct URL
url = "/paths/datetime/2012-01-01T01%3A01%3A01Z/{dateTimePath}"
path_format_arguments = {
"dateTimePath": _SERIALIZER.url("date_time_path", date_time_path, "iso-8601"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_time_null_request(date_time_path: datetime.datetime, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/datetime/null/{dateTimePath}"
path_format_arguments = {
"dateTimePath": _SERIALIZER.url("date_time_path", date_time_path, "iso-8601"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_base64_url_request(base64_url_path: bytes, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/bG9yZW0/{base64UrlPath}"
path_format_arguments = {
"base64UrlPath": _SERIALIZER.url("base64_url_path", base64_url_path, "base64"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_array_csv_in_path_request(array_path: List[str], **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = (
"/paths/array/ArrayPath1%2cbegin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend%2c%2c/{arrayPath}"
)
path_format_arguments = {
"arrayPath": _SERIALIZER.url("array_path", array_path, "[str]", div=","),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_unix_time_url_request(unix_time_url_path: datetime.datetime, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/int/1460505600/{unixTimeUrlPath}"
path_format_arguments = {
"unixTimeUrlPath": _SERIALIZER.url("unix_time_url_path", unix_time_url_path, "unix-time"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_queries_get_boolean_true_request(**kwargs: Any) -> HttpRequest:
bool_query = kwargs.pop("bool_query", True) # type: bool
accept = "application/json"
# Construct URL
url = "/queries/bool/true"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_boolean_false_request(**kwargs: Any) -> HttpRequest:
bool_query = kwargs.pop("bool_query", False) # type: bool
accept = "application/json"
# Construct URL
url = "/queries/bool/false"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_boolean_null_request(*, bool_query: Optional[bool] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/bool/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if bool_query is not None:
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_one_million_request(**kwargs: Any) -> HttpRequest:
int_query = kwargs.pop("int_query", 1000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/int/1000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_negative_one_million_request(**kwargs: Any) -> HttpRequest:
int_query = kwargs.pop("int_query", -1000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/int/-1000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_null_request(*, int_query: Optional[int] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/int/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if int_query is not None:
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_query = kwargs.pop("long_query", 10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/long/10000000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_negative_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_query = kwargs.pop("long_query", -10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/long/-10000000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_long_null_request(*, long_query: Optional[int] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/long/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if long_query is not None:
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_scientific_positive_request(**kwargs: Any) -> HttpRequest:
float_query = kwargs.pop("float_query", 103400000000000000000) # type: float
accept = "application/json"
# Construct URL
url = "/queries/float/1.034E+20"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_scientific_negative_request(**kwargs: Any) -> HttpRequest:
float_query = kwargs.pop("float_query", -1.034e-20) # type: float
accept = "application/json"
# Construct URL
url = "/queries/float/-1.034E-20"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_null_request(*, float_query: Optional[float] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/float/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if float_query is not None:
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_decimal_positive_request(**kwargs: Any) -> HttpRequest:
double_query = kwargs.pop("double_query", 9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/queries/double/9999999.999"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_decimal_negative_request(**kwargs: Any) -> HttpRequest:
double_query = kwargs.pop("double_query", -9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/queries/double/-9999999.999"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_null_request(*, double_query: Optional[float] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/double/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if double_query is not None:
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_unicode_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "啊齄丂狛狜隣郎隣兀﨩") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/unicode/"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_url_encoded_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "begin!*'();:@ &=+$,/?#[]end") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_empty_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_null_request(*, string_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/string/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if string_query is not None:
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_enum_valid_request(*, enum_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/enum/green%20color"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if enum_query is not None:
query_parameters["enumQuery"] = _SERIALIZER.query("enum_query", enum_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_enum_null_request(*, enum_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/enum/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if enum_query is not None:
query_parameters["enumQuery"] = _SERIALIZER.query("enum_query", enum_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_multi_byte_request(*, byte_query: Optional[bytearray] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/byte/multibyte"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if byte_query is not None:
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_empty_request(**kwargs: Any) -> HttpRequest:
byte_query = kwargs.pop("byte_query", bytearray("", encoding="utf-8")) # type: bytearray
accept = "application/json"
# Construct URL
url = "/queries/byte/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_null_request(*, byte_query: Optional[bytearray] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/byte/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if byte_query is not None:
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_valid_request(**kwargs: Any) -> HttpRequest:
date_query = kwargs.pop("date_query", "2012-01-01") # type: datetime.date
accept = "application/json"
# Construct URL
url = "/queries/date/2012-01-01"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["dateQuery"] = _SERIALIZER.query("date_query", date_query, "date")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_null_request(*, date_query: Optional[datetime.date] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/date/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if date_query is not None:
query_parameters["dateQuery"] = _SERIALIZER.query("date_query", date_query, "date")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_time_valid_request(**kwargs: Any) -> HttpRequest:
date_time_query = kwargs.pop("date_time_query", "2012-01-01T01:01:01Z") # type: datetime.datetime
accept = "application/json"
# Construct URL
url = "/queries/datetime/2012-01-01T01%3A01%3A01Z"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["dateTimeQuery"] = _SERIALIZER.query("date_time_query", date_time_query, "iso-8601")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_time_null_request(
*, date_time_query: Optional[datetime.datetime] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/datetime/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if date_time_query is not None:
query_parameters["dateTimeQuery"] = _SERIALIZER.query("date_time_query", date_time_query, "iso-8601")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_null_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_empty_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_no_collection_format_empty_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/none/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_ssv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/ssv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=" ")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_tsv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/tsv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=" ")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_pipes_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/pipes/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div="|")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
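# Note (added for clarity, not generated): the array builders above differ only in the
# div separator handed to the serializer, which joins the list into a single query
# value: csv uses ",", ssv uses " ", tsv uses a tab, and pipes uses "|". For example,
# assuming the list ["a", "b", "c"]:
#
#     build_queries_array_string_csv_valid_request(array_query=["a", "b", "c"])
#     # would carry arrayQuery=a,b,c
#     build_queries_array_string_pipes_valid_request(array_query=["a", "b", "c"])
#     # would carry arrayQuery=a|b|c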
def build_path_items_get_all_with_values_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/pathItemStringQuery/localStringQuery"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
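# Illustrative sketch: the path-item builders substitute the {...} placeholders via
# _format_url_section before the query parameters are attached. For example, with
# hypothetical values:
#
#     request = build_path_items_get_all_with_values_request(
#         path_item_string_path="pathItemStringPath",
#         global_string_path="globalStringPath",
#         local_string_path="localStringPath",
#         path_item_string_query="pathItemStringQuery",
#         global_string_query="globalStringQuery",
#         local_string_query="localStringQuery",
#     )
#     # The three path placeholders are replaced with the URL-quoted path values, and
#     # the three optional query parameters are added because they are not None.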
def build_path_items_get_global_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/localStringQuery"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_global_and_local_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/null"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_local_path_item_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/null/null"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
class PathsOperations(object):
"""PathsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_boolean_true(self, **kwargs: Any) -> None:
"""Get true Boolean value on path.
:keyword bool_path: true boolean value. The default value is True. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_path: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_path = kwargs.pop("bool_path", True) # type: bool
request = build_paths_get_boolean_true_request(
bool_path=bool_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_true.metadata = {"url": "/paths/bool/true/{boolPath}"} # type: ignore
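# Usage sketch (assumption: the operations group is exposed on a generated client, e.g.
# a hypothetical client.paths attribute, as the class docstring suggests):
#
#     client.paths.get_boolean_true()
#
# Each method follows the same shape: build the request, format the URL against the
# client's base URL, run it through the pipeline, and raise HttpResponseError for any
# status code outside the expected set.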
@distributed_trace
def get_boolean_false(self, **kwargs: Any) -> None:
"""Get false Boolean value on path.
:keyword bool_path: false boolean value. The default value is False. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_path: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_path = kwargs.pop("bool_path", False) # type: bool
request = build_paths_get_boolean_false_request(
bool_path=bool_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_false.metadata = {"url": "/paths/bool/false/{boolPath}"} # type: ignore
@distributed_trace
def get_int_one_million(self, **kwargs: Any) -> None:
"""Get '1000000' integer value.
:keyword int_path: '1000000' integer value. The default value is 1000000. Note that overriding
this default value may result in unsupported behavior.
:paramtype int_path: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_path = kwargs.pop("int_path", 1000000) # type: int
request = build_paths_get_int_one_million_request(
int_path=int_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_one_million.metadata = {"url": "/paths/int/1000000/{intPath}"} # type: ignore
@distributed_trace
def get_int_negative_one_million(self, **kwargs: Any) -> None:
"""Get '-1000000' integer value.
:keyword int_path: '-1000000' integer value. The default value is -1000000. Note that
overriding this default value may result in unsupported behavior.
:paramtype int_path: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_path = kwargs.pop("int_path", -1000000) # type: int
request = build_paths_get_int_negative_one_million_request(
int_path=int_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_negative_one_million.metadata = {"url": "/paths/int/-1000000/{intPath}"} # type: ignore
@distributed_trace
def get_ten_billion(self, **kwargs: Any) -> None:
"""Get '10000000000' 64 bit integer value.
:keyword long_path: '10000000000' 64 bit integer value. The default value is 10000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype long_path: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_path = kwargs.pop("long_path", 10000000000) # type: int
request = build_paths_get_ten_billion_request(
long_path=long_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_ten_billion.metadata = {"url": "/paths/long/10000000000/{longPath}"} # type: ignore
@distributed_trace
def get_negative_ten_billion(self, **kwargs: Any) -> None:
"""Get '-10000000000' 64 bit integer value.
:keyword long_path: '-10000000000' 64 bit integer value. The default value is -10000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype long_path: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_path = kwargs.pop("long_path", -10000000000) # type: int
request = build_paths_get_negative_ten_billion_request(
long_path=long_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_negative_ten_billion.metadata = {"url": "/paths/long/-10000000000/{longPath}"} # type: ignore
@distributed_trace
def float_scientific_positive(self, **kwargs: Any) -> None:
"""Get '1.034E+20' numeric value.
:keyword float_path: '1.034E+20' numeric value. The default value is 103400000000000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype float_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_path = kwargs.pop("float_path", 103400000000000000000) # type: float
request = build_paths_float_scientific_positive_request(
float_path=float_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_positive.metadata = {"url": "/paths/float/1.034E+20/{floatPath}"} # type: ignore
@distributed_trace
def float_scientific_negative(self, **kwargs: Any) -> None:
"""Get '-1.034E-20' numeric value.
:keyword float_path: '-1.034E-20' numeric value. The default value is -1.034e-20. Note that
overriding this default value may result in unsupported behavior.
:paramtype float_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_path = kwargs.pop("float_path", -1.034e-20) # type: float
request = build_paths_float_scientific_negative_request(
float_path=float_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_negative.metadata = {"url": "/paths/float/-1.034E-20/{floatPath}"} # type: ignore
@distributed_trace
def double_decimal_positive(self, **kwargs: Any) -> None:
"""Get '9999999.999' numeric value.
:keyword double_path: '9999999.999' numeric value. The default value is 9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_path = kwargs.pop("double_path", 9999999.999) # type: float
request = build_paths_double_decimal_positive_request(
double_path=double_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_positive.metadata = {"url": "/paths/double/9999999.999/{doublePath}"} # type: ignore
@distributed_trace
def double_decimal_negative(self, **kwargs: Any) -> None:
"""Get '-9999999.999' numeric value.
:keyword double_path: '-9999999.999' numeric value. The default value is -9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_path = kwargs.pop("double_path", -9999999.999) # type: float
request = build_paths_double_decimal_negative_request(
double_path=double_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_negative.metadata = {"url": "/paths/double/-9999999.999/{doublePath}"} # type: ignore
@distributed_trace
def string_unicode(self, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value.
:keyword string_path: '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value. The default value is "啊齄丂狛狜隣郎隣兀﨩".
Note that overriding this default value may result in unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "啊齄丂狛狜隣郎隣兀﨩") # type: str
request = build_paths_string_unicode_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_unicode.metadata = {"url": "/paths/string/unicode/{stringPath}"} # type: ignore
@distributed_trace
def string_url_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@ &=+$,/?#[]end.
:keyword string_path: 'begin!*'();:@ &=+$,/?#[]end' url encoded string value. The default value
is "begin!*'();:@ &=+$,/?#[]end". Note that overriding this default value may result in
unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "begin!*'();:@ &=+$,/?#[]end") # type: str
request = build_paths_string_url_encoded_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_encoded.metadata = {"url": "/paths/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend/{stringPath}"} # type: ignore
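# For illustration only: the percent-encoded segment in the metadata URL above is the
# standard encoding of the default value, e.g. with the standard library:
#
#     from urllib.parse import quote
#     quote("begin!*'();:@ &=+$,/?#[]end", safe="")
#     # 'begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend'
#
# The equivalent quoting is handled by the serializer when the path is formatted, so
# callers pass the raw, unencoded string.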
@distributed_trace
def string_url_non_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@&=+$,end.
Per https://tools.ietf.org/html/rfc3986#appendix-A, 'path' accepts any 'pchar' without encoding.
:keyword string_path: 'begin!*'();:@&=+$,end' url encoded string value. The default value is
"begin!*'();:@&=+$,end". Note that overriding this default value may result in unsupported
behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "begin!*'();:@&=+$,end") # type: str
request = build_paths_string_url_non_encoded_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_non_encoded.metadata = {"url": "/paths/string/begin!*'();:@&=+$,end/{stringPath}"} # type: ignore
@distributed_trace
def string_empty(self, **kwargs: Any) -> None:
"""Get ''.
:keyword string_path: '' string value. The default value is "". Note that overriding this
default value may result in unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "") # type: str
request = build_paths_string_empty_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_empty.metadata = {"url": "/paths/string/empty/{stringPath}"} # type: ignore
@distributed_trace
def string_null(self, string_path: str, **kwargs: Any) -> None:
"""Get null (should throw).
:param string_path: null string value.
:type string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_string_null_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_null.metadata = {"url": "/paths/string/null/{stringPath}"} # type: ignore
@distributed_trace
def enum_valid(self, enum_path: str, **kwargs: Any) -> None:
"""Get using uri with 'green color' in path parameter.
:param enum_path: send the value green. Possible values are: "red color", "green color", and
"blue color".
:type enum_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_enum_valid_request(
enum_path=enum_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_valid.metadata = {"url": "/paths/enum/green%20color/{enumPath}"} # type: ignore
@distributed_trace
def enum_null(self, enum_path: str, **kwargs: Any) -> None:
"""Get null (should throw on the client before the request is sent on wire).
:param enum_path: send null should throw. Possible values are: "red color", "green color", and
"blue color".
:type enum_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_enum_null_request(
enum_path=enum_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_null.metadata = {"url": "/paths/string/null/{enumPath}"} # type: ignore
@distributed_trace
def byte_multi_byte(self, byte_path: bytearray, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:param byte_path: '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:type byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_byte_multi_byte_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_multi_byte.metadata = {"url": "/paths/byte/multibyte/{bytePath}"} # type: ignore
@distributed_trace
def byte_empty(self, **kwargs: Any) -> None:
"""Get '' as byte array.
:keyword byte_path: '' as byte array. The default value is bytearray("", encoding="utf-8").
Note that overriding this default value may result in unsupported behavior.
:paramtype byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
byte_path = kwargs.pop("byte_path", bytearray("", encoding="utf-8")) # type: bytearray
request = build_paths_byte_empty_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_empty.metadata = {"url": "/paths/byte/empty/{bytePath}"} # type: ignore
@distributed_trace
def byte_null(self, byte_path: bytearray, **kwargs: Any) -> None:
"""Get null as byte array (should throw).
:param byte_path: null as byte array (should throw).
:type byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_byte_null_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_null.metadata = {"url": "/paths/byte/null/{bytePath}"} # type: ignore
@distributed_trace
def date_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01' as date.
:keyword date_path: '2012-01-01' as date. The default value is "2012-01-01". Note that
overriding this default value may result in unsupported behavior.
:paramtype date_path: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_path = kwargs.pop("date_path", "2012-01-01") # type: datetime.date
request = build_paths_date_valid_request(
date_path=date_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_valid.metadata = {"url": "/paths/date/2012-01-01/{datePath}"} # type: ignore
@distributed_trace
def date_null(self, date_path: datetime.date, **kwargs: Any) -> None:
"""Get null as date - this should throw or be unusable on the client side, depending on date
representation.
:param date_path: null as date (should throw).
:type date_path: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_date_null_request(
date_path=date_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_null.metadata = {"url": "/paths/date/null/{datePath}"} # type: ignore
@distributed_trace
def date_time_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01T01:01:01Z' as date-time.
:keyword date_time_path: '2012-01-01T01:01:01Z' as date-time. The default value is
"2012-01-01T01:01:01Z". Note that overriding this default value may result in unsupported
behavior.
:paramtype date_time_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_time_path = kwargs.pop("date_time_path", "2012-01-01T01:01:01Z") # type: datetime.datetime
request = build_paths_date_time_valid_request(
date_time_path=date_time_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_valid.metadata = {"url": "/paths/datetime/2012-01-01T01%3A01%3A01Z/{dateTimePath}"} # type: ignore
@distributed_trace
def date_time_null(self, date_time_path: datetime.datetime, **kwargs: Any) -> None:
"""Get null as date-time, should be disallowed or throw depending on representation of date-time.
:param date_time_path: null as date-time.
:type date_time_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_date_time_null_request(
date_time_path=date_time_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_null.metadata = {"url": "/paths/datetime/null/{dateTimePath}"} # type: ignore
@distributed_trace
def base64_url(self, base64_url_path: bytes, **kwargs: Any) -> None:
"""Get 'lorem' encoded value as 'bG9yZW0' (base64url).
:param base64_url_path: base64url encoded value.
:type base64_url_path: bytes
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_base64_url_request(
base64_url_path=base64_url_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
base64_url.metadata = {"url": "/paths/string/bG9yZW0/{base64UrlPath}"} # type: ignore
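# For illustration only (standard library, not the SDK serializer): 'bG9yZW0' is the
# unpadded base64url encoding of b"lorem", which is what the metadata URL above expects:
#
#     import base64
#     base64.urlsafe_b64encode(b"lorem").rstrip(b"=")   # b'bG9yZW0'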
@distributed_trace
def array_csv_in_path(self, array_path: List[str], **kwargs: Any) -> None:
"""Get an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
csv-array format.
:param array_path: an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end' , null, '']
using the csv-array format.
:type array_path: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_array_csv_in_path_request(
array_path=array_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_csv_in_path.metadata = {"url": "/paths/array/ArrayPath1%2cbegin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend%2c%2c/{arrayPath}"} # type: ignore
@distributed_trace
def unix_time_url(self, unix_time_url_path: datetime.datetime, **kwargs: Any) -> None:
"""Get the date 2016-04-13 encoded value as '1460505600' (Unix time).
:param unix_time_url_path: Unix time encoded value.
:type unix_time_url_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_unix_time_url_request(
unix_time_url_path=unix_time_url_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
unix_time_url.metadata = {"url": "/paths/int/1460505600/{unixTimeUrlPath}"} # type: ignore
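# For illustration only: the '1460505600' segment in the metadata URL above is the Unix
# timestamp of 2016-04-13 00:00:00 UTC, i.e. the value the serializer is expected to
# produce for the unix_time_url_path datetime:
#
#     from datetime import datetime, timezone
#     int(datetime(2016, 4, 13, tzinfo=timezone.utc).timestamp())   # 1460505600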
class QueriesOperations(object):
"""QueriesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_boolean_true(self, **kwargs: Any) -> None:
"""Get true Boolean value on path.
:keyword bool_query: true boolean value. The default value is True. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_query = kwargs.pop("bool_query", True) # type: bool
request = build_queries_get_boolean_true_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_true.metadata = {"url": "/queries/bool/true"} # type: ignore
@distributed_trace
def get_boolean_false(self, **kwargs: Any) -> None:
"""Get false Boolean value on path.
:keyword bool_query: false boolean value. The default value is False. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_query = kwargs.pop("bool_query", False) # type: bool
request = build_queries_get_boolean_false_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_false.metadata = {"url": "/queries/bool/false"} # type: ignore
@distributed_trace
def get_boolean_null(self, *, bool_query: Optional[bool] = None, **kwargs: Any) -> None:
"""Get null Boolean value on query (query string should be absent).
:keyword bool_query: null boolean value.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_boolean_null_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_null.metadata = {"url": "/queries/bool/null"} # type: ignore
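# Note (added for clarity, not generated): the *_null query operations take an Optional
# value and forward it as-is; like the other null builders in this module, the
# corresponding request builder only adds the parameter when it is not None, so calling
#
#     build_queries_get_boolean_null_request(bool_query=None)
#
# yields a request whose query string omits "boolQuery" entirely, which is what the
# "null" test routes expect.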
@distributed_trace
def get_int_one_million(self, **kwargs: Any) -> None:
"""Get '1000000' integer value.
:keyword int_query: '1000000' integer value. The default value is 1000000. Note that overriding
this default value may result in unsupported behavior.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_query = kwargs.pop("int_query", 1000000) # type: int
request = build_queries_get_int_one_million_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_one_million.metadata = {"url": "/queries/int/1000000"} # type: ignore
@distributed_trace
def get_int_negative_one_million(self, **kwargs: Any) -> None:
"""Get '-1000000' integer value.
:keyword int_query: '-1000000' integer value. The default value is -1000000. Note that
overriding this default value may result in unsupported behavior.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_query = kwargs.pop("int_query", -1000000) # type: int
request = build_queries_get_int_negative_one_million_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_negative_one_million.metadata = {"url": "/queries/int/-1000000"} # type: ignore
@distributed_trace
def get_int_null(self, *, int_query: Optional[int] = None, **kwargs: Any) -> None:
"""Get null integer value (no query parameter).
:keyword int_query: null integer value.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_int_null_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_null.metadata = {"url": "/queries/int/null"} # type: ignore
@distributed_trace
def get_ten_billion(self, **kwargs: Any) -> None:
"""Get '10000000000' 64 bit integer value.
:keyword long_query: '10000000000' 64 bit integer value. The default value is 10000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_query = kwargs.pop("long_query", 10000000000) # type: int
request = build_queries_get_ten_billion_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_ten_billion.metadata = {"url": "/queries/long/10000000000"} # type: ignore
@distributed_trace
def get_negative_ten_billion(self, **kwargs: Any) -> None:
"""Get '-10000000000' 64 bit integer value.
:keyword long_query: '-10000000000' 64 bit integer value. The default value is -10000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_query = kwargs.pop("long_query", -10000000000) # type: int
request = build_queries_get_negative_ten_billion_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_negative_ten_billion.metadata = {"url": "/queries/long/-10000000000"} # type: ignore
@distributed_trace
def get_long_null(self, *, long_query: Optional[int] = None, **kwargs: Any) -> None:
"""Get 'null 64 bit integer value (no query param in uri).
:keyword long_query: null 64 bit integer value.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_long_null_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_long_null.metadata = {"url": "/queries/long/null"} # type: ignore
@distributed_trace
def float_scientific_positive(self, **kwargs: Any) -> None:
"""Get '1.034E+20' numeric value.
:keyword float_query: '1.034E+20' numeric value. The default value is 103400000000000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_query = kwargs.pop("float_query", 103400000000000000000) # type: float
request = build_queries_float_scientific_positive_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_positive.metadata = {"url": "/queries/float/1.034E+20"} # type: ignore
@distributed_trace
def float_scientific_negative(self, **kwargs: Any) -> None:
"""Get '-1.034E-20' numeric value.
:keyword float_query: '-1.034E-20' numeric value. The default value is -1.034e-20. Note that
overriding this default value may result in unsupported behavior.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_query = kwargs.pop("float_query", -1.034e-20) # type: float
request = build_queries_float_scientific_negative_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_negative.metadata = {"url": "/queries/float/-1.034E-20"} # type: ignore
@distributed_trace
def float_null(self, *, float_query: Optional[float] = None, **kwargs: Any) -> None:
"""Get null numeric value (no query parameter).
:keyword float_query: null numeric value.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_float_null_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_null.metadata = {"url": "/queries/float/null"} # type: ignore
@distributed_trace
def double_decimal_positive(self, **kwargs: Any) -> None:
"""Get '9999999.999' numeric value.
:keyword double_query: '9999999.999' numeric value. The default value is 9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_query = kwargs.pop("double_query", 9999999.999) # type: float
request = build_queries_double_decimal_positive_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_positive.metadata = {"url": "/queries/double/9999999.999"} # type: ignore
@distributed_trace
def double_decimal_negative(self, **kwargs: Any) -> None:
"""Get '-9999999.999' numeric value.
:keyword double_query: '-9999999.999' numeric value. The default value is -9999999.999. Note
that overriding this default value may result in unsupported behavior.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_query = kwargs.pop("double_query", -9999999.999) # type: float
request = build_queries_double_decimal_negative_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_negative.metadata = {"url": "/queries/double/-9999999.999"} # type: ignore
@distributed_trace
def double_null(self, *, double_query: Optional[float] = None, **kwargs: Any) -> None:
"""Get null numeric value (no query parameter).
:keyword double_query: null numeric value.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_double_null_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_null.metadata = {"url": "/queries/double/null"} # type: ignore
@distributed_trace
def string_unicode(self, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value.
:keyword string_query: '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value. The default value is "啊齄丂狛狜隣郎隣兀﨩".
Note that overriding this default value may result in unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "啊齄丂狛狜隣郎隣兀﨩") # type: str
request = build_queries_string_unicode_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_unicode.metadata = {"url": "/queries/string/unicode/"} # type: ignore
@distributed_trace
def string_url_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@ &=+$,/?#[]end.
:keyword string_query: 'begin!*'();:@ &=+$,/?#[]end' url encoded string value. The default
value is "begin!*'();:@ &=+$,/?#[]end". Note that overriding this default value may result in
unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "begin!*'();:@ &=+$,/?#[]end") # type: str
request = build_queries_string_url_encoded_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_encoded.metadata = {"url": "/queries/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend"} # type: ignore
@distributed_trace
def string_empty(self, **kwargs: Any) -> None:
"""Get ''.
:keyword string_query: '' string value. The default value is "". Note that overriding this
default value may result in unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "") # type: str
request = build_queries_string_empty_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_empty.metadata = {"url": "/queries/string/empty"} # type: ignore
@distributed_trace
def string_null(self, *, string_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get null (no query parameter in url).
:keyword string_query: null string value.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_string_null_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_null.metadata = {"url": "/queries/string/null"} # type: ignore
@distributed_trace
def enum_valid(self, *, enum_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get using uri with query parameter 'green color'.
:keyword enum_query: 'green color' enum value. Possible values are: "red color", "green color",
and "blue color".
:paramtype enum_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_enum_valid_request(
enum_query=enum_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_valid.metadata = {"url": "/queries/enum/green%20color"} # type: ignore
@distributed_trace
def enum_null(self, *, enum_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get null (no query parameter in url).
:keyword enum_query: null string value. Possible values are: "red color", "green color", and
"blue color".
:paramtype enum_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_enum_null_request(
enum_query=enum_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_null.metadata = {"url": "/queries/enum/null"} # type: ignore
@distributed_trace
def byte_multi_byte(self, *, byte_query: Optional[bytearray] = None, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:keyword byte_query: '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_byte_multi_byte_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_multi_byte.metadata = {"url": "/queries/byte/multibyte"} # type: ignore
@distributed_trace
def byte_empty(self, **kwargs: Any) -> None:
"""Get '' as byte array.
:keyword byte_query: '' as byte array. The default value is bytearray("", encoding="utf-8").
Note that overriding this default value may result in unsupported behavior.
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
byte_query = kwargs.pop("byte_query", bytearray("", encoding="utf-8")) # type: bytearray
request = build_queries_byte_empty_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_empty.metadata = {"url": "/queries/byte/empty"} # type: ignore
@distributed_trace
def byte_null(self, *, byte_query: Optional[bytearray] = None, **kwargs: Any) -> None:
"""Get null as byte array (no query parameters in uri).
:keyword byte_query: null as byte array (no query parameters in uri).
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_byte_null_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_null.metadata = {"url": "/queries/byte/null"} # type: ignore
@distributed_trace
def date_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01' as date.
:keyword date_query: '2012-01-01' as date. The default value is "2012-01-01". Note that
overriding this default value may result in unsupported behavior.
:paramtype date_query: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_query = kwargs.pop("date_query", "2012-01-01") # type: datetime.date
request = build_queries_date_valid_request(
date_query=date_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_valid.metadata = {"url": "/queries/date/2012-01-01"} # type: ignore
@distributed_trace
def date_null(self, *, date_query: Optional[datetime.date] = None, **kwargs: Any) -> None:
"""Get null as date - this should result in no query parameters in uri.
:keyword date_query: null as date (no query parameters in uri).
:paramtype date_query: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_date_null_request(
date_query=date_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_null.metadata = {"url": "/queries/date/null"} # type: ignore
@distributed_trace
def date_time_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01T01:01:01Z' as date-time.
:keyword date_time_query: '2012-01-01T01:01:01Z' as date-time. The default value is
"2012-01-01T01:01:01Z". Note that overriding this default value may result in unsupported
behavior.
:paramtype date_time_query: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_time_query = kwargs.pop("date_time_query", "2012-01-01T01:01:01Z") # type: datetime.datetime
request = build_queries_date_time_valid_request(
date_time_query=date_time_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_valid.metadata = {"url": "/queries/datetime/2012-01-01T01%3A01%3A01Z"} # type: ignore
@distributed_trace
def date_time_null(self, *, date_time_query: Optional[datetime.datetime] = None, **kwargs: Any) -> None:
"""Get null as date-time, should result in no query parameters in uri.
:keyword date_time_query: null as date-time (no query parameters).
:paramtype date_time_query: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_date_time_null_request(
date_time_query=date_time_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_null.metadata = {"url": "/queries/datetime/null"} # type: ignore
@distributed_trace
def array_string_csv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
csv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_valid.metadata = {"url": "/queries/array/csv/string/valid"} # type: ignore
@distributed_trace
def array_string_csv_null(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get a null array of string using the csv-array format.
:keyword array_query: a null array of string using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_null_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_null.metadata = {"url": "/queries/array/csv/string/null"} # type: ignore
@distributed_trace
def array_string_csv_empty(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an empty array [] of string using the csv-array format.
:keyword array_query: an empty array [] of string using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_empty_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_empty.metadata = {"url": "/queries/array/csv/string/empty"} # type: ignore
@distributed_trace
def array_string_no_collection_format_empty(
self, *, array_query: Optional[List[str]] = None, **kwargs: Any
) -> None:
"""Array query has no defined collection format, should default to csv. Pass in ['hello', 'nihao',
'bonjour'] for the 'arrayQuery' parameter to the service.
:keyword array_query: Array-typed query parameter. Pass in ['hello', 'nihao', 'bonjour'].
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_no_collection_format_empty_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_no_collection_format_empty.metadata = {"url": "/queries/array/none/string/empty"} # type: ignore
@distributed_trace
def array_string_ssv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
ssv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the ssv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_ssv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_ssv_valid.metadata = {"url": "/queries/array/ssv/string/valid"} # type: ignore
@distributed_trace
def array_string_tsv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
tsv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the tsv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_tsv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_tsv_valid.metadata = {"url": "/queries/array/tsv/string/valid"} # type: ignore
@distributed_trace
def array_string_pipes_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
pipes-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the pipes-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_pipes_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_pipes_valid.metadata = {"url": "/queries/array/pipes/string/valid"} # type: ignore
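# Illustrative sketch, not part of the generated module: how an application might call the
# query operations defined above. The service client type and its "queries" attribute name
# are assumptions for illustration only.
def _example_query_operations(client):
    from azure.core.exceptions import HttpResponseError
    try:
        client.queries.float_scientific_positive()  # sends floatQuery=1.034E+20
        client.queries.string_null()  # null value, so no query parameter is sent
        client.queries.array_string_csv_valid(
            array_query=["ArrayQuery1", "begin!*'();:@ &=+$,/?#[]end", None, ""]
        )
    except HttpResponseError as exc:
        print("service rejected the request:", exc.status_code)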
class PathItemsOperations(object):
"""PathItemsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_all_with_values(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery='globalStringQuery',
pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value 'localStringQuery'.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_all_with_values_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_all_with_values.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/pathItemStringQuery/localStringQuery"} # type: ignore
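# Illustrative sketch (assumption: this operation group is reachable as "path_items" on its
# service client). Every method in this class accepts a "cls" callback and an "error_map"
# override through **kwargs, e.g. to obtain the raw pipeline response or to map additional
# status codes onto exceptions:
#
#     raw = client.path_items.get_all_with_values(
#         "pathItemStringPath",
#         "localStringPath",
#         path_item_string_query="pathItemStringQuery",
#         local_string_query="localStringQuery",
#         cls=lambda pipeline_response, deserialized, headers: pipeline_response,
#         error_map={429: HttpResponseError},
#     )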
@distributed_trace
def get_global_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery=null,
pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value 'localStringQuery'.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_global_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_global_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/localStringQuery"} # type: ignore
@distributed_trace
def get_global_and_local_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery=null,
pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain null value.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_global_and_local_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_global_and_local_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/null"} # type: ignore
@distributed_trace
def get_local_path_item_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery='globalStringQuery',
pathItemStringQuery=null, localStringQuery=null.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: should contain value null.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value null.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_local_path_item_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_local_path_item_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/null/null"} # type: ignore
| 41.233168
| 245
| 0.675491
| 17,114
| 149,429
| 5.667407
| 0.019808
| 0.027219
| 0.031302
| 0.039467
| 0.971132
| 0.963956
| 0.952532
| 0.942201
| 0.933479
| 0.913684
| 0
| 0.020869
| 0.206332
| 149,429
| 3,623
| 246
| 41.244549
| 0.796959
| 0.20237
| 0
| 0.777554
| 0
| 0.007046
| 0.111029
| 0.043456
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067942
| false
| 0
| 0.005536
| 0
| 0.141419
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
914f974905119aa6df33b733c4b0cd0e4954c272 | 15,147 | py | Python | heliosburn/django/hbproject/webui/models.py | thecodeteam/heliosburn | 513f6335c9788948d82e5c9285d7869f3ff4cc10 | ["MIT"] | null | null | null |
heliosburn/django/hbproject/webui/models.py | thecodeteam/heliosburn | 513f6335c9788948d82e5c9285d7869f3ff4cc10 | ["MIT"] | null | null | null |
heliosburn/django/hbproject/webui/models.py | thecodeteam/heliosburn | 513f6335c9788948d82e5c9285d7869f3ff4cc10 | ["MIT"] | 1 | 2020-09-17T18:19:05.000Z | 2020-09-17T18:19:05.000Z |
import json
import re
from django.conf import settings
import requests
from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, \
UnexpectedException, LocationHeaderNotFoundException, NotFoundException
def validate_response(response):
if 200 <= response.status_code < 300:
return True
return False
def status_code_to_exception(status_code):
if status_code == 400:
return BadRequestException()
if status_code == 401:
return UnauthorizedException()
if status_code == 404:
return NotFoundException()
if status_code >= 500:
return ServerErrorException()
if 300 <= status_code < 400:
return RedirectException()
return UnexpectedException()
def get_resource_id_or_raise_exception(resource_name, response):
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
location = response.headers.get('location')
pattern = r'.+{}/(?P<id>\w+)'.format(resource_name)  # raw string so the regex escapes are preserved
p = re.compile(pattern)
m = p.match(location)
try:
resource_id = m.group('id')
return resource_id
except AttributeError:
    raise UnexpectedException('Could not get the resource ID from the response.')
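# Minimal, self-contained sketch (hypothetical URL) of the Location-header parsing done by
# get_resource_id_or_raise_exception: the resource id is the path segment that follows the
# resource name.
def _example_location_parsing():
    location = 'http://api.example.com/session/5417'
    match = re.compile(r'.+{}/(?P<id>\w+)'.format('session')).match(location)
    assert match is not None and match.group('id') == '5417'
    return match.group('id')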
class Base(object):
def __init__(self, auth_token=None):
self.auth_token = auth_token
def get_url(self, extra=''):
return '{base_url}{endpoint}{extra}'.format(base_url=settings.API_BASE_URL,
endpoint=object.__getattribute__(self, '__endpoint__'),
extra=extra)
class Session(Base):
__endpoint__ = '/session/'
__resourcename__ = 'session'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
session = json.loads(response.text)
return session
def start(self, resource_id):
url = self.get_url(extra='{}/{}/'.format(resource_id, 'start'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def stop(self, resource_id):
url = self.get_url(extra='{}/{}/'.format(resource_id, 'stop'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
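# Illustrative usage sketch (token and payload values are made up): driving a session
# through its lifecycle with the Session model defined above.
def _example_session_lifecycle():
    session_api = Session(auth_token='example-token')
    session_id = session_api.create({'name': 'smoke test'})  # POST /session/
    details = session_api.get(session_id)                    # GET  /session/<id>
    session_api.start(session_id)                            # POST /session/<id>/start/
    session_api.stop(session_id)                             # POST /session/<id>/stop/
    return details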
class TestPlan(Base):
__endpoint__ = '/testplan/'
__resourcename__ = 'testplan'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
testplan = json.loads(response.text)
return testplan
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
testplans = json.loads(response.text)
return testplans
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class Rule(Base):
__endpoint__ = '/testplan/{testplan_id}/rule/'
__resourcename__ = 'rule'
def __init__(self, testplan_id, auth_token=None):
self.auth_token = auth_token
self.__endpoint__ = self.__endpoint__.format(testplan_id=testplan_id)
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
rule = json.loads(response.text)
return rule
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
resource = json.loads(response.text)
return resource
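# Illustrative sketch (hypothetical ids): Rule is scoped to a test plan, so its endpoint
# template is resolved per instance in __init__ before any request is made.
def _example_rule_endpoint():
    rule_api = Rule(testplan_id='42', auth_token='example-token')
    # __endpoint__ is now '/testplan/42/rule/', so this returns
    # settings.API_BASE_URL + '/testplan/42/rule/7'
    return rule_api.get_url(extra='7')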
class Recording(Base):
__endpoint__ = '/recording/'
__resourcename__ = 'recording'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
recording = json.loads(response.text)
return recording
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
recordings = json.loads(response.text)
return recordings
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def start(self, resource_id):
url = self.get_url(extra='{}/{}'.format(resource_id, 'start'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def stop(self, resource_id):
url = self.get_url(extra='{}/{}'.format(resource_id, 'stop'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class QoS(Base):
__endpoint__ = '/qos/'
__resourcename__ = 'qos'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
qos = json.loads(response.text)
return qos
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
qos = json.loads(response.text)
return qos
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class ServerOverload(Base):
__endpoint__ = '/serveroverload/'
__resourcename__ = 'serveroverload'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
profile = json.loads(response.text)
return profile
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
profile = json.loads(response.text)
return profile
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class Logs(Base):
__endpoint__ = '/log/'
__resourcename__ = 'log'
def stats(self):
url = self.get_url(extra='stats')
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
stats = json.loads(response.text)
return stats
def get(self, start, length, component, levels, date_from, date_to, msg):
url = self.get_url(
    extra='?start={}&limit={}&component={}&levels={}&from={}&to={}&msg={}'.format(
        start, length, component, levels, date_from, date_to, msg))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
logs = json.loads(response.text)
return logs
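# Illustrative usage sketch (made-up filter values): Logs.get assembles the filters into a
# raw query string, so all arguments are passed positionally.
def _example_logs_query():
    logs_api = Logs(auth_token='example-token')
    return logs_api.get(0, 50, 'proxy', 'info,error', '2015-01-01', '2015-01-31', 'timeout')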
| 39.139535
| 115
| 0.642239
| 1,729
| 15,147
| 5.384615
| 0.058994
| 0.065736
| 0.047476
| 0.043287
| 0.834479
| 0.804082
| 0.800967
| 0.800967
| 0.793448
| 0.784211
| 0
| 0.002136
| 0.258335
| 15,147
| 387
| 116
| 39.139535
| 0.826598
| 0
| 0
| 0.731928
| 0
| 0
| 0.047927
| 0.00779
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111446
| false
| 0
| 0.01506
| 0.003012
| 0.283133
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
e6bdcee8c086f35e2a59b7fc819faaf2312d18c6 | 89,316 | py | Python | sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | ["MIT"] | 2 | 2019-08-23T21:14:00.000Z | 2021-09-07T18:32:34.000Z |
sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | ["MIT"] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z |
sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_gremlin_resources_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | ["MIT"] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class GremlinResourcesOperations(object):
"""GremlinResourcesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.cosmosdb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_gremlin_databases(
self,
resource_group_name, # type: str
account_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.GremlinDatabaseListResult"]
"""Lists the Gremlin databases under an existing Azure Cosmos DB database account.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GremlinDatabaseListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.GremlinDatabaseListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinDatabaseListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_gremlin_databases.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('GremlinDatabaseListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_gremlin_databases.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases'} # type: ignore
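# Illustrative usage (assumption: this operation group is exposed as "gremlin_resources"
# on the Cosmos DB management client). The method returns an ItemPaged iterator, so the
# nextLink handling in prepare_request above runs lazily as the caller iterates:
#
#     for database in client.gremlin_resources.list_gremlin_databases("my-rg", "my-account"):
#         print(database.name)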
def get_gremlin_database(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.GremlinDatabaseGetResults"
"""Gets the Gremlin databases under an existing Azure Cosmos DB database account with the provided
name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GremlinDatabaseGetResults, or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.GremlinDatabaseGetResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinDatabaseGetResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_gremlin_database.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('GremlinDatabaseGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_gremlin_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}'} # type: ignore
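    # Usage sketch (illustrative): fetching a single Gremlin database by name, reusing the
    # hypothetical client from the list_gremlin_databases sketch above.
    #
    #   db = client.gremlin_resources.get_gremlin_database("my-rg", "my-account", "mydb")
    #   print(db.id)  # ARM resource id of the database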
def _create_update_gremlin_database_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
create_update_gremlin_database_parameters, # type: "models.GremlinDatabaseCreateUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["models.GremlinDatabaseGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.GremlinDatabaseGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_update_gremlin_database_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(create_update_gremlin_database_parameters, 'GremlinDatabaseCreateUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GremlinDatabaseGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_update_gremlin_database_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}'} # type: ignore
def begin_create_update_gremlin_database(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
create_update_gremlin_database_parameters, # type: "models.GremlinDatabaseCreateUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.GremlinDatabaseGetResults"]
"""Create or update an Azure Cosmos DB Gremlin database.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param create_update_gremlin_database_parameters: The parameters to provide for the current
Gremlin database.
:type create_update_gremlin_database_parameters: ~azure.mgmt.cosmosdb.models.GremlinDatabaseCreateUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GremlinDatabaseGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.GremlinDatabaseGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinDatabaseGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_update_gremlin_database_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
create_update_gremlin_database_parameters=create_update_gremlin_database_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GremlinDatabaseGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_update_gremlin_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}'} # type: ignore
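    # Usage sketch (illustrative): creating a Gremlin database through the long-running
    # operation. GremlinDatabaseResource and CreateUpdateOptions are assumed model names
    # from the 2020-04-01 models namespace.
    #
    #   from azure.mgmt.cosmosdb import models
    #
    #   params = models.GremlinDatabaseCreateUpdateParameters(
    #       resource=models.GremlinDatabaseResource(id="mydb"),
    #       options=models.CreateUpdateOptions(throughput=400),
    #   )
    #   poller = client.gremlin_resources.begin_create_update_gremlin_database(
    #       "my-rg", "my-account", "mydb", params)
    #   database = poller.result()  # blocks until the ARM operation completes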
def _delete_gremlin_database_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
# Construct URL
url = self._delete_gremlin_database_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_gremlin_database_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}'} # type: ignore
def begin_delete_gremlin_database(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes an existing Azure Cosmos DB Gremlin database.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_gremlin_database_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_gremlin_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}'} # type: ignore
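    # Usage sketch (illustrative): deleting a Gremlin database. continuation_token() can be
    # persisted and later passed back via the continuation_token keyword to rebuild the poller.
    #
    #   poller = client.gremlin_resources.begin_delete_gremlin_database(
    #       "my-rg", "my-account", "mydb")
    #   token = poller.continuation_token()
    #   poller.result()  # wait for the delete to finish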
def get_gremlin_database_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ThroughputSettingsGetResults"
"""Gets the RUs per second of the Gremlin database under an existing Azure Cosmos DB database
account with the provided name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ThroughputSettingsGetResults, or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_gremlin_database_throughput.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_gremlin_database_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default'} # type: ignore
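    # Usage sketch (illustrative): reading the current throughput settings of a database.
    # The resource/throughput/autoscale_settings attribute names are assumptions based on
    # the ThroughputSettingsGetResults model of this api-version.
    #
    #   settings = client.gremlin_resources.get_gremlin_database_throughput(
    #       "my-rg", "my-account", "mydb")
    #   print(settings.resource.throughput, settings.resource.autoscale_settings)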
def _update_gremlin_database_throughput_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
update_throughput_parameters, # type: "models.ThroughputSettingsUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_gremlin_database_throughput_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(update_throughput_parameters, 'ThroughputSettingsUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_gremlin_database_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default'} # type: ignore
def begin_update_gremlin_database_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
update_throughput_parameters, # type: "models.ThroughputSettingsUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Update RUs per second of an Azure Cosmos DB Gremlin database.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
        :param update_throughput_parameters: The parameters to provide for the RUs per second of the
         current Gremlin database.
:type update_throughput_parameters: ~azure.mgmt.cosmosdb.models.ThroughputSettingsUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_gremlin_database_throughput_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
update_throughput_parameters=update_throughput_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_gremlin_database_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default'} # type: ignore
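    # Usage sketch (illustrative): setting manual throughput on a Gremlin database.
    # ThroughputSettingsResource is an assumed model name.
    #
    #   update = models.ThroughputSettingsUpdateParameters(
    #       resource=models.ThroughputSettingsResource(throughput=500))
    #   client.gremlin_resources.begin_update_gremlin_database_throughput(
    #       "my-rg", "my-account", "mydb", update).result()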
def _migrate_gremlin_database_to_autoscale_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._migrate_gremlin_database_to_autoscale_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_migrate_gremlin_database_to_autoscale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default/migrateToAutoscale'} # type: ignore
def begin_migrate_gremlin_database_to_autoscale(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Migrate an Azure Cosmos DB Gremlin database from manual throughput to autoscale.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._migrate_gremlin_database_to_autoscale_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_gremlin_database_to_autoscale.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default/migrateToAutoscale'} # type: ignore
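    # Usage sketch (illustrative): switching a database from manual throughput to autoscale.
    # The operation takes no request body; the returned settings expose the autoscale maximum
    # (attribute names assumed).
    #
    #   settings = client.gremlin_resources.begin_migrate_gremlin_database_to_autoscale(
    #       "my-rg", "my-account", "mydb").result()
    #   print(settings.resource.autoscale_settings.max_throughput)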
def _migrate_gremlin_database_to_manual_throughput_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._migrate_gremlin_database_to_manual_throughput_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_migrate_gremlin_database_to_manual_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default/migrateToManualThroughput'} # type: ignore
def begin_migrate_gremlin_database_to_manual_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Migrate an Azure Cosmos DB Gremlin database from autoscale to manual throughput.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._migrate_gremlin_database_to_manual_throughput_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_gremlin_database_to_manual_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/throughputSettings/default/migrateToManualThroughput'} # type: ignore
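    # Usage sketch (illustrative): the reverse migration, back to manually provisioned RUs.
    #
    #   client.gremlin_resources.begin_migrate_gremlin_database_to_manual_throughput(
    #       "my-rg", "my-account", "mydb").result()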
def list_gremlin_graphs(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.GremlinGraphListResult"]
"""Lists the Gremlin graph under an existing Azure Cosmos DB database account.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GremlinGraphListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.GremlinGraphListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinGraphListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_gremlin_graphs.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('GremlinGraphListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_gremlin_graphs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs'} # type: ignore
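    # Usage sketch (illustrative): listing the graphs of one Gremlin database; the pager can be
    # iterated lazily or materialized with list().
    #
    #   graphs = list(client.gremlin_resources.list_gremlin_graphs(
    #       "my-rg", "my-account", "mydb"))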
def get_gremlin_graph(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.GremlinGraphGetResults"
"""Gets the Gremlin graph under an existing Azure Cosmos DB database account.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GremlinGraphGetResults, or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.GremlinGraphGetResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinGraphGetResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_gremlin_graph.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('GremlinGraphGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_gremlin_graph.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}'} # type: ignore
def _create_update_gremlin_graph_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
create_update_gremlin_graph_parameters, # type: "models.GremlinGraphCreateUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["models.GremlinGraphGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.GremlinGraphGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_update_gremlin_graph_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(create_update_gremlin_graph_parameters, 'GremlinGraphCreateUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GremlinGraphGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_update_gremlin_graph_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}'} # type: ignore
def begin_create_update_gremlin_graph(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
create_update_gremlin_graph_parameters, # type: "models.GremlinGraphCreateUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.GremlinGraphGetResults"]
"""Create or update an Azure Cosmos DB Gremlin graph.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:param create_update_gremlin_graph_parameters: The parameters to provide for the current
Gremlin graph.
:type create_update_gremlin_graph_parameters: ~azure.mgmt.cosmosdb.models.GremlinGraphCreateUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GremlinGraphGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.GremlinGraphGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.GremlinGraphGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_update_gremlin_graph_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
graph_name=graph_name,
create_update_gremlin_graph_parameters=create_update_gremlin_graph_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GremlinGraphGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_update_gremlin_graph.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}'} # type: ignore
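    # Usage sketch (illustrative): creating a graph with a partition key. GremlinGraphResource
    # and ContainerPartitionKey are assumed model names for this api-version.
    #
    #   graph_params = models.GremlinGraphCreateUpdateParameters(
    #       resource=models.GremlinGraphResource(
    #           id="mygraph",
    #           partition_key=models.ContainerPartitionKey(paths=["/partitionKey"], kind="Hash"),
    #       ),
    #       options=models.CreateUpdateOptions(),
    #   )
    #   graph = client.gremlin_resources.begin_create_update_gremlin_graph(
    #       "my-rg", "my-account", "mydb", "mygraph", graph_params).result()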
def _delete_gremlin_graph_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
# Construct URL
url = self._delete_gremlin_graph_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_gremlin_graph_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}'} # type: ignore
def begin_delete_gremlin_graph(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes an existing Azure Cosmos DB Gremlin graph.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_gremlin_graph_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
graph_name=graph_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_gremlin_graph.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}'} # type: ignore
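    # Usage sketch (illustrative): deleting a graph. Passing polling=False returns a poller
    # backed by NoPolling, so result() does not wait for the service-side delete to finish.
    #
    #   client.gremlin_resources.begin_delete_gremlin_graph(
    #       "my-rg", "my-account", "mydb", "mygraph", polling=False)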
def get_gremlin_graph_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ThroughputSettingsGetResults"
"""Gets the Gremlin graph throughput under an existing Azure Cosmos DB database account with the
provided name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ThroughputSettingsGetResults, or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_gremlin_graph_throughput.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_gremlin_graph_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default'} # type: ignore
def _update_gremlin_graph_throughput_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
update_throughput_parameters, # type: "models.ThroughputSettingsUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_gremlin_graph_throughput_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(update_throughput_parameters, 'ThroughputSettingsUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_gremlin_graph_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default'} # type: ignore
def begin_update_gremlin_graph_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
update_throughput_parameters, # type: "models.ThroughputSettingsUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Update RUs per second of an Azure Cosmos DB Gremlin graph.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
        :param update_throughput_parameters: The parameters to provide for the RUs per second of the
         current Gremlin graph.
:type update_throughput_parameters: ~azure.mgmt.cosmosdb.models.ThroughputSettingsUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_gremlin_graph_throughput_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
graph_name=graph_name,
update_throughput_parameters=update_throughput_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_gremlin_graph_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default'} # type: ignore
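# Illustrative usage sketch, not part of the generated operations class: one way a
# caller could drive this long-running update via the management client. The
# credential, subscription id, resource names, and the 400 RU/s figure are
# assumptions for the example; `gremlin_resources` is assumed to be the attribute
# under which this operations class is exposed on CosmosDBManagementClient.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient
from azure.mgmt.cosmosdb.models import ThroughputSettingsResource, ThroughputSettingsUpdateParameters
client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")
poller = client.gremlin_resources.begin_update_gremlin_graph_throughput(
    resource_group_name="my-rg",
    account_name="my-account",
    database_name="my-db",
    graph_name="my-graph",
    update_throughput_parameters=ThroughputSettingsUpdateParameters(
        resource=ThroughputSettingsResource(throughput=400),
    ),
)
settings = poller.result()  # blocks until the LRO reaches a terminal state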
def _migrate_gremlin_graph_to_autoscale_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._migrate_gremlin_graph_to_autoscale_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_migrate_gremlin_graph_to_autoscale_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default/migrateToAutoscale'} # type: ignore
def begin_migrate_gremlin_graph_to_autoscale(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Migrate an Azure Cosmos DB Gremlin graph from manual throughput to autoscale.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
 polling object for a personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._migrate_gremlin_graph_to_autoscale_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
graph_name=graph_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_gremlin_graph_to_autoscale.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default/migrateToAutoscale'} # type: ignore
def _migrate_gremlin_graph_to_manual_throughput_initial(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.ThroughputSettingsGetResults"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ThroughputSettingsGetResults"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._migrate_gremlin_graph_to_manual_throughput_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'graphName': self._serialize.url("graph_name", graph_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_migrate_gremlin_graph_to_manual_throughput_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default/migrateToManualThroughput'} # type: ignore
def begin_migrate_gremlin_graph_to_manual_throughput(
self,
resource_group_name, # type: str
account_name, # type: str
database_name, # type: str
graph_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.ThroughputSettingsGetResults"]
"""Migrate an Azure Cosmos DB Gremlin graph from autoscale to manual throughput.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param database_name: Cosmos DB database name.
:type database_name: str
:param graph_name: Cosmos DB graph name.
:type graph_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
 polling object for a personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ThroughputSettingsGetResults or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.ThroughputSettingsGetResults]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.ThroughputSettingsGetResults"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._migrate_gremlin_graph_to_manual_throughput_initial(
resource_group_name=resource_group_name,
account_name=account_name,
database_name=database_name,
graph_name=graph_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ThroughputSettingsGetResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_migrate_gremlin_graph_to_manual_throughput.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/gremlinDatabases/{databaseName}/graphs/{graphName}/throughputSettings/default/migrateToManualThroughput'} # type: ignore
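# Illustrative sketch, assuming the `client` constructed in the earlier example in
# this listing: the two migrate operations take only the resource identifiers and
# return an LROPoller whose result is the graph's ThroughputSettingsGetResults
# after the migration completes.
autoscale_settings = client.gremlin_resources.begin_migrate_gremlin_graph_to_autoscale(
    resource_group_name="my-rg",
    account_name="my-account",
    database_name="my-db",
    graph_name="my-graph",
).result()
manual_settings = client.gremlin_resources.begin_migrate_gremlin_graph_to_manual_throughput(
    resource_group_name="my-rg",
    account_name="my-account",
    database_name="my-db",
    graph_name="my-graph",
).result()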
| 52.538824
| 326
| 0.672242
| 9,640
| 89,316
| 5.995332
| 0.031328
| 0.017579
| 0.032356
| 0.009205
| 0.963474
| 0.957591
| 0.954131
| 0.951882
| 0.950082
| 0.943836
| 0
| 0.007992
| 0.2281
| 89,316
| 1,699
| 327
| 52.569747
| 0.830309
| 0.26638
| 0
| 0.849242
| 0
| 0.022302
| 0.171503
| 0.102696
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038359
| false
| 0
| 0.009813
| 0
| 0.114184
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6e833fb51a1ec7a1130669c82455b2f1f57a22e
| 53,602
|
py
|
Python
|
pythonFiles/tests/testing_tools/adapter/test_functional.py
|
erinxocon/vscode-python
|
e53f9061d16467a9ae2d8995a9a5f3cfa0f444e1
|
[
"MIT"
] | null | null | null |
pythonFiles/tests/testing_tools/adapter/test_functional.py
|
erinxocon/vscode-python
|
e53f9061d16467a9ae2d8995a9a5f3cfa0f444e1
|
[
"MIT"
] | null | null | null |
pythonFiles/tests/testing_tools/adapter/test_functional.py
|
erinxocon/vscode-python
|
e53f9061d16467a9ae2d8995a9a5f3cfa0f444e1
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from __future__ import unicode_literals
import json
import os
import os.path
import subprocess
import sys
import unittest
import pytest
from ...__main__ import TESTING_TOOLS_ROOT
CWD = os.getcwd()
DATA_DIR = os.path.join(os.path.dirname(__file__), '.data')
SCRIPT = os.path.join(TESTING_TOOLS_ROOT, 'run_adapter.py')
def resolve_testroot(name):
projroot = os.path.join(DATA_DIR, name)
return projroot, os.path.join(projroot, 'tests')
def run_adapter(cmd, tool, *cliargs):
try:
return _run_adapter(cmd, tool, *cliargs)
except subprocess.CalledProcessError:
# Re-run pytest but print out stdout & stderr this time
try:
return _run_adapter(cmd, tool, *cliargs, hidestdio=False)
except subprocess.CalledProcessError as exc:
# The second run also failed: surface the adapter's output for debugging; the
# caller will then fail when it tries to parse the missing JSON.
print(exc.output)
def _run_adapter(cmd, tool, *cliargs, **kwargs):
hidestdio = kwargs.pop('hidestdio', True)
assert not kwargs
kwds = {}
argv = [sys.executable, SCRIPT, cmd, tool, '--'] + list(cliargs)
if not hidestdio:
argv.insert(4, '--no-hide-stdio')
kwds['stderr'] = subprocess.STDOUT
argv.append('--cache-clear')
print('running {!r}'.format(' '.join(arg.rpartition(CWD + '/')[-1] for arg in argv)))
return subprocess.check_output(argv,
universal_newlines=True,
**kwds)
def fix_path(nodeid):
return nodeid.replace('/', os.path.sep)
def fix_test_order(tests):
if sys.version_info >= (3, 6):
return tests
fixed = []
curfile = None
group = []
for test in tests:
if (curfile or '???') not in test['id']:
fixed.extend(sorted(group, key=lambda t: t['id']))
group = []
curfile = test['id'].partition('.py::')[0] + '.py'
group.append(test)
fixed.extend(sorted(group, key=lambda t: t['id']))
return fixed
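# Worked example (hypothetical ids, not from the fixture projects): given
#   [{'id': 'tests/test_b.py::test_two'},
#    {'id': 'tests/test_b.py::test_one'},
#    {'id': 'tests/test_a.py::test_only'}]
# fix_test_order keeps the per-file grouping but sorts ids within each file's group,
# yielding b::test_one, b::test_two, a::test_only on Python < 3.6; on 3.6+ the list
# is returned unchanged.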
def fix_source(tests, testid, srcfile, lineno):
testid = fix_path(testid)
for test in tests:
if test['id'] == testid:
break
else:
raise KeyError('test {!r} not found'.format(testid))
if not srcfile:
srcfile = test['source'].rpartition(':')[0]
test['source'] = fix_path('{}:{}'.format(srcfile, lineno))
@pytest.mark.functional
class PytestTests(unittest.TestCase):
def complex(self, testroot):
results = COMPLEX.copy()
results['root'] = testroot
return [results]
def test_discover_simple(self):
projroot, testroot = resolve_testroot('simple')
out = run_adapter('discover', 'pytest',
'--rootdir', projroot,
testroot)
result = json.loads(out)
self.maxDiff = None
self.assertEqual(result, [{
'root': projroot,
'rootid': '.',
'parents': [
{'id': fix_path('./tests'),
'kind': 'folder',
'name': 'tests',
'parentid': '.',
},
{'id': fix_path('./tests/test_spam.py'),
'kind': 'file',
'name': 'test_spam.py',
'parentid': fix_path('./tests'),
},
],
'tests': [
{'id': fix_path('./tests/test_spam.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_spam.py:2'),
'markers': [],
'parentid': fix_path('./tests/test_spam.py'),
},
],
}])
def test_discover_complex_default(self):
projroot, testroot = resolve_testroot('complex')
expected = self.complex(projroot)
expected[0]['tests'] = fix_test_order(expected[0]['tests'])
if sys.version_info < (3,):
decorated = [
'./tests/test_unittest.py::MyTests::test_skipped',
'./tests/test_unittest.py::MyTests::test_maybe_skipped',
'./tests/test_unittest.py::MyTests::test_maybe_not_skipped',
]
for testid in decorated:
fix_source(expected[0]['tests'], testid, None, 0)
out = run_adapter('discover', 'pytest',
'--rootdir', projroot,
testroot)
result = json.loads(out)
result[0]['tests'] = fix_test_order(result[0]['tests'])
self.maxDiff = None
self.assertEqual(result, expected)
def test_discover_complex_doctest(self):
projroot, _ = resolve_testroot('complex')
expected = self.complex(projroot)
# add in doctests from test suite
expected[0]['parents'].insert(3, {
'id': fix_path('./tests/test_doctest.py'),
'kind': 'file',
'name': 'test_doctest.py',
'parentid': fix_path('./tests'),
})
expected[0]['tests'].insert(2, {
'id': fix_path('./tests/test_doctest.py::tests.test_doctest'),
'name': 'tests.test_doctest',
'source': fix_path('./tests/test_doctest.py:1'),
'markers': [],
'parentid': fix_path('./tests/test_doctest.py'),
})
# add in doctests from non-test module
expected[0]['parents'].insert(0, {
'id': fix_path('./mod.py'),
'kind': 'file',
'name': 'mod.py',
'parentid': '.',
})
expected[0]['tests'] = [
{'id': fix_path('./mod.py::mod'),
'name': 'mod',
'source': fix_path('./mod.py:1'),
'markers': [],
'parentid': fix_path('./mod.py'),
},
{'id': fix_path('./mod.py::mod.Spam'),
'name': 'mod.Spam',
'source': fix_path('./mod.py:33'),
'markers': [],
'parentid': fix_path('./mod.py'),
},
{'id': fix_path('./mod.py::mod.Spam.eggs'),
'name': 'mod.Spam.eggs',
'source': fix_path('./mod.py:43'),
'markers': [],
'parentid': fix_path('./mod.py'),
},
{'id': fix_path('./mod.py::mod.square'),
'name': 'mod.square',
'source': fix_path('./mod.py:18'),
'markers': [],
'parentid': fix_path('./mod.py'),
},
] + expected[0]['tests']
expected[0]['tests'] = fix_test_order(expected[0]['tests'])
if sys.version_info < (3,):
decorated = [
'./tests/test_unittest.py::MyTests::test_skipped',
'./tests/test_unittest.py::MyTests::test_maybe_skipped',
'./tests/test_unittest.py::MyTests::test_maybe_not_skipped',
]
for testid in decorated:
fix_source(expected[0]['tests'], testid, None, 0)
out = run_adapter('discover', 'pytest',
'--rootdir', projroot,
'--doctest-modules',
projroot)
result = json.loads(out)
result[0]['tests'] = fix_test_order(result[0]['tests'])
self.maxDiff = None
self.assertEqual(result, expected)
def test_discover_not_found(self):
projroot, testroot = resolve_testroot('notests')
out = run_adapter('discover', 'pytest',
'--rootdir', projroot,
testroot)
result = json.loads(out)
self.maxDiff = None
self.assertEqual(result, [])
# TODO: Expect the following instead?
#self.assertEqual(result, [{
# 'root': projroot,
# 'rootid': '.',
# 'parents': [],
# 'tests': [],
# }])
COMPLEX = {
'root': None,
'rootid': '.',
'parents': [
#
{'id': fix_path('./tests'),
'kind': 'folder',
'name': 'tests',
'parentid': '.',
},
# +++
{'id': fix_path('./tests/test_42-43.py'),
'kind': 'file',
'name': 'test_42-43.py',
'parentid': fix_path('./tests'),
},
# +++
{'id': fix_path('./tests/test_42.py'),
'kind': 'file',
'name': 'test_42.py',
'parentid': fix_path('./tests'),
},
# +++
{'id': fix_path('./tests/test_doctest.txt'),
'kind': 'file',
'name': 'test_doctest.txt',
'parentid': fix_path('./tests'),
},
# +++
{'id': fix_path('./tests/test_foo.py'),
'kind': 'file',
'name': 'test_foo.py',
'parentid': fix_path('./tests'),
},
# +++
{'id': fix_path('./tests/test_mixed.py'),
'kind': 'file',
'name': 'test_mixed.py',
'parentid': fix_path('./tests'),
},
{'id': fix_path('./tests/test_mixed.py::MyTests'),
'kind': 'suite',
'name': 'MyTests',
'parentid': fix_path('./tests/test_mixed.py'),
},
{'id': fix_path('./tests/test_mixed.py::TestMySuite'),
'kind': 'suite',
'name': 'TestMySuite',
'parentid': fix_path('./tests/test_mixed.py'),
},
# +++
{'id': fix_path('./tests/test_pytest.py'),
'kind': 'file',
'name': 'test_pytest.py',
'parentid': fix_path('./tests'),
},
{'id': fix_path('./tests/test_pytest.py::TestEggs'),
'kind': 'suite',
'name': 'TestEggs',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam'),
'kind': 'suite',
'name': 'TestParam',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam::test_param_13'),
'kind': 'function',
'name': 'test_param_13',
'parentid': fix_path('./tests/test_pytest.py::TestParam'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll'),
'kind': 'suite',
'name': 'TestParamAll',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13'),
'kind': 'function',
'name': 'test_param_13',
'parentid': fix_path('./tests/test_pytest.py::TestParamAll'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13'),
'kind': 'function',
'name': 'test_spam_13',
'parentid': fix_path('./tests/test_pytest.py::TestParamAll'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam'),
'kind': 'suite',
'name': 'TestSpam',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam::TestHam'),
'kind': 'suite',
'name': 'TestHam',
'parentid': fix_path('./tests/test_pytest.py::TestSpam'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam::TestHam::TestEggs'),
'kind': 'suite',
'name': 'TestEggs',
'parentid': fix_path('./tests/test_pytest.py::TestSpam::TestHam'),
},
{'id': fix_path('./tests/test_pytest.py::test_fixture_param'),
'kind': 'function',
'name': 'test_fixture_param',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_01'),
'kind': 'function',
'name': 'test_param_01',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_11'),
'kind': 'function',
'name': 'test_param_11',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13'),
'kind': 'function',
'name': 'test_param_13',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_markers'),
'kind': 'function',
'name': 'test_param_13_markers',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_repeat'),
'kind': 'function',
'name': 'test_param_13_repeat',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_skipped'),
'kind': 'function',
'name': 'test_param_13_skipped',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13'),
'kind': 'function',
'name': 'test_param_23_13',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_raises'),
'kind': 'function',
'name': 'test_param_23_raises',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33'),
'kind': 'function',
'name': 'test_param_33',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33_ids'),
'kind': 'function',
'name': 'test_param_33_ids',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_fixture'),
'kind': 'function',
'name': 'test_param_fixture',
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_mark_fixture'),
'kind': 'function',
'name': 'test_param_mark_fixture',
'parentid': fix_path('./tests/test_pytest.py'),
},
# +++
{'id': fix_path('./tests/test_pytest_param.py'),
'kind': 'file',
'name': 'test_pytest_param.py',
'parentid': fix_path('./tests'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll'),
'kind': 'suite',
'name': 'TestParamAll',
'parentid': fix_path('./tests/test_pytest_param.py'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13'),
'kind': 'function',
'name': 'test_param_13',
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13'),
'kind': 'function',
'name': 'test_spam_13',
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll'),
},
{'id': fix_path('./tests/test_pytest_param.py::test_param_13'),
'kind': 'function',
'name': 'test_param_13',
'parentid': fix_path('./tests/test_pytest_param.py'),
},
# +++
{'id': fix_path('./tests/test_unittest.py'),
'kind': 'file',
'name': 'test_unittest.py',
'parentid': fix_path('./tests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests'),
'kind': 'suite',
'name': 'MyTests',
'parentid': fix_path('./tests/test_unittest.py'),
},
{'id': fix_path('./tests/test_unittest.py::OtherTests'),
'kind': 'suite',
'name': 'OtherTests',
'parentid': fix_path('./tests/test_unittest.py'),
},
##
{'id': fix_path('./tests/v'),
'kind': 'folder',
'name': 'v',
'parentid': fix_path('./tests'),
},
## +++
{'id': fix_path('./tests/v/test_eggs.py'),
'kind': 'file',
'name': 'test_eggs.py',
'parentid': fix_path('./tests/v'),
},
{'id': fix_path('./tests/v/test_eggs.py::TestSimple'),
'kind': 'suite',
'name': 'TestSimple',
'parentid': fix_path('./tests/v/test_eggs.py'),
},
## +++
{'id': fix_path('./tests/v/test_ham.py'),
'kind': 'file',
'name': 'test_ham.py',
'parentid': fix_path('./tests/v'),
},
## +++
{'id': fix_path('./tests/v/test_spam.py'),
'kind': 'file',
'name': 'test_spam.py',
'parentid': fix_path('./tests/v'),
},
##
{'id': fix_path('./tests/w'),
'kind': 'folder',
'name': 'w',
'parentid': fix_path('./tests'),
},
## +++
{'id': fix_path('./tests/w/test_spam.py'),
'kind': 'file',
'name': 'test_spam.py',
'parentid': fix_path('./tests/w'),
},
## +++
{'id': fix_path('./tests/w/test_spam_ex.py'),
'kind': 'file',
'name': 'test_spam_ex.py',
'parentid': fix_path('./tests/w'),
},
##
{'id': fix_path('./tests/x'),
'kind': 'folder',
'name': 'x',
'parentid': fix_path('./tests'),
},
###
{'id': fix_path('./tests/x/y'),
'kind': 'folder',
'name': 'y',
'parentid': fix_path('./tests/x'),
},
####
{'id': fix_path('./tests/x/y/z'),
'kind': 'folder',
'name': 'z',
'parentid': fix_path('./tests/x/y'),
},
#####
{'id': fix_path('./tests/x/y/z/a'),
'kind': 'folder',
'name': 'a',
'parentid': fix_path('./tests/x/y/z'),
},
##### +++
{'id': fix_path('./tests/x/y/z/a/test_spam.py'),
'kind': 'file',
'name': 'test_spam.py',
'parentid': fix_path('./tests/x/y/z/a'),
},
#####
{'id': fix_path('./tests/x/y/z/b'),
'kind': 'folder',
'name': 'b',
'parentid': fix_path('./tests/x/y/z'),
},
##### +++
{'id': fix_path('./tests/x/y/z/b/test_spam.py'),
'kind': 'file',
'name': 'test_spam.py',
'parentid': fix_path('./tests/x/y/z/b'),
},
#### +++
{'id': fix_path('./tests/x/y/z/test_ham.py'),
'kind': 'file',
'name': 'test_ham.py',
'parentid': fix_path('./tests/x/y/z'),
},
],
'tests': [
##########
{'id': fix_path('./tests/test_42-43.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_42-43.py:2'),
'markers': [],
'parentid': fix_path('./tests/test_42-43.py'),
},
#####
{'id': fix_path('./tests/test_42.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_42.py:2'),
'markers': [],
'parentid': fix_path('./tests/test_42.py'),
},
#####
{'id': fix_path('./tests/test_doctest.txt::test_doctest.txt'),
'name': 'test_doctest.txt',
'source': fix_path('./tests/test_doctest.txt:1'),
'markers': [],
'parentid': fix_path('./tests/test_doctest.txt'),
},
#####
{'id': fix_path('./tests/test_foo.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_foo.py:3'),
'markers': [],
'parentid': fix_path('./tests/test_foo.py'),
},
#####
{'id': fix_path('./tests/test_mixed.py::test_top_level'),
'name': 'test_top_level',
'source': fix_path('./tests/test_mixed.py:5'),
'markers': [],
'parentid': fix_path('./tests/test_mixed.py'),
},
{'id': fix_path('./tests/test_mixed.py::test_skipped'),
'name': 'test_skipped',
'source': fix_path('./tests/test_mixed.py:9'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_mixed.py'),
},
{'id': fix_path('./tests/test_mixed.py::TestMySuite::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_mixed.py:16'),
'markers': [],
'parentid': fix_path('./tests/test_mixed.py::TestMySuite'),
},
{'id': fix_path('./tests/test_mixed.py::MyTests::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_mixed.py:22'),
'markers': [],
'parentid': fix_path('./tests/test_mixed.py::MyTests'),
},
{'id': fix_path('./tests/test_mixed.py::MyTests::test_skipped'),
'name': 'test_skipped',
'source': fix_path('./tests/test_mixed.py:25'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_mixed.py::MyTests'),
},
#####
{'id': fix_path('./tests/test_pytest.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_pytest.py:6'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_failure'),
'name': 'test_failure',
'source': fix_path('./tests/test_pytest.py:10'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_runtime_skipped'),
'name': 'test_runtime_skipped',
'source': fix_path('./tests/test_pytest.py:14'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_runtime_failed'),
'name': 'test_runtime_failed',
'source': fix_path('./tests/test_pytest.py:18'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_raises'),
'name': 'test_raises',
'source': fix_path('./tests/test_pytest.py:22'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_skipped'),
'name': 'test_skipped',
'source': fix_path('./tests/test_pytest.py:26'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_maybe_skipped'),
'name': 'test_maybe_skipped',
'source': fix_path('./tests/test_pytest.py:31'),
'markers': ['skip-if'],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_known_failure'),
'name': 'test_known_failure',
'source': fix_path('./tests/test_pytest.py:36'),
'markers': ['expected-failure'],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_warned'),
'name': 'test_warned',
'source': fix_path('./tests/test_pytest.py:41'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_custom_marker'),
'name': 'test_custom_marker',
'source': fix_path('./tests/test_pytest.py:46'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_multiple_markers'),
'name': 'test_multiple_markers',
'source': fix_path('./tests/test_pytest.py:51'),
'markers': ['expected-failure', 'skip', 'skip-if'],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_dynamic_1'),
'name': 'test_dynamic_1',
'source': fix_path('./tests/test_pytest.py:62'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_dynamic_2'),
'name': 'test_dynamic_2',
'source': fix_path('./tests/test_pytest.py:62'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_dynamic_3'),
'name': 'test_dynamic_3',
'source': fix_path('./tests/test_pytest.py:62'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_pytest.py:70'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestSpam'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam::test_skipped'),
'name': 'test_skipped',
'source': fix_path('./tests/test_pytest.py:73'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py::TestSpam'),
},
{'id': fix_path('./tests/test_pytest.py::TestSpam::TestHam::TestEggs::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_pytest.py:81'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestSpam::TestHam::TestEggs'),
},
{'id': fix_path('./tests/test_pytest.py::TestEggs::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_pytest.py:93'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestEggs'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_01[]'),
'name': 'test_param_01[]',
'source': fix_path('./tests/test_pytest.py:103'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_01'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_11[x0]'),
'name': 'test_param_11[x0]',
'source': fix_path('./tests/test_pytest.py:108'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_11'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13[x0]'),
'name': 'test_param_13[x0]',
'source': fix_path('./tests/test_pytest.py:113'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13[x1]'),
'name': 'test_param_13[x1]',
'source': fix_path('./tests/test_pytest.py:113'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13[x2]'),
'name': 'test_param_13[x2]',
'source': fix_path('./tests/test_pytest.py:113'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_repeat[x0]'),
'name': 'test_param_13_repeat[x0]',
'source': fix_path('./tests/test_pytest.py:118'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_repeat'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_repeat[x1]'),
'name': 'test_param_13_repeat[x1]',
'source': fix_path('./tests/test_pytest.py:118'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_repeat'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_repeat[x2]'),
'name': 'test_param_13_repeat[x2]',
'source': fix_path('./tests/test_pytest.py:118'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_repeat'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33[1-1-1]'),
'name': 'test_param_33[1-1-1]',
'source': fix_path('./tests/test_pytest.py:123'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33[3-4-5]'),
'name': 'test_param_33[3-4-5]',
'source': fix_path('./tests/test_pytest.py:123'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33[0-0-0]'),
'name': 'test_param_33[0-0-0]',
'source': fix_path('./tests/test_pytest.py:123'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33_ids[v1]'),
'name': 'test_param_33_ids[v1]',
'source': fix_path('./tests/test_pytest.py:128'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33_ids'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33_ids[v2]'),
'name': 'test_param_33_ids[v2]',
'source': fix_path('./tests/test_pytest.py:128'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33_ids'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_33_ids[v3]'),
'name': 'test_param_33_ids[v3]',
'source': fix_path('./tests/test_pytest.py:128'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_33_ids'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[1-1-z0]'),
'name': 'test_param_23_13[1-1-z0]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[1-1-z1]'),
'name': 'test_param_23_13[1-1-z1]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[1-1-z2]'),
'name': 'test_param_23_13[1-1-z2]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[3-4-z0]'),
'name': 'test_param_23_13[3-4-z0]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[3-4-z1]'),
'name': 'test_param_23_13[3-4-z1]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[3-4-z2]'),
'name': 'test_param_23_13[3-4-z2]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[0-0-z0]'),
'name': 'test_param_23_13[0-0-z0]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[0-0-z1]'),
'name': 'test_param_23_13[0-0-z1]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_13[0-0-z2]'),
'name': 'test_param_23_13[0-0-z2]',
'source': fix_path('./tests/test_pytest.py:134'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_markers[x0]'),
'name': 'test_param_13_markers[x0]',
'source': fix_path('./tests/test_pytest.py:140'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_markers'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_markers[???]'),
'name': 'test_param_13_markers[???]',
'source': fix_path('./tests/test_pytest.py:140'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_markers'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_markers[2]'),
'name': 'test_param_13_markers[2]',
'source': fix_path('./tests/test_pytest.py:140'),
'markers': ['expected-failure'],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_markers'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_skipped[x0]'),
'name': 'test_param_13_skipped[x0]',
'source': fix_path('./tests/test_pytest.py:149'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_skipped'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_skipped[x1]'),
'name': 'test_param_13_skipped[x1]',
'source': fix_path('./tests/test_pytest.py:149'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_skipped'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_13_skipped[x2]'),
'name': 'test_param_13_skipped[x2]',
'source': fix_path('./tests/test_pytest.py:149'),
'markers': ['skip'],
'parentid': fix_path('./tests/test_pytest.py::test_param_13_skipped'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_raises[1-None]'),
'name': 'test_param_23_raises[1-None]',
'source': fix_path('./tests/test_pytest.py:155'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_raises'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_raises[1.0-None]'),
'name': 'test_param_23_raises[1.0-None]',
'source': fix_path('./tests/test_pytest.py:155'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_raises'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_23_raises[2-catch2]'),
'name': 'test_param_23_raises[2-catch2]',
'source': fix_path('./tests/test_pytest.py:155'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_23_raises'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_pytest.py:164'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParam'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam::test_param_13[x0]'),
'name': 'test_param_13[x0]',
'source': fix_path('./tests/test_pytest.py:167'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParam::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam::test_param_13[x1]'),
'name': 'test_param_13[x1]',
'source': fix_path('./tests/test_pytest.py:167'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParam::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParam::test_param_13[x2]'),
'name': 'test_param_13[x2]',
'source': fix_path('./tests/test_pytest.py:167'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParam::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13[x0]'),
'name': 'test_param_13[x0]',
'source': fix_path('./tests/test_pytest.py:175'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13[x1]'),
'name': 'test_param_13[x1]',
'source': fix_path('./tests/test_pytest.py:175'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13[x2]'),
'name': 'test_param_13[x2]',
'source': fix_path('./tests/test_pytest.py:175'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13[x0]'),
'name': 'test_spam_13[x0]',
'source': fix_path('./tests/test_pytest.py:178'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13[x1]'),
'name': 'test_spam_13[x1]',
'source': fix_path('./tests/test_pytest.py:178'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13'),
},
{'id': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13[x2]'),
'name': 'test_spam_13[x2]',
'source': fix_path('./tests/test_pytest.py:178'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::TestParamAll::test_spam_13'),
},
{'id': fix_path('./tests/test_pytest.py::test_fixture'),
'name': 'test_fixture',
'source': fix_path('./tests/test_pytest.py:192'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_mark_fixture'),
'name': 'test_mark_fixture',
'source': fix_path('./tests/test_pytest.py:196'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_fixture[x0]'),
'name': 'test_param_fixture[x0]',
'source': fix_path('./tests/test_pytest.py:201'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_fixture[x1]'),
'name': 'test_param_fixture[x1]',
'source': fix_path('./tests/test_pytest.py:201'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_fixture[x2]'),
'name': 'test_param_fixture[x2]',
'source': fix_path('./tests/test_pytest.py:201'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_mark_fixture[x0]'),
'name': 'test_param_mark_fixture[x0]',
'source': fix_path('./tests/test_pytest.py:207'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_mark_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_mark_fixture[x1]'),
'name': 'test_param_mark_fixture[x1]',
'source': fix_path('./tests/test_pytest.py:207'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_mark_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_param_mark_fixture[x2]'),
'name': 'test_param_mark_fixture[x2]',
'source': fix_path('./tests/test_pytest.py:207'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_param_mark_fixture'),
},
{'id': fix_path('./tests/test_pytest.py::test_fixture_param[spam]'),
'name': 'test_fixture_param[spam]',
'source': fix_path('./tests/test_pytest.py:216'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_fixture_param'),
},
{'id': fix_path('./tests/test_pytest.py::test_fixture_param[eggs]'),
'name': 'test_fixture_param[eggs]',
'source': fix_path('./tests/test_pytest.py:216'),
'markers': [],
'parentid': fix_path('./tests/test_pytest.py::test_fixture_param'),
},
######
{'id': fix_path('./tests/test_pytest_param.py::test_param_13[x0]'),
'name': 'test_param_13[x0]',
'source': fix_path('./tests/test_pytest_param.py:8'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::test_param_13[x1]'),
'name': 'test_param_13[x1]',
'source': fix_path('./tests/test_pytest_param.py:8'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::test_param_13[x2]'),
'name': 'test_param_13[x2]',
'source': fix_path('./tests/test_pytest_param.py:8'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13[x0]'),
'name': 'test_param_13[x0]',
'source': fix_path('./tests/test_pytest_param.py:14'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13[x1]'),
'name': 'test_param_13[x1]',
'source': fix_path('./tests/test_pytest_param.py:14'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13[x2]'),
'name': 'test_param_13[x2]',
'source': fix_path('./tests/test_pytest_param.py:14'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_param_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13[x0]'),
'name': 'test_spam_13[x0]',
'source': fix_path('./tests/test_pytest_param.py:17'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13[x1]'),
'name': 'test_spam_13[x1]',
'source': fix_path('./tests/test_pytest_param.py:17'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13'),
},
{'id': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13[x2]'),
'name': 'test_spam_13[x2]',
'source': fix_path('./tests/test_pytest_param.py:17'),
'markers': [],
'parentid': fix_path('./tests/test_pytest_param.py::TestParamAll::test_spam_13'),
},
######
{'id': fix_path('./tests/test_unittest.py::MyTests::test_dynamic_'),
'name': 'test_dynamic_',
'source': fix_path('./tests/test_unittest.py:54'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_failure'),
'name': 'test_failure',
'source': fix_path('./tests/test_unittest.py:34'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_known_failure'),
'name': 'test_known_failure',
'source': fix_path('./tests/test_unittest.py:37'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_maybe_not_skipped'),
'name': 'test_maybe_not_skipped',
'source': fix_path('./tests/test_unittest.py:17'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_maybe_skipped'),
'name': 'test_maybe_skipped',
'source': fix_path('./tests/test_unittest.py:13'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_unittest.py:6'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_skipped'),
'name': 'test_skipped',
'source': fix_path('./tests/test_unittest.py:9'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_skipped_inside'),
'name': 'test_skipped_inside',
'source': fix_path('./tests/test_unittest.py:21'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_with_nested_subtests'),
'name': 'test_with_nested_subtests',
'source': fix_path('./tests/test_unittest.py:46'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::MyTests::test_with_subtests'),
'name': 'test_with_subtests',
'source': fix_path('./tests/test_unittest.py:41'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::MyTests'),
},
{'id': fix_path('./tests/test_unittest.py::OtherTests::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/test_unittest.py:61'),
'markers': [],
'parentid': fix_path('./tests/test_unittest.py::OtherTests'),
},
###########
{'id': fix_path('./tests/v/test_eggs.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/v/spam.py:2'),
'markers': [],
'parentid': fix_path('./tests/v/test_eggs.py'),
},
{'id': fix_path('./tests/v/test_eggs.py::TestSimple::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/v/spam.py:8'),
'markers': [],
'parentid': fix_path('./tests/v/test_eggs.py::TestSimple'),
},
######
{'id': fix_path('./tests/v/test_ham.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/v/spam.py:2'),
'markers': [],
'parentid': fix_path('./tests/v/test_ham.py'),
},
{'id': fix_path('./tests/v/test_ham.py::test_not_hard'),
'name': 'test_not_hard',
'source': fix_path('./tests/v/spam.py:2'),
'markers': [],
'parentid': fix_path('./tests/v/test_ham.py'),
},
######
{'id': fix_path('./tests/v/test_spam.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/v/spam.py:2'),
'markers': [],
'parentid': fix_path('./tests/v/test_spam.py'),
},
{'id': fix_path('./tests/v/test_spam.py::test_simpler'),
'name': 'test_simpler',
'source': fix_path('./tests/v/test_spam.py:4'),
'markers': [],
'parentid': fix_path('./tests/v/test_spam.py'),
},
###########
{'id': fix_path('./tests/w/test_spam.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/w/test_spam.py:4'),
'markers': [],
'parentid': fix_path('./tests/w/test_spam.py'),
},
{'id': fix_path('./tests/w/test_spam_ex.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/w/test_spam_ex.py:4'),
'markers': [],
'parentid': fix_path('./tests/w/test_spam_ex.py'),
},
###########
{'id': fix_path('./tests/x/y/z/test_ham.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/x/y/z/test_ham.py:2'),
'markers': [],
'parentid': fix_path('./tests/x/y/z/test_ham.py'),
},
######
{'id': fix_path('./tests/x/y/z/a/test_spam.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/x/y/z/a/test_spam.py:11'),
'markers': [],
'parentid': fix_path('./tests/x/y/z/a/test_spam.py'),
},
{'id': fix_path('./tests/x/y/z/b/test_spam.py::test_simple'),
'name': 'test_simple',
'source': fix_path('./tests/x/y/z/b/test_spam.py:7'),
'markers': [],
'parentid': fix_path('./tests/x/y/z/b/test_spam.py'),
},
],
}
| 43.828291
| 96
| 0.491605
| 5,793
| 53,602
| 4.257034
| 0.047989
| 0.132274
| 0.218969
| 0.242002
| 0.89084
| 0.857751
| 0.829691
| 0.792141
| 0.750253
| 0.719517
| 0
| 0.02468
| 0.315902
| 53,602
| 1,222
| 97
| 43.864157
| 0.64785
| 0.007817
| 0
| 0.467372
| 0
| 0
| 0.434642
| 0.293534
| 0
| 0
| 0
| 0.000818
| 0.004409
| 1
| 0.0097
| false
| 0
| 0.007937
| 0.000882
| 0.025573
| 0.001764
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fc0a7d892ee7ccba2ec10d7aa3adc47150da3dac
| 98,817
|
py
|
Python
|
storm/Nimbus.py
|
krux/python-storm
|
1a9c06d3580a2b1bc2c27174d892a6dbcaa9e0bd
|
[
"BSD-3-Clause"
] | null | null | null |
storm/Nimbus.py
|
krux/python-storm
|
1a9c06d3580a2b1bc2c27174d892a6dbcaa9e0bd
|
[
"BSD-3-Clause"
] | null | null | null |
storm/Nimbus.py
|
krux/python-storm
|
1a9c06d3580a2b1bc2c27174d892a6dbcaa9e0bd
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
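# fastbinary is Thrift's optional C accelerator for (de)serialization; when the
# extension is not available, the generated read/write code falls back to the
# pure-Python protocol implementation.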
class Iface:
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
pass
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
pass
def killTopology(self, name):
"""
Parameters:
- name
"""
pass
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def activate(self, name):
"""
Parameters:
- name
"""
pass
def deactivate(self, name):
"""
Parameters:
- name
"""
pass
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def beginFileUpload(self, ):
pass
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
pass
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
pass
def beginFileDownload(self, file):
"""
Parameters:
- file
"""
pass
def downloadChunk(self, id):
"""
Parameters:
- id
"""
pass
def getNimbusConf(self, ):
pass
def getClusterInfo(self, ):
pass
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
pass
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
pass
def getTopology(self, id):
"""
Parameters:
- id
"""
pass
def getUserTopology(self, id):
"""
Parameters:
- id
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
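# Illustrative wiring sketch (host, port, and the framed transport are assumptions,
# though Storm's Nimbus thrift endpoint conventionally listens on 6627 with a framed
# transport): connect the generated Client before invoking the RPC methods below.
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
transport = TTransport.TFramedTransport(TSocket.TSocket('nimbus.example.com', 6627))
protocol = TBinaryProtocol.TBinaryProtocol(transport)
nimbus = Client(protocol)
transport.open()
# e.g. cluster_info = nimbus.getClusterInfo()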
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
self.send_submitTopology(name, uploadedJarLocation, jsonConf, topology)
self.recv_submitTopology()
def send_submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
self._oprot.writeMessageBegin('submitTopology', TMessageType.CALL, self._seqid)
args = submitTopology_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = submitTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
return
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
self.send_submitTopologyWithOpts(name, uploadedJarLocation, jsonConf, topology, options)
self.recv_submitTopologyWithOpts()
def send_submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
self._oprot.writeMessageBegin('submitTopologyWithOpts', TMessageType.CALL, self._seqid)
args = submitTopologyWithOpts_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopologyWithOpts(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = submitTopologyWithOpts_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
return
def killTopology(self, name):
"""
Parameters:
- name
"""
self.send_killTopology(name)
self.recv_killTopology()
def send_killTopology(self, name):
self._oprot.writeMessageBegin('killTopology', TMessageType.CALL, self._seqid)
args = killTopology_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = killTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_killTopologyWithOpts(name, options)
self.recv_killTopologyWithOpts()
def send_killTopologyWithOpts(self, name, options):
self._oprot.writeMessageBegin('killTopologyWithOpts', TMessageType.CALL, self._seqid)
args = killTopologyWithOpts_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopologyWithOpts(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = killTopologyWithOpts_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def activate(self, name):
"""
Parameters:
- name
"""
self.send_activate(name)
self.recv_activate()
def send_activate(self, name):
self._oprot.writeMessageBegin('activate', TMessageType.CALL, self._seqid)
args = activate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_activate(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = activate_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def deactivate(self, name):
"""
Parameters:
- name
"""
self.send_deactivate(name)
self.recv_deactivate()
def send_deactivate(self, name):
self._oprot.writeMessageBegin('deactivate', TMessageType.CALL, self._seqid)
args = deactivate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deactivate(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = deactivate_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_rebalance(name, options)
self.recv_rebalance()
def send_rebalance(self, name, options):
self._oprot.writeMessageBegin('rebalance', TMessageType.CALL, self._seqid)
args = rebalance_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_rebalance(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = rebalance_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
return
def beginFileUpload(self, ):
self.send_beginFileUpload()
return self.recv_beginFileUpload()
def send_beginFileUpload(self, ):
self._oprot.writeMessageBegin('beginFileUpload', TMessageType.CALL, self._seqid)
args = beginFileUpload_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginFileUpload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = beginFileUpload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileUpload failed: unknown result");
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
self.send_uploadChunk(location, chunk)
self.recv_uploadChunk()
def send_uploadChunk(self, location, chunk):
self._oprot.writeMessageBegin('uploadChunk', TMessageType.CALL, self._seqid)
args = uploadChunk_args()
args.location = location
args.chunk = chunk
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadChunk(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = uploadChunk_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
return
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
self.send_finishFileUpload(location)
self.recv_finishFileUpload()
def send_finishFileUpload(self, location):
self._oprot.writeMessageBegin('finishFileUpload', TMessageType.CALL, self._seqid)
args = finishFileUpload_args()
args.location = location
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_finishFileUpload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = finishFileUpload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
return
def beginFileDownload(self, file):
"""
Parameters:
- file
"""
self.send_beginFileDownload(file)
return self.recv_beginFileDownload()
def send_beginFileDownload(self, file):
self._oprot.writeMessageBegin('beginFileDownload', TMessageType.CALL, self._seqid)
args = beginFileDownload_args()
args.file = file
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginFileDownload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = beginFileDownload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileDownload failed: unknown result");
def downloadChunk(self, id):
"""
Parameters:
- id
"""
self.send_downloadChunk(id)
return self.recv_downloadChunk()
def send_downloadChunk(self, id):
self._oprot.writeMessageBegin('downloadChunk', TMessageType.CALL, self._seqid)
args = downloadChunk_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadChunk(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = downloadChunk_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadChunk failed: unknown result");
def getNimbusConf(self, ):
self.send_getNimbusConf()
return self.recv_getNimbusConf()
def send_getNimbusConf(self, ):
self._oprot.writeMessageBegin('getNimbusConf', TMessageType.CALL, self._seqid)
args = getNimbusConf_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getNimbusConf(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getNimbusConf_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getNimbusConf failed: unknown result");
def getClusterInfo(self, ):
self.send_getClusterInfo()
return self.recv_getClusterInfo()
def send_getClusterInfo(self, ):
self._oprot.writeMessageBegin('getClusterInfo', TMessageType.CALL, self._seqid)
args = getClusterInfo_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getClusterInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getClusterInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterInfo failed: unknown result");
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyInfo(id)
return self.recv_getTopologyInfo()
def send_getTopologyInfo(self, id):
self._oprot.writeMessageBegin('getTopologyInfo', TMessageType.CALL, self._seqid)
args = getTopologyInfo_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopologyInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyInfo failed: unknown result");
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyConf(id)
return self.recv_getTopologyConf()
def send_getTopologyConf(self, id):
self._oprot.writeMessageBegin('getTopologyConf', TMessageType.CALL, self._seqid)
args = getTopologyConf_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyConf(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopologyConf_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyConf failed: unknown result");
def getTopology(self, id):
"""
Parameters:
- id
"""
self.send_getTopology(id)
return self.recv_getTopology()
def send_getTopology(self, id):
self._oprot.writeMessageBegin('getTopology', TMessageType.CALL, self._seqid)
args = getTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopology failed: unknown result");
def getUserTopology(self, id):
"""
Parameters:
- id
"""
self.send_getUserTopology(id)
return self.recv_getUserTopology()
def send_getUserTopology(self, id):
self._oprot.writeMessageBegin('getUserTopology', TMessageType.CALL, self._seqid)
args = getUserTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getUserTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getUserTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserTopology failed: unknown result");
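# A minimal client-side usage sketch, not part of the generated code: it
# assumes a Nimbus-style Thrift server is reachable over the standard framed
# binary transport stack. The host, port, and helper name are illustrative
# assumptions only.
def _example_get_cluster_info(host='localhost', port=6627):
  # Build the transport/protocol stack from the thrift package and issue a
  # single getClusterInfo() call through the generated Client.
  from thrift.transport import TSocket
  socket = TSocket.TSocket(host, port)
  transport = TTransport.TFramedTransport(socket)
  protocol = TBinaryProtocol.TBinaryProtocol(transport)
  client = Client(protocol)
  transport.open()
  try:
    return client.getClusterInfo()
  finally:
    transport.close()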
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["submitTopology"] = Processor.process_submitTopology
self._processMap["submitTopologyWithOpts"] = Processor.process_submitTopologyWithOpts
self._processMap["killTopology"] = Processor.process_killTopology
self._processMap["killTopologyWithOpts"] = Processor.process_killTopologyWithOpts
self._processMap["activate"] = Processor.process_activate
self._processMap["deactivate"] = Processor.process_deactivate
self._processMap["rebalance"] = Processor.process_rebalance
self._processMap["beginFileUpload"] = Processor.process_beginFileUpload
self._processMap["uploadChunk"] = Processor.process_uploadChunk
self._processMap["finishFileUpload"] = Processor.process_finishFileUpload
self._processMap["beginFileDownload"] = Processor.process_beginFileDownload
self._processMap["downloadChunk"] = Processor.process_downloadChunk
self._processMap["getNimbusConf"] = Processor.process_getNimbusConf
self._processMap["getClusterInfo"] = Processor.process_getClusterInfo
self._processMap["getTopologyInfo"] = Processor.process_getTopologyInfo
self._processMap["getTopologyConf"] = Processor.process_getTopologyConf
self._processMap["getTopology"] = Processor.process_getTopology
self._processMap["getUserTopology"] = Processor.process_getUserTopology
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_submitTopology(self, seqid, iprot, oprot):
args = submitTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopology_result()
try:
self._handler.submitTopology(args.name, args.uploadedJarLocation, args.jsonConf, args.topology)
except AlreadyAliveException as e:
result.e = e
except InvalidTopologyException as ite:
result.ite = ite
oprot.writeMessageBegin("submitTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_submitTopologyWithOpts(self, seqid, iprot, oprot):
args = submitTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopologyWithOpts_result()
try:
self._handler.submitTopologyWithOpts(args.name, args.uploadedJarLocation, args.jsonConf, args.topology, args.options)
except AlreadyAliveException as e:
result.e = e
except InvalidTopologyException as ite:
result.ite = ite
oprot.writeMessageBegin("submitTopologyWithOpts", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopology(self, seqid, iprot, oprot):
args = killTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopology_result()
try:
self._handler.killTopology(args.name)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("killTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopologyWithOpts(self, seqid, iprot, oprot):
args = killTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopologyWithOpts_result()
try:
self._handler.killTopologyWithOpts(args.name, args.options)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("killTopologyWithOpts", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_activate(self, seqid, iprot, oprot):
args = activate_args()
args.read(iprot)
iprot.readMessageEnd()
result = activate_result()
try:
self._handler.activate(args.name)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("activate", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deactivate(self, seqid, iprot, oprot):
args = deactivate_args()
args.read(iprot)
iprot.readMessageEnd()
result = deactivate_result()
try:
self._handler.deactivate(args.name)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("deactivate", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_rebalance(self, seqid, iprot, oprot):
args = rebalance_args()
args.read(iprot)
iprot.readMessageEnd()
result = rebalance_result()
try:
self._handler.rebalance(args.name, args.options)
except NotAliveException as e:
result.e = e
except InvalidTopologyException as ite:
result.ite = ite
oprot.writeMessageBegin("rebalance", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginFileUpload(self, seqid, iprot, oprot):
args = beginFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginFileUpload_result()
result.success = self._handler.beginFileUpload()
oprot.writeMessageBegin("beginFileUpload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadChunk(self, seqid, iprot, oprot):
args = uploadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadChunk_result()
self._handler.uploadChunk(args.location, args.chunk)
oprot.writeMessageBegin("uploadChunk", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_finishFileUpload(self, seqid, iprot, oprot):
args = finishFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = finishFileUpload_result()
self._handler.finishFileUpload(args.location)
oprot.writeMessageBegin("finishFileUpload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginFileDownload(self, seqid, iprot, oprot):
args = beginFileDownload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginFileDownload_result()
result.success = self._handler.beginFileDownload(args.file)
oprot.writeMessageBegin("beginFileDownload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadChunk(self, seqid, iprot, oprot):
args = downloadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadChunk_result()
result.success = self._handler.downloadChunk(args.id)
oprot.writeMessageBegin("downloadChunk", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getNimbusConf(self, seqid, iprot, oprot):
args = getNimbusConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getNimbusConf_result()
result.success = self._handler.getNimbusConf()
oprot.writeMessageBegin("getNimbusConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getClusterInfo(self, seqid, iprot, oprot):
args = getClusterInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getClusterInfo_result()
result.success = self._handler.getClusterInfo()
oprot.writeMessageBegin("getClusterInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyInfo(self, seqid, iprot, oprot):
args = getTopologyInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyInfo_result()
try:
result.success = self._handler.getTopologyInfo(args.id)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("getTopologyInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyConf(self, seqid, iprot, oprot):
args = getTopologyConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyConf_result()
try:
result.success = self._handler.getTopologyConf(args.id)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("getTopologyConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopology(self, seqid, iprot, oprot):
args = getTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopology_result()
try:
result.success = self._handler.getTopology(args.id)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("getTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getUserTopology(self, seqid, iprot, oprot):
args = getUserTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getUserTopology_result()
try:
result.success = self._handler.getUserTopology(args.id)
except NotAliveException as e:
result.e = e
oprot.writeMessageBegin("getUserTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
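# A minimal server-side sketch, not part of the generated code: it wires a
# handler object implementing Iface into Processor and serves it with the
# stock single-threaded server from the thrift package. The helper name and
# port are illustrative assumptions only.
def _example_serve(handler, port=6627):
  from thrift.transport import TSocket
  from thrift.server import TServer
  processor = Processor(handler)
  server_transport = TSocket.TServerSocket(port=port)
  tfactory = TTransport.TFramedTransportFactory()
  pfactory = TBinaryProtocol.TBinaryProtocolFactory()
  # TSimpleServer handles one connection at a time; it is enough to exercise
  # the Processor dispatch table defined above.
  server = TServer.TSimpleServer(processor, server_transport, tfactory, pfactory)
  server.serve()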
# HELPER FUNCTIONS AND STRUCTURES
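# Each *_args/*_result struct below carries a thrift_spec tuple indexed by
# field id; every entry is (field id, TType, field name, type arguments,
# default value). The same tuple is handed to fastbinary for the accelerated
# encode/decode paths in read() and write().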
class submitTopology_args:
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
(3, TType.STRING, 'jsonConf', None, None, ), # 3
(4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
)
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation)
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf)
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopology_result:
"""
Attributes:
- e
- ite
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
)
def __init__(self, e=None, ite=None,):
self.e = e
self.ite = ite
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopologyWithOpts_args:
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
(3, TType.STRING, 'jsonConf', None, None, ), # 3
(4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'options', (SubmitOptions, SubmitOptions.thrift_spec), None, ), # 5
)
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None, options=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.options = SubmitOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation)
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf)
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 5)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopologyWithOpts_result:
"""
Attributes:
- e
- ite
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
)
def __init__(self, e=None, ite=None,):
self.e = e
self.ite = ite
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopology_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopology_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopologyWithOpts_args:
"""
Attributes:
- name
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRUCT, 'options', (KillOptions, KillOptions.thrift_spec), None, ), # 2
)
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = KillOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopologyWithOpts_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class activate_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('activate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class activate_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('activate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deactivate_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deactivate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deactivate_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deactivate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class rebalance_args:
"""
Attributes:
- name
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRUCT, 'options', (RebalanceOptions, RebalanceOptions.thrift_spec), None, ), # 2
)
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = RebalanceOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('rebalance_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class rebalance_result:
"""
Attributes:
- e
- ite
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
)
def __init__(self, e=None, ite=None,):
self.e = e
self.ite = ite
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('rebalance_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileUpload_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileUpload_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileUpload_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileUpload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadChunk_args:
"""
Attributes:
- location
- chunk
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'location', None, None, ), # 1
(2, TType.STRING, 'chunk', None, None, ), # 2
)
def __init__(self, location=None, chunk=None,):
self.location = location
self.chunk = chunk
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.chunk = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadChunk_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location)
oprot.writeFieldEnd()
if self.chunk is not None:
oprot.writeFieldBegin('chunk', TType.STRING, 2)
oprot.writeString(self.chunk)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadChunk_result:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadChunk_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class finishFileUpload_args:
"""
Attributes:
- location
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'location', None, None, ), # 1
)
def __init__(self, location=None,):
self.location = location
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('finishFileUpload_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class finishFileUpload_result:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('finishFileUpload_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileDownload_args:
"""
Attributes:
- file
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'file', None, None, ), # 1
)
def __init__(self, file=None,):
self.file = file
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.file = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileDownload_args')
if self.file is not None:
oprot.writeFieldBegin('file', TType.STRING, 1)
oprot.writeString(self.file)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileDownload_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileDownload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class downloadChunk_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('downloadChunk_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class downloadChunk_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('downloadChunk_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNimbusConf_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNimbusConf_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNimbusConf_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNimbusConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getClusterInfo_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getClusterInfo_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getClusterInfo_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ClusterSummary, ClusterSummary.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ClusterSummary()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getClusterInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyInfo_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyInfo_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyInfo_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TopologyInfo, TopologyInfo.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyConf_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyConf_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyConf_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopology_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopology_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (StormTopology, StormTopology.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getUserTopology_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getUserTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getUserTopology_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (StormTopology, StormTopology.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getUserTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
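# --- Usage sketch (editor's addition, not part of the generated code) -------
# Minimal example of calling the service whose per-method argument/result
# wrappers are defined above. It assumes the usual layout of a Thrift-generated
# module (a `Client` class defined earlier in this file) and an illustrative
# host/port; adjust both to your deployment.
if __name__ == '__main__':
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    transport = TTransport.TFramedTransport(TSocket.TSocket('nimbus-host', 6627))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Client(protocol)  # generated Client class, assumed to be defined above
    transport.open()
    print(client.getClusterInfo())  # ClusterSummary struct (see getClusterInfo_result)
    print(client.getNimbusConf())   # nimbus configuration serialized as a string
    transport.close()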
# ---- per-file metadata and quality-signal values omitted; next file: hero/hero.py (repo tmfds/dfk, MIT license, 31,822 bytes, Python) ----
import copy
from web3 import Web3
from .utils import utils as hero_utils
CONTRACT_ADDRESS = '0x5f753dcdf9b1ad9aabc1346614d1f4746fd6ce5c'
ABI = """
[
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"approved","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Approval","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"operator","type":"address"},{"indexed":false,"internalType":"bool","name":"approved","type":"bool"}],"name":"ApprovalForAll","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":false,"internalType":"uint256","name":"heroId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"summonerId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"assistantId","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"statGenes","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"visualGenes","type":"uint256"}],"name":"HeroSummoned","type":"event"},
{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Paused","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"previousAdminRole","type":"bytes32"},{"indexed":true,"internalType":"bytes32","name":"newAdminRole","type":"bytes32"}],"name":"RoleAdminChanged","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleGranted","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes32","name":"role","type":"bytes32"},{"indexed":true,"internalType":"address","name":"account","type":"address"},{"indexed":true,"internalType":"address","name":"sender","type":"address"}],"name":"RoleRevoked","type":"event"},
{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":true,"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"Transfer","type":"event"},
{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"account","type":"address"}],"name":"Unpaused","type":"event"},
{"inputs":[],"name":"DEFAULT_ADMIN_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"HERO_MODERATOR_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"MINTER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"MODERATOR_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"PAUSER_ROLE","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"approve","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"owner","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"uint256","name":"_statGenes","type":"uint256"},{"internalType":"uint256","name":"_visualGenes","type":"uint256"},
{"internalType":"enum IHeroTypes.Rarity","name":"_rarity","type":"uint8"},
{"internalType":"bool","name":"_shiny","type":"bool"},{"components":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"uint256","name":"summonerId","type":"uint256"},{"internalType":"uint256","name":"assistantId","type":"uint256"},{"internalType":"uint16","name":"generation","type":"uint16"},{"internalType":"uint256","name":"createdBlock","type":"uint256"},{"internalType":"uint256","name":"heroId","type":"uint256"},{"internalType":"uint8","name":"summonerTears","type":"uint8"},{"internalType":"uint8","name":"assistantTears","type":"uint8"},{"internalType":"address","name":"bonusItem","type":"address"},{"internalType":"uint32","name":"maxSummons","type":"uint32"},{"internalType":"uint32","name":"firstName","type":"uint32"},{"internalType":"uint32","name":"lastName","type":"uint32"},{"internalType":"uint8","name":"shinyStyle","type":"uint8"}],"internalType":"struct ICrystalTypes.HeroCrystal","name":"_crystal","type":"tuple"}],"name":"createHero","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"getApproved","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"uint256","name":"_id","type":"uint256"}],"name":"getHero","outputs":[{"components":[{"internalType":"uint256","name":"id","type":"uint256"},{"components":[{"internalType":"uint256","name":"summonedTime","type":"uint256"},{"internalType":"uint256","name":"nextSummonTime","type":"uint256"},{"internalType":"uint256","name":"summonerId","type":"uint256"},{"internalType":"uint256","name":"assistantId","type":"uint256"},{"internalType":"uint32","name":"summons","type":"uint32"},{"internalType":"uint32","name":"maxSummons","type":"uint32"}],"internalType":"struct IHeroTypes.SummoningInfo","name":"summoningInfo","type":"tuple"},{"components":[{"internalType":"uint256","name":"statGenes","type":"uint256"},{"internalType":"uint256","name":"visualGenes","type":"uint256"},{"internalType":"enum IHeroTypes.Rarity","name":"rarity","type":"uint8"},{"internalType":"bool","name":"shiny","type":"bool"},{"internalType":"uint16","name":"generation","type":"uint16"},{"internalType":"uint32","name":"firstName","type":"uint32"},{"internalType":"uint32","name":"lastName","type":"uint32"},{"internalType":"uint8","name":"shinyStyle","type":"uint8"},{"internalType":"uint8","name":"class","type":"uint8"},{"internalType":"uint8","name":"subClass","type":"uint8"}],"internalType":"struct IHeroTypes.HeroInfo","name":"info","type":"tuple"},{"components":[{"internalType":"uint256","name":"staminaFullAt","type":"uint256"},{"internalType":"uint256","name":"hpFullAt","type":"uint256"},{"internalType":"uint256","name":"mpFullAt","type":"uint256"},{"internalType":"uint16","name":"level","type":"uint16"},{"internalType":"uint64","name":"xp","type":"uint64"},{"internalType":"address","name":"currentQuest","type":"address"},{"internalType":"uint8","name":"sp","type":"uint8"},{"internalType":"enum IHeroTypes.HeroStatus","name":"status","type":"uint8"}],"internalType":"struct IHeroTypes.HeroState","name":"state","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hp","type":"uint16"},{"internalType":"uint16","name":"mp","type":"uint16"},{"internalType":"uint16","name":"stamina","type":"uint16"}],"internalType":"struct IHeroTypes.HeroStats","name":"stats","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hpSm","type":"uint16"},{"internalType":"uint16","name":"hpRg","type":"uint16"},{"internalType":"uint16","name":"hpLg","type":"uint16"},{"internalType":"uint16","name":"mpSm","type":"uint16"},{"internalType":"uint16","name":"mpRg","type":"uint16"},{"internalType":"uint16","name":"mpLg","type":"uint16"}],"internalType":"struct 
IHeroTypes.HeroStatGrowth","name":"primaryStatGrowth","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hpSm","type":"uint16"},{"internalType":"uint16","name":"hpRg","type":"uint16"},{"internalType":"uint16","name":"hpLg","type":"uint16"},{"internalType":"uint16","name":"mpSm","type":"uint16"},{"internalType":"uint16","name":"mpRg","type":"uint16"},{"internalType":"uint16","name":"mpLg","type":"uint16"}],"internalType":"struct IHeroTypes.HeroStatGrowth","name":"secondaryStatGrowth","type":"tuple"},{"components":[{"internalType":"uint16","name":"mining","type":"uint16"},{"internalType":"uint16","name":"gardening","type":"uint16"},{"internalType":"uint16","name":"foraging","type":"uint16"},{"internalType":"uint16","name":"fishing","type":"uint16"}],"internalType":"struct IHeroTypes.HeroProfessions","name":"professions","type":"tuple"}],"internalType":"struct IHeroTypes.Hero","name":"","type":"tuple"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleAdmin","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"getRoleMember","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"name":"getRoleMemberCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"address","name":"_address","type":"address"}],"name":"getUserHeroes","outputs":[{"internalType":"uint256[]","name":"","type":"uint256[]"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"grantRole","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"hasRole","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"string","name":"_name","type":"string"},{"internalType":"string","name":"_symbol","type":"string"},{"internalType":"string","name":"_url","type":"string"},{"internalType":"address","name":"_statScienceAddress","type":"address"}],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string","name":"symbol","type":"string"},{"internalType":"string","name":"baseTokenURI","type":"string"}],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"operator","type":"address"}],"name":"isApprovedForAll","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"mint","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"ownerOf","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"pause","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[],"name":"paused","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"renounceRole","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"name":"revokeRole","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"},{"internalType":"bytes","name":"_data","type":"bytes"}],"name":"safeTransferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"operator","type":"address"},{"internalType":"bool","name":"approved","type":"bool"}],"name":"setApprovalForAll","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"_statScienceAddress","type":"address"}],"name":"setStatScienceAddress","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"index","type":"uint256"}],"name":"tokenByIndex","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"uint256","name":"index","type":"uint256"}],"name":"tokenOfOwnerByIndex","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},
{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},
{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"transferFrom","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[],"name":"unpause","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"components":[{"internalType":"uint256","name":"id","type":"uint256"},{"components":[{"internalType":"uint256","name":"summonedTime","type":"uint256"},{"internalType":"uint256","name":"nextSummonTime","type":"uint256"},{"internalType":"uint256","name":"summonerId","type":"uint256"},{"internalType":"uint256","name":"assistantId","type":"uint256"},{"internalType":"uint32","name":"summons","type":"uint32"},{"internalType":"uint32","name":"maxSummons","type":"uint32"}],"internalType":"struct IHeroTypes.SummoningInfo","name":"summoningInfo","type":"tuple"},{"components":[{"internalType":"uint256","name":"statGenes","type":"uint256"},{"internalType":"uint256","name":"visualGenes","type":"uint256"},{"internalType":"enum IHeroTypes.Rarity","name":"rarity","type":"uint8"},{"internalType":"bool","name":"shiny","type":"bool"},{"internalType":"uint16","name":"generation","type":"uint16"},{"internalType":"uint32","name":"firstName","type":"uint32"},{"internalType":"uint32","name":"lastName","type":"uint32"},{"internalType":"uint8","name":"shinyStyle","type":"uint8"},{"internalType":"uint8","name":"class","type":"uint8"},{"internalType":"uint8","name":"subClass","type":"uint8"}],"internalType":"struct IHeroTypes.HeroInfo","name":"info","type":"tuple"},{"components":[{"internalType":"uint256","name":"staminaFullAt","type":"uint256"},{"internalType":"uint256","name":"hpFullAt","type":"uint256"},{"internalType":"uint256","name":"mpFullAt","type":"uint256"},{"internalType":"uint16","name":"level","type":"uint16"},{"internalType":"uint64","name":"xp","type":"uint64"},{"internalType":"address","name":"currentQuest","type":"address"},{"internalType":"uint8","name":"sp","type":"uint8"},{"internalType":"enum IHeroTypes.HeroStatus","name":"status","type":"uint8"}],"internalType":"struct IHeroTypes.HeroState","name":"state","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hp","type":"uint16"},{"internalType":"uint16","name":"mp","type":"uint16"},{"internalType":"uint16","name":"stamina","type":"uint16"}],"internalType":"struct IHeroTypes.HeroStats","name":"stats","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hpSm","type":"uint16"},{"internalType":"uint16","name":"hpRg","type":"uint16"},{"internalType":"uint16","name":"hpLg","type":"uint16"},{"internalType":"uint16","name":"mpSm","type":"uint16"},{"internalType":"uint16","name":"mpRg","type":"uint16"},{"internalType":"uint16","name":"mpLg","type":"uint16"}],"internalType":"struct 
IHeroTypes.HeroStatGrowth","name":"primaryStatGrowth","type":"tuple"},{"components":[{"internalType":"uint16","name":"strength","type":"uint16"},{"internalType":"uint16","name":"intelligence","type":"uint16"},{"internalType":"uint16","name":"wisdom","type":"uint16"},{"internalType":"uint16","name":"luck","type":"uint16"},{"internalType":"uint16","name":"agility","type":"uint16"},{"internalType":"uint16","name":"vitality","type":"uint16"},{"internalType":"uint16","name":"endurance","type":"uint16"},{"internalType":"uint16","name":"dexterity","type":"uint16"},{"internalType":"uint16","name":"hpSm","type":"uint16"},{"internalType":"uint16","name":"hpRg","type":"uint16"},{"internalType":"uint16","name":"hpLg","type":"uint16"},{"internalType":"uint16","name":"mpSm","type":"uint16"},{"internalType":"uint16","name":"mpRg","type":"uint16"},{"internalType":"uint16","name":"mpLg","type":"uint16"}],"internalType":"struct IHeroTypes.HeroStatGrowth","name":"secondaryStatGrowth","type":"tuple"},{"components":[{"internalType":"uint16","name":"mining","type":"uint16"},{"internalType":"uint16","name":"gardening","type":"uint16"},{"internalType":"uint16","name":"foraging","type":"uint16"},{"internalType":"uint16","name":"fishing","type":"uint16"}],"internalType":"struct IHeroTypes.HeroProfessions","name":"professions","type":"tuple"}],"internalType":"struct IHeroTypes.Hero","name":"_hero","type":"tuple"}],"name":"updateHero","outputs":[],"stateMutability":"nonpayable","type":"function"},
{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"uint256","name":"","type":"uint256"}],"name":"userHeroes","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"}
]
"""
def block_explorer_link(txid):
return 'https://explorer.harmony.one/tx/' + str(txid)
def transfer(hero_id, owner_private_key, owner_nonce, receiver_address, gas_price_gwei, rpc_address, logger):
    """Transfer the hero NFT to receiver_address and block until the transaction is mined."""
w3 = Web3(Web3.HTTPProvider(rpc_address))
account = w3.eth.account.privateKeyToAccount(owner_private_key)
w3.eth.default_account = account.address
contract_address = Web3.toChecksumAddress(CONTRACT_ADDRESS)
contract = w3.eth.contract(contract_address, abi=ABI)
owner = contract.functions.ownerOf(hero_id).call()
logger.info("Hero's owner " + str(owner))
if owner != account.address:
raise Exception("Owner mismatch")
tx = contract.functions.transferFrom(owner, receiver_address, hero_id).buildTransaction(
{'gasPrice': w3.toWei(gas_price_gwei, 'gwei'), 'nonce': owner_nonce})
logger.debug("Signing transaction")
signed_tx = w3.eth.account.sign_transaction(tx, private_key=owner_private_key)
logger.debug("Sending transaction " + str(tx))
ret = w3.eth.send_raw_transaction(signed_tx.rawTransaction)
logger.debug("Transaction successfully sent !")
logger.info("Waiting for transaction " + block_explorer_link(signed_tx.hash.hex()) + " to be mined")
tx_receipt = w3.eth.wait_for_transaction_receipt(transaction_hash=signed_tx.hash, timeout=24 * 3600,
poll_latency=3)
logger.info("Transaction mined !")
logger.info(str(tx_receipt))
def get_owner(hero_id, rpc_address):
w3 = Web3(Web3.HTTPProvider(rpc_address))
contract_address = Web3.toChecksumAddress(CONTRACT_ADDRESS)
contract = w3.eth.contract(contract_address, abi=ABI)
return str(contract.functions.ownerOf(hero_id).call())
def get_users_heroes(user_address, rpc_address):
w3 = Web3(Web3.HTTPProvider(rpc_address))
contract_address = Web3.toChecksumAddress(CONTRACT_ADDRESS)
contract = w3.eth.contract(contract_address, abi=ABI)
return contract.functions.getUserHeroes(Web3.toChecksumAddress(user_address)).call()
def is_approved_for_all(owner, operator, rpc_address):
w3 = Web3(Web3.HTTPProvider(rpc_address))
contract_address = Web3.toChecksumAddress(CONTRACT_ADDRESS)
contract = w3.eth.contract(contract_address, abi=ABI)
return contract.functions.isApprovedForAll(Web3.toChecksumAddress(owner), Web3.toChecksumAddress(operator)).call()
def get_hero(hero_id, rpc_address):
    """Fetch a hero from the contract and unpack the returned nested tuple into a dict."""
w3 = Web3(Web3.HTTPProvider(rpc_address))
contract_address = Web3.toChecksumAddress(CONTRACT_ADDRESS)
contract = w3.eth.contract(contract_address, abi=ABI)
contract_entry = contract.functions.getHero(hero_id).call()
hero = {}
tuple_index = 0
hero['id'] = contract_entry[tuple_index]
tuple_index = tuple_index + 1
# SummoningInfo
summoning_info = {}
summoning_info['summonedTime'] = contract_entry[tuple_index][0]
summoning_info['nextSummonTime'] = contract_entry[tuple_index][1]
summoning_info['summonerId'] = contract_entry[tuple_index][2]
summoning_info['assistantId'] = contract_entry[tuple_index][3]
summoning_info['summons'] = contract_entry[tuple_index][4]
summoning_info['maxSummons'] = contract_entry[tuple_index][5]
hero['summoningInfo'] = summoning_info
tuple_index = tuple_index + 1
# HeroInfo
hero_info = {}
hero_info['statGenes'] = contract_entry[tuple_index][0]
hero_info['visualGenes'] = contract_entry[tuple_index][1]
hero_info['rarity'] = contract_entry[tuple_index][2]
hero_info['shiny'] = contract_entry[tuple_index][3]
hero_info['generation'] = contract_entry[tuple_index][4]
hero_info['firstName'] = contract_entry[tuple_index][5]
hero_info['lastName'] = contract_entry[tuple_index][6]
hero_info['shinyStyle'] = contract_entry[tuple_index][7]
hero_info['class'] = contract_entry[tuple_index][8]
hero_info['subClass'] = contract_entry[tuple_index][9]
hero['info'] = hero_info
tuple_index = tuple_index + 1
# HeroState
hero_state = {}
hero_state['staminaFullAt'] = contract_entry[tuple_index][0]
hero_state['hpFullAt'] = contract_entry[tuple_index][1]
hero_state['mpFullAt'] = contract_entry[tuple_index][2]
hero_state['level'] = contract_entry[tuple_index][3]
hero_state['xp'] = contract_entry[tuple_index][4]
hero_state['currentQuest'] = contract_entry[tuple_index][5]
hero_state['sp'] = contract_entry[tuple_index][6]
hero_state['status'] = contract_entry[tuple_index][7]
hero['state'] = hero_state
tuple_index = tuple_index + 1
# HeroStats
hero_stats = {}
hero_stats['strength'] = contract_entry[tuple_index][0]
hero_stats['intelligence'] = contract_entry[tuple_index][1]
hero_stats['wisdom'] = contract_entry[tuple_index][2]
hero_stats['luck'] = contract_entry[tuple_index][3]
hero_stats['agility'] = contract_entry[tuple_index][4]
hero_stats['vitality'] = contract_entry[tuple_index][5]
hero_stats['endurance'] = contract_entry[tuple_index][6]
hero_stats['dexterity'] = contract_entry[tuple_index][7]
hero_stats['hp'] = contract_entry[tuple_index][8]
hero_stats['mp'] = contract_entry[tuple_index][9]
hero_stats['stamina'] = contract_entry[tuple_index][10]
hero['stats'] = hero_stats
tuple_index = tuple_index + 1
# primary HeroStatGrowth
hero_primary_stat_growth = {}
hero_primary_stat_growth['strength'] = contract_entry[tuple_index][0]
hero_primary_stat_growth['intelligence'] = contract_entry[tuple_index][1]
hero_primary_stat_growth['wisdom'] = contract_entry[tuple_index][2]
hero_primary_stat_growth['luck'] = contract_entry[tuple_index][3]
hero_primary_stat_growth['agility'] = contract_entry[tuple_index][4]
hero_primary_stat_growth['vitality'] = contract_entry[tuple_index][5]
hero_primary_stat_growth['endurance'] = contract_entry[tuple_index][6]
hero_primary_stat_growth['dexterity'] = contract_entry[tuple_index][7]
hero_primary_stat_growth['hpSm'] = contract_entry[tuple_index][8]
hero_primary_stat_growth['hpRg'] = contract_entry[tuple_index][9]
hero_primary_stat_growth['hpLg'] = contract_entry[tuple_index][10]
hero_primary_stat_growth['mpSm'] = contract_entry[tuple_index][11]
hero_primary_stat_growth['mpRg'] = contract_entry[tuple_index][12]
hero_primary_stat_growth['mpLg'] = contract_entry[tuple_index][13]
hero['primaryStatGrowth'] = hero_primary_stat_growth
tuple_index = tuple_index + 1
# secondary HeroStatGrowth
hero_secondary_stat_growth = {}
hero_secondary_stat_growth['strength'] = contract_entry[tuple_index][0]
hero_secondary_stat_growth['intelligence'] = contract_entry[tuple_index][1]
hero_secondary_stat_growth['wisdom'] = contract_entry[tuple_index][2]
hero_secondary_stat_growth['luck'] = contract_entry[tuple_index][3]
hero_secondary_stat_growth['agility'] = contract_entry[tuple_index][4]
hero_secondary_stat_growth['vitality'] = contract_entry[tuple_index][5]
hero_secondary_stat_growth['endurance'] = contract_entry[tuple_index][6]
hero_secondary_stat_growth['dexterity'] = contract_entry[tuple_index][7]
hero_secondary_stat_growth['hpSm'] = contract_entry[tuple_index][8]
hero_secondary_stat_growth['hpRg'] = contract_entry[tuple_index][9]
hero_secondary_stat_growth['hpLg'] = contract_entry[tuple_index][10]
hero_secondary_stat_growth['mpSm'] = contract_entry[tuple_index][11]
hero_secondary_stat_growth['mpRg'] = contract_entry[tuple_index][12]
hero_secondary_stat_growth['mpLg'] = contract_entry[tuple_index][13]
hero['secondaryStatGrowth'] = hero_secondary_stat_growth
tuple_index = tuple_index + 1
# HeroProfessions
hero_professions = {}
hero_professions['mining'] = contract_entry[tuple_index][0]
hero_professions['gardening'] = contract_entry[tuple_index][1]
hero_professions['foraging'] = contract_entry[tuple_index][2]
hero_professions['fishing'] = contract_entry[tuple_index][3]
hero['professions'] = hero_professions
return hero
def human_readable_hero(raw_hero, hero_male_first_names=None, hero_female_first_names=None, hero_last_names=None):
    """Return a deep copy of raw_hero with rarity, class and genes decoded and, if name lists are given, names resolved."""
readable_hero = copy.deepcopy(raw_hero)
readable_hero['info']['rarity'] = hero_utils.parse_rarity(readable_hero['info']['rarity'])
readable_hero['info']['class'] = hero_utils.parse_class(readable_hero['info']['class'])
readable_hero['info']['subClass'] = hero_utils.parse_class(readable_hero['info']['subClass'])
# visualGenes
readable_hero['info']['visualGenes'] = hero_utils.parse_visual_genes(readable_hero['info']['visualGenes'])
# statsGenes
readable_hero['info']['statGenes'] = hero_utils.parse_stat_genes(readable_hero['info']['statGenes'])
# names
if readable_hero['info']['visualGenes']['gender'] == 'male':
if hero_male_first_names is not None:
readable_hero['info']['firstName'] = hero_male_first_names[readable_hero['info']['firstName']]
else:
if hero_female_first_names is not None:
readable_hero['info']['firstName'] = hero_female_first_names[readable_hero['info']['firstName']]
if hero_last_names is not None:
readable_hero['info']['lastName'] = hero_last_names[readable_hero['info']['lastName']]
return readable_hero
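# --- Usage sketch (editor's addition) ----------------------------------------
# Minimal example of reading a hero through a public Harmony RPC endpoint and
# printing the decoded version. The RPC URL and hero id are illustrative only.
if __name__ == '__main__':
    rpc = 'https://api.harmony.one'
    raw_hero = get_hero(1, rpc)
    print('owner:', get_owner(1, rpc))
    print(human_readable_hero(raw_hero))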
# ---- per-file metadata and quality-signal values omitted; next file: build_osx/copy_runtime.py (repo ozsolarwind/SAM, MIT license, 918 bytes, Python) ----
import os
import shutil
SOURCE_DIR = '../deploy/runtime'
TARGET_DIR = 'SAM.app/Contents/runtime'
if os.path.exists(TARGET_DIR):
shutil.rmtree(TARGET_DIR)
shutil.copytree(SOURCE_DIR, TARGET_DIR, ignore=shutil.ignore_patterns('.git'))
SOURCE_DIR = '../deploy/solar_resource'
TARGET_DIR = 'SAM.app/Contents/solar_resource'
if os.path.exists(TARGET_DIR):
shutil.rmtree(TARGET_DIR)
shutil.copytree(SOURCE_DIR, TARGET_DIR, ignore=shutil.ignore_patterns('.git'))
SOURCE_DIR = '../deploy/wind_resource'
TARGET_DIR = 'SAM.app/Contents/wind_resource'
if os.path.exists(TARGET_DIR):
shutil.rmtree(TARGET_DIR)
shutil.copytree(SOURCE_DIR, TARGET_DIR, ignore=shutil.ignore_patterns('.git'))
SOURCE_DIR = '../deploy/libraries'
TARGET_DIR = 'SAM.app/Contents/libraries'
if os.path.exists(TARGET_DIR):
shutil.rmtree(TARGET_DIR)
shutil.copytree(SOURCE_DIR, TARGET_DIR, ignore=shutil.ignore_patterns('.git'))
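The four copy blocks above differ only in the directory name; a possible refactor is a loop over the shared names. This is a sketch, not part of the original script:
import os
import shutil
# Equivalent behaviour: remove any stale target, then copy each deploy directory into the app bundle.
for name in ('runtime', 'solar_resource', 'wind_resource', 'libraries'):
    source_dir = f'../deploy/{name}'
    target_dir = f'SAM.app/Contents/{name}'
    if os.path.exists(target_dir):
        shutil.rmtree(target_dir)
    shutil.copytree(source_dir, target_dir, ignore=shutil.ignore_patterns('.git'))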
| 26.228571
| 78
| 0.769063
| 132
| 918
| 5.106061
| 0.174242
| 0.21365
| 0.178042
| 0.089021
| 0.873887
| 0.805638
| 0.71365
| 0.71365
| 0.71365
| 0.71365
| 0
| 0
| 0.087146
| 918
| 34
| 79
| 27
| 0.804296
| 0
| 0
| 0.545455
| 0
| 0
| 0.228758
| 0.172113
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dba9613244bd2e35eb89625f766b4f652fe90d8
| 2,901
|
py
|
Python
|
parse_scripts/import_osm.py
|
nokout/au_address
|
07138ecd8fedab9566435b609cb8124b67ad42ff
|
[
"MIT"
] | 1
|
2018-11-16T15:41:38.000Z
|
2018-11-16T15:41:38.000Z
|
training/parse_scripts/import_osm.py
|
crccheck/us-address-parser
|
826fd365cba065a0588fa013cddbb23a8dac27a9
|
[
"MIT"
] | 6
|
2016-10-05T11:21:36.000Z
|
2016-10-18T15:11:20.000Z
|
parse_scripts/import_osm.py
|
nokout/au_address
|
07138ecd8fedab9566435b609cb8124b67ad42ff
|
[
"MIT"
] | null | null | null |
import requests
import codecs
query1 = """<union>
<query type="way">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:state"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="way">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:city"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="way">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:postcode"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="node">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:state"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="node">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:city"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="node">
<has-kv k="addr:housenumber"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:type"/>
<has-kv k="addr:postcode"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
</union>
<print/>""" % ((-70.000000, 50.000000, 25.000000, -125.000000) * 6)
r1 = requests.post('http://overpass-api.de/api/interpreter/', data=query1)
r1.encoding = 'utf-8'
f = codecs.open('data/osm_data.xml', encoding='utf-8', mode='w+')
f.write(r1.text)
query2 = """<union>
<query type="way">
<has-kv k="addr:street"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:prefix"/>
<has-kv k="addr:street:type"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="node">
<has-kv k="addr:street"/>
<has-kv k="addr:street:name"/>
<has-kv k="addr:street:prefix"/>
<has-kv k="addr:street:type"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
</union>
<print/>""" % ((-87.61309146881104, 41.890042371392965, 41.87234107841773, -87.64235973358154) * 2)
#r2 = requests.post('http://overpass-api.de/api/interpreter/', data=query2)
#f = codecs.open("data/osm_data_street.xml", "wb", "utf-8")
#r2.encoding = 'utf-8'
#f.write(r2.text)
query3 = """<union>
<query type="way">
<has-kv k="addr:full" regv="^[0-9]+.*[a-z]+.*[0-9]{5}.*"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
<query type="node">
<has-kv k="addr:full" regv="^[0-9]+.*[a-z]+.*[0-9]{5}.*"/>
<bbox-query e="%s" n="%s" s="%s" w="%s"/>
</query>
</union>
<print/>
""" % ((-70.000000, 50.000000, 25.000000, -125.000000) * 2)
if __name__ == '__main__':
r3 = requests.post('http://overpass-api.de/api/interpreter/', data=query3)
f = codecs.open("data/osm_data_full_addr.xml", "wb", "utf-8")
r3.encoding = 'utf-8'
f.write(r3.text)
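The script repeats the same download-and-write pattern for each query; a small helper sketch that factors it out and closes the output file properly (the helper name is hypothetical, the endpoint is the one used above):
import codecs
import requests

OVERPASS_URL = 'http://overpass-api.de/api/interpreter/'

def download_osm(query, out_path):
    # POST an Overpass query and write the UTF-8 response to out_path.
    response = requests.post(OVERPASS_URL, data=query)
    response.encoding = 'utf-8'
    with codecs.open(out_path, mode='w+', encoding='utf-8') as handle:
        handle.write(response.text)

# e.g. download_osm(query3, 'data/osm_data_full_addr.xml')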
| 28.441176
| 99
| 0.558083
| 482
| 2,901
| 3.329876
| 0.147303
| 0.105919
| 0.127103
| 0.211838
| 0.850467
| 0.828037
| 0.786916
| 0.786916
| 0.762617
| 0.674766
| 0
| 0.067155
| 0.153051
| 2,901
| 101
| 100
| 28.722772
| 0.586081
| 0.058256
| 0
| 0.788235
| 0
| 0.023529
| 0.802786
| 0.231305
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.023529
| 0.023529
| 0
| 0.023529
| 0.035294
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5d20ce5f76b15dfb9e999e6d113dbf7e789ecd49
| 10,386
|
py
|
Python
|
simpleredial/dataloader/fine_grained_test_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 36
|
2021-10-13T10:32:08.000Z
|
2022-03-20T07:50:05.000Z
|
simpleredial/dataloader/fine_grained_test_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 3
|
2021-11-24T10:57:59.000Z
|
2022-03-27T15:37:40.000Z
|
simpleredial/dataloader/fine_grained_test_dataloader.py
|
gmftbyGMFTBY/SimpleReDial-v1
|
f45b8eb23d1499ec617b4cc4f417d83d8f2b6bde
|
[
"MIT"
] | 1
|
2022-03-15T07:13:22.000Z
|
2022-03-15T07:13:22.000Z
|
from header import *
from .utils import *
from .util_func import *
'''Only for Testing'''
class FineGrainedTestDataset(Dataset):
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.splitext(path)[0]}_fg_test_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
self.data = []
for fix in ['brandenwang', 'lt', 'lt2']:
path = f'{args["root_dir"]}/data/{args["dataset"]}/fg-{fix}-test.txt'
data = read_text_data_utterances(path, lang=self.args['lang'])
for i in tqdm(range(0, len(data), 7)):
batch = data[i:i+7]
rids = []
for label, utterances in batch:
item = self.vocab.batch_encode_plus(utterances, add_special_tokens=False)['input_ids']
cids, rids_ = item[:-1], item[-1]
ids = []
for u in cids:
ids.extend(u + [self.sep])
ids.pop()
ids = ids[-(self.args['max_len']-2):] # ignore [CLS] and [SEP]
rids_ = rids_[:(self.args['res_max_len']-2)]
ids = [self.cls] + ids + [self.sep]
rids_ = [self.cls] + rids_ + [self.sep]
rids.append(rids_)
self.data.append({
'label': [b[0] for b in batch],
'ids': ids,
'rids': rids,
'text': ['\t'.join(b[1]) for b in batch],
'owner': fix,
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = torch.LongTensor(bundle['ids'])
rids = [torch.LongTensor(i) for i in bundle['rids']]
return ids, rids, bundle['label'], bundle['text'], bundle['owner']
def save(self):
data = torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
assert len(batch) == 1
ids, rids, label, text, owner = batch[0]
rids = pad_sequence(rids, batch_first=True, padding_value=self.pad)
rids_mask = generate_mask(rids)
label = torch.LongTensor(label)
ids, rids, rids_mask, label = to_cuda(ids, rids, rids_mask, label)
return {
'ids': ids,
'rids': rids,
'rids_mask': rids_mask,
'label': label,
'text': text,
'owner': owner,
}
class FineGrainedTestPositionWeightDataset(Dataset):
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
self.unk = self.vocab.convert_tokens_to_ids('[UNK]')
self.special_tokens = set([self.unk, self.cls, self.sep])
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.splitext(path)[0]}_fg_test_pw_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
self.data = []
for fix in ['brandenwang', 'lt', 'lt2']:
path = f'{args["root_dir"]}/data/{args["dataset"]}/fg-{fix}-test.txt'
data = read_text_data_utterances(path, lang=self.args['lang'])
for i in tqdm(range(0, len(data), 7)):
batch = data[i:i+7]
rids = []
for label, utterances in batch:
item = self.vocab.batch_encode_plus(utterances, add_special_tokens=False)['input_ids']
cids, rids_ = item[:-1], item[-1]
ids = []
position_w, w = [], self.args['min_w']
for u in cids:
ids.extend(u + [self.sep])
for token in u + [self.sep]:
if token not in self.special_tokens:
position_w.append(w)
else:
position_w.append(self.args['w_sp_token'])
w += self.args['w_delta']
ids.pop()
position_w.pop()
ids = ids[-(self.args['max_len']-2):] # ignore [CLS] and [SEP]
position_w = position_w[-(self.args['max_len']-2):]
rids_ = rids_[:(self.args['res_max_len']-2)]
ids = [self.cls] + ids + [self.sep]
position_w = [w-self.args['w_delta']] + position_w + [self.args['w_sp_token']]
rids_ = [self.cls] + rids_ + [self.sep]
rids.append(rids_)
self.data.append({
'label': [b[0] for b in batch],
'ids': ids,
'rids': rids,
'text': ['\t'.join(b[1]) for b in batch],
'position_w': position_w,
'owner': fix,
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = torch.LongTensor(bundle['ids'])
rids = [torch.LongTensor(i) for i in bundle['rids']]
position_w = torch.tensor(bundle['position_w'])
return ids, rids, position_w, bundle['label'], bundle['text'], bundle['owner']
def save(self):
data = torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
assert len(batch) == 1
ids, rids, pos_w, label, text, owner = batch[0]
rids = pad_sequence(rids, batch_first=True, padding_value=self.pad)
rids_mask = generate_mask(rids)
label = torch.LongTensor(label)
ids, rids, pos_w, rids_mask, label = to_cuda(ids, rids, pos_w, rids_mask, label)
return {
'ids': ids,
'rids': rids,
'rids_mask': rids_mask,
'pos_w': pos_w,
'label': label,
'text': text,
'owner': owner,
}
class FineGrainedTestInteractionDataset(Dataset):
def __init__(self, vocab, path, **args):
self.args = args
self.vocab = vocab
self.vocab.add_tokens(['[EOS]'])
self.pad = self.vocab.convert_tokens_to_ids('[PAD]')
self.sep = self.vocab.convert_tokens_to_ids('[SEP]')
self.eos = self.vocab.convert_tokens_to_ids('[EOS]')
self.cls = self.vocab.convert_tokens_to_ids('[CLS]')
suffix = args['tokenizer'].replace('/', '_')
self.pp_path = f'{os.path.splitext(path)[0]}_fg_interaction_test_{suffix}.pt'
if os.path.exists(self.pp_path):
self.data = torch.load(self.pp_path)
print(f'[!] load preprocessed file from {self.pp_path}')
return None
self.data = []
for fix in ['brandenwang', 'lt', 'lt2']:
path = f'{args["root_dir"]}/data/{args["dataset"]}/fg-{fix}-test.txt'
data = read_text_data_utterances(path, lang=self.args['lang'])
for i in tqdm(range(0, len(data), 7)):
batch = data[i:i+7]
rids = []
ids, tids = [], []
context, responses = [], []
for _, utterances in batch:
item = self.vocab.batch_encode_plus(utterances, add_special_tokens=False)['input_ids']
cids = []
for u in item[:-1]:
cids.extend(u + [self.eos])
cids.pop()
rids = item[-1]
truncate_pair(cids, rids, self.args['max_len'])
ids_ = [self.cls] + cids + [self.sep] + rids + [self.sep]
tids_ = [0] * (len(cids) + 2) + [1] * (len(rids) + 1)
ids.append(ids_)
tids.append(tids_)
responses.append(utterances[-1])
context = ' [SEP] '.join(utterances[:-1])
self.data.append({
'label': [b[0] for b in batch],
'ids': ids,
'tids': tids,
'context': context,
'responses': responses,
'owner': fix,
})
def __len__(self):
return len(self.data)
def __getitem__(self, i):
bundle = self.data[i]
ids = [torch.LongTensor(i) for i in bundle['ids']]
tids = [torch.LongTensor(i) for i in bundle['tids']]
context, responses = bundle['context'], bundle['responses']
return ids, tids, bundle['label'], context, responses, bundle['owner']
def save(self):
data = torch.save(self.data, self.pp_path)
print(f'[!] save preprocessed dataset into {self.pp_path}')
def collate(self, batch):
assert len(batch) == 1
ids, tids, label, context, responses, owner = batch[0]
ids = pad_sequence(ids, batch_first=True, padding_value=self.pad)
tids = pad_sequence(tids, batch_first=True, padding_value=self.pad)
label = torch.LongTensor(label)
mask = generate_mask(ids)
ids, tids, mask, label = to_cuda(ids, tids, mask, label)
return {
'ids': ids,
'tids': tids,
'mask': mask,
'label': label,
'owner': owner,
}
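Each collate method above asserts len(batch) == 1, so these datasets are meant to be driven with batch_size=1. A hedged wiring sketch follows; the tokenizer choice and the keyword arguments are inferred from the keys read in __init__ and may not match the project's real configuration:
# Assumptions: vocab is a HuggingFace tokenizer, and the arg keys mirror those used in __init__.
from torch.utils.data import DataLoader
from transformers import AutoTokenizer

vocab = AutoTokenizer.from_pretrained('bert-base-chinese')  # hypothetical checkpoint
dataset = FineGrainedTestDataset(
    vocab, '/path/to/test.txt',                # hypothetical data path
    tokenizer='bert-base-chinese', root_dir='/path/to/root',
    dataset='ecommerce', lang='zh', max_len=256, res_max_len=64,
)
loader = DataLoader(dataset, batch_size=1, shuffle=False, collate_fn=dataset.collate)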
| 41.378486
| 106
| 0.501637
| 1,213
| 10,386
| 4.114592
| 0.10305
| 0.045081
| 0.036065
| 0.057303
| 0.802044
| 0.773392
| 0.767982
| 0.721298
| 0.721298
| 0.710479
| 0
| 0.006425
| 0.355575
| 10,386
| 250
| 107
| 41.544
| 0.73928
| 0.004333
| 0
| 0.72973
| 0
| 0
| 0.113804
| 0.03228
| 0
| 0
| 0
| 0
| 0.013514
| 1
| 0.067568
| false
| 0
| 0.013514
| 0.013514
| 0.148649
| 0.027027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d27ee1b746e920a8fbbde21e0ae74440138e1ce
| 2,293
|
py
|
Python
|
src/wheezy/template/tests/test_utils.py
|
nxsofsys/wheezy.template
|
b65b70b2927974790ff2413843ec752dd9c6c609
|
[
"MIT"
] | 2
|
2017-02-08T11:48:41.000Z
|
2017-12-18T08:04:13.000Z
|
src/wheezy/template/tests/test_utils.py
|
ezotrank/wheezy.template
|
d54bc667303fd4de7314f659e17dd317ac3e3a82
|
[
"MIT"
] | null | null | null |
src/wheezy/template/tests/test_utils.py
|
ezotrank/wheezy.template
|
d54bc667303fd4de7314f659e17dd317ac3e3a82
|
[
"MIT"
] | 1
|
2022-03-04T20:26:20.000Z
|
2022-03-04T20:26:20.000Z
|
""" Unit tests for ``wheezy.templates.utils``.
"""
import unittest
class FindAllBalancedTestCase(unittest.TestCase):
""" Test the ``find_all_balanced``.
"""
def test_start_out(self):
""" The start index is out of range.
"""
from wheezy.template.utils import find_all_balanced
assert 10 == find_all_balanced('test', 10)
def test_start_separator(self):
""" If text doesn't start with ``([`` return.
"""
from wheezy.template.utils import find_all_balanced
assert 0 == find_all_balanced('test([', 0)
assert 3 == find_all_balanced('test([', 3)
def test_not_balanced(self):
""" Separators are not balanced.
"""
from wheezy.template.utils import find_all_balanced
assert 4 == find_all_balanced('test(a, b', 4)
assert 4 == find_all_balanced('test[a, b()', 4)
def test_balanced(self):
""" Separators are balanced.
"""
from wheezy.template.utils import find_all_balanced
assert 10 == find_all_balanced('test(a, b)', 4)
assert 13 == find_all_balanced('test(a, b)[0]', 4)
assert 12 == find_all_balanced('test(a, b())', 4)
assert 17 == find_all_balanced('test(a, b())[0]()', 4)
class FindBalancedTestCase(unittest.TestCase):
""" Test the ``find_balanced``.
"""
def test_start_out(self):
""" The start index is out of range.
"""
from wheezy.template.utils import find_balanced
assert 10 == find_balanced('test', 10)
def test_start_separator(self):
""" If text doesn't start with ``start_sep`` return.
"""
from wheezy.template.utils import find_balanced
assert 0 == find_balanced('test(', 0)
assert 3 == find_balanced('test(', 3)
def test_not_balanced(self):
""" Separators are not balanced.
"""
from wheezy.template.utils import find_balanced
assert 4 == find_balanced('test(a, b', 4)
assert 4 == find_balanced('test(a, b()', 4)
def test_balanced(self):
""" Separators are balanced.
"""
from wheezy.template.utils import find_balanced
assert 10 == find_balanced('test(a, b)', 4)
assert 12 == find_balanced('test(a, b())', 4)
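The asserted values above fully describe the two helpers' contract; a tiny driver that replays a few of those cases outside unittest (requires wheezy.template to be installed):
from wheezy.template.utils import find_all_balanced, find_balanced

assert find_balanced('test(a, b)', 4) == 10          # balanced '(' ... ')'
assert find_all_balanced('test(a, b)[0]', 4) == 13   # also consumes the trailing '[0]'
assert find_balanced('test(a, b', 4) == 4            # unbalanced: start index returned unchanged
print('ok')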
| 31.847222
| 62
| 0.602704
| 294
| 2,293
| 4.513605
| 0.163265
| 0.144687
| 0.158252
| 0.105501
| 0.894499
| 0.853806
| 0.803316
| 0.787491
| 0.666164
| 0.590053
| 0
| 0.025989
| 0.261666
| 2,293
| 71
| 63
| 32.295775
| 0.757826
| 0.195813
| 0
| 0.457143
| 0
| 0
| 0.081127
| 0
| 0
| 0
| 0
| 0
| 0.457143
| 1
| 0.228571
| false
| 0
| 0.257143
| 0
| 0.542857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
5d28c827c798225b6b2063067a58a432acbd8766
| 34,967
|
py
|
Python
|
akshare/economic/macro_constitute.py
|
peterrosetu/akshare
|
9eac9ccb531b6e07d39140830d65349ea9441dc3
|
[
"MIT"
] | 1
|
2020-12-26T23:39:05.000Z
|
2020-12-26T23:39:05.000Z
|
akshare/economic/macro_constitute.py
|
Hecate2/akshare
|
2d8904f5cf242ab784f748ab2f886329ebf69742
|
[
"MIT"
] | null | null | null |
akshare/economic/macro_constitute.py
|
Hecate2/akshare
|
2d8904f5cf242ab784f748ab2f886329ebf69742
|
[
"MIT"
] | 2
|
2020-09-23T08:50:14.000Z
|
2020-09-28T09:57:07.000Z
|
# -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2019/10/21 12:08
Desc: Fetch Jin10 Data - Data Center - Major Institutions - Macroeconomics
"""
import json
import time
import pandas as pd
import requests
from tqdm import tqdm
from akshare.economic.cons import (
JS_CONS_GOLD_ETF_URL,
JS_CONS_SLIVER_ETF_URL,
JS_CONS_OPEC_URL,
)
def macro_cons_gold_volume():
"""
Holdings report of the world's largest gold ETF, SPDR Gold Trust; data available from 2004-11-18 to the present
:return: pandas.Series
2004-11-18 8.09
2004-11-19 57.85
2004-11-22 87.09
2004-11-23 87.09
2004-11-24 96.42
...
2019-10-20 924.64
2019-10-21 924.64
2019-10-22 919.66
2019-10-23 918.48
2019-10-24 918.48
"""
t = time.time()
res = requests.get(
JS_CONS_GOLD_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["黄金"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["总库存(吨)"]
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "1",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, :2]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "gold_volume"
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_gold_change():
"""
Holdings report of the world's largest gold ETF, SPDR Gold Trust; data available from 2004-11-18 to the present
:return: pandas.Series
2004-11-18 0
2004-11-19 49.76
2004-11-22 29.24
2004-11-23 0.00
2004-11-24 9.33
...
2019-10-20 0.00
2019-10-21 0.00
2019-10-22 -4.98
2019-10-23 -1.18
2019-10-24 0.00
"""
t = time.time()
res = requests.get(
JS_CONS_GOLD_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["黄金"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["增持/减持(吨)"]
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "1",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, [0, 2]]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "gold_change"
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_gold_amount():
"""
Holdings report of the world's largest gold ETF, SPDR Gold Trust; data available from 2004-11-18 to the present
:return: pandas.Series
2004-11-18 114920000.00
2004-11-19 828806907.20
2004-11-22 1253785205.50
2004-11-23 1254751438.19
2004-11-24 1390568824.08
...
2019-10-20 44286078486.23
2019-10-21 44333677232.68
2019-10-22 43907962483.56
2019-10-23 44120217405.82
2019-10-24 44120217405.82
"""
t = time.time()
res = requests.get(
JS_CONS_GOLD_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["黄金"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["总价值(美元)"]
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "1",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, [0, 3]]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "gold_amount"
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_silver_volume():
"""
Holdings report of the world's largest silver ETF, iShares Silver Trust; data available from 2006-04-29 to the present
:return: pandas.Series
2006-04-29 653.17
2006-05-02 653.17
2006-05-03 995.28
2006-05-04 1197.43
2006-05-05 1306.29
...
2019-10-17 11847.91
2019-10-18 11847.91
2019-10-21 11813.02
2019-10-22 11751.96
2019-10-23 11751.96
"""
t = time.time()
res = requests.get(
JS_CONS_SLIVER_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["白银"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["总库存(吨)"]
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "2",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, [0, 1]]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "silver_volume"
url = "https://cdn.jin10.com/data_center/reports/etf_2.json"
r = requests.get(url)
data_json = r.json()
append_temp_df = pd.DataFrame(data_json["values"]).T
append_temp_df.columns = [item["name"] for item in data_json["keys"]]
temp_append_df = append_temp_df["总库存"]
temp_append_df.name = "silver_volume"
temp_df = temp_df.reset_index()
temp_df["index"] = temp_df["index"].astype(str)
temp_df = temp_df.append(temp_append_df.reset_index())
temp_df.drop_duplicates(subset=["index"], keep="last", inplace=True)
temp_df.index = pd.to_datetime(temp_df["index"])
del temp_df["index"]
temp_df = temp_df[temp_df != 'Show All']
temp_df.sort_index(inplace=True)
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_silver_change():
"""
Holdings report of the world's largest silver ETF, iShares Silver Trust; data available from 2006-04-29 to the present
:return: pandas.Series
2006-04-29 0
2006-05-02 0.00
2006-05-03 342.11
2006-05-04 202.15
2006-05-05 108.86
...
2019-10-17 -58.16
2019-10-18 0.00
2019-10-21 -34.89
2019-10-22 -61.06
2019-10-23 0.00
"""
t = time.time()
res = requests.get(
JS_CONS_SLIVER_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["白银"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["增持/减持(吨)"]
temp_df.name = "silver_change"
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "2",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, [0, 2]]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "silver_change"
url = "https://cdn.jin10.com/data_center/reports/etf_2.json"
r = requests.get(url)
data_json = r.json()
append_temp_df = pd.DataFrame(data_json["values"]).T
append_temp_df.columns = [item["name"] for item in data_json["keys"]]
temp_append_df = append_temp_df["增持/减持"]
temp_append_df.name = "silver_change"
temp_df = temp_df.reset_index()
temp_df["index"] = temp_df["index"].astype(str)
temp_df = temp_df.append(temp_append_df.reset_index())
temp_df.drop_duplicates(subset=["index"], keep="last", inplace=True)
temp_df.index = pd.to_datetime(temp_df["index"])
del temp_df["index"]
temp_df = temp_df[temp_df != 'Show All']
temp_df.sort_index(inplace=True)
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_silver_amount():
"""
Holdings report of the world's largest silver ETF, iShares Silver Trust; data available from 2006-04-29 to the present
:return: pandas.Series
2006-04-29 263651152
2006-05-02 263651152
2006-05-03 445408550
2006-05-04 555123947
2006-05-05 574713264
...
2019-10-17 Show All
2019-10-18 Show All
2019-10-21 Show All
2019-10-22 Show All
2019-10-23 Show All
"""
t = time.time()
res = requests.get(
JS_CONS_SLIVER_ETF_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
value_list = [item["datas"]["白银"] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["总价值(美元)"]
url = "https://datacenter-api.jin10.com/reports/list_v2"
params = {
"max_date": "",
"category": "etf",
"attr_id": "2",
"_": str(int(round(t * 1000))),
}
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_usa_michigan_consumer_sentiment",
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
r = requests.get(url, params=params, headers=headers)
temp_se = pd.DataFrame(r.json()["data"]["values"]).iloc[:, [0, 3]]
temp_se.index = pd.to_datetime(temp_se.iloc[:, 0])
temp_se = temp_se.iloc[:, 1]
temp_df = temp_df.append(temp_se)
temp_df.dropna(inplace=True)
temp_df.sort_index(inplace=True)
temp_df = temp_df.reset_index()
temp_df.drop_duplicates(subset="index", keep="last", inplace=True)
temp_df.set_index("index", inplace=True)
temp_df = temp_df.squeeze()
temp_df.index.name = None
temp_df.name = "silver_amount"
url = "https://cdn.jin10.com/data_center/reports/etf_2.json"
r = requests.get(url)
data_json = r.json()
append_temp_df = pd.DataFrame(data_json["values"]).T
append_temp_df.columns = [item["name"] for item in data_json["keys"]]
temp_append_df = append_temp_df["总价值"]
temp_append_df.name = "silver_amount"
temp_df = temp_df.reset_index()
temp_df["index"] = temp_df["index"].astype(str)
temp_df = temp_df.append(temp_append_df.reset_index())
temp_df.drop_duplicates(subset=["index"], keep="last", inplace=True)
temp_df.index = pd.to_datetime(temp_df["index"])
del temp_df["index"]
temp_df = temp_df[temp_df != 'Show All']
temp_df.sort_index(inplace=True)
temp_df = temp_df.astype(float)
return temp_df
def macro_cons_opec_near_change():
"""
OPEC report - changes; data available from 2017-01-18 to the present
:return: pandas.Series
阿尔及利亚 安哥拉 厄瓜多尔 加蓬 伊朗 伊拉克 科威特 利比亚 尼日利亚 \
2017-01-18 -0.87 3.56 -0.25 -0.87 0.95 4.26 0.20 3.13 -11.35
2017-02-13 -4.17 -2.32 -1.67 -1.00 5.02 -16.57 -14.12 6.47 10.18
2017-03-14 -0.02 -1.82 -0.44 -0.69 3.61 -6.20 -0.93 -1.11 5.80
2017-04-12 0.45 -1.87 -0.28 0.19 -2.87 -0.85 -0.95 -6.08 -2.98
2017-05-11 -0.75 9.71 -0.06 0.88 -3.47 -3.91 0.03 -6.16 5.08
2017-06-13 0.96 -5.42 0.22 -0.13 0.45 4.44 0.00 17.82 17.42
2017-07-12 -0.09 6.60 -0.21 -0.77 1.67 6.06 -0.02 12.70 9.67
2017-08-10 -0.10 -1.93 0.85 0.71 0.69 -3.31 -0.74 15.43 3.43
2017-09-12 0.41 0.83 -0.03 -3.23 -0.23 -2.31 0.01 -11.23 13.83
2017-10-11 -0.85 -0.29 -0.05 1.44 0.09 3.16 -0.17 5.39 5.08
2017-11-13 -3.84 6.98 0.71 0.18 -1.13 -13.10 -0.37 4.23 -5.44
2017-12-13 1.41 -10.87 -0.51 -0.47 -0.22 0.10 -0.53 0.61 9.58
2018-01-18 3.03 4.48 -0.72 -0.01 1.32 0.79 -0.25 -0.70 7.57
2018-04-12 -4.95 -8.17 0.26 -0.91 0.33 -1.31 0.23 -3.72 1.82
2018-05-14 1.77 -0.78 0.31 -0.93 1.00 -0.07 0.08 0.69 -0.83
2018-06-12 3.90 1.40 0.06 0.18 0.56 2.77 -0.57 -2.43 -5.35
2018-07-11 0.46 -8.83 -0.09 0.35 -2.27 7.15 2.73 -25.43 2.78
2018-08-13 1.38 1.17 0.42 -0.34 -5.63 2.41 7.85 -5.67 7.05
2018-09-12 -1.40 -0.80 0.40 18.80 -15.00 9.00 0.80 25.60 7.40
2018-10-11 -0.80 5.70 53.10 -0.10 -15.00 0.80 0.60 10.30 2.60
2018-11-13 -0.40 2.20 -0.30 0.30 -15.60 465.30 -3.30 6.00 -1.70
2018-12-12 -0.50 0.30 0.10 -1.10 -38.00 -2.30 4.50 -1.10 -3.00
2019-03-14 0.20 2.20 0.50 0.70 1.20 -7.00 -1.40 2.30 1.00
2019-04-10 -0.70 0.70 52.40 0.90 -2.80 -12.60 -0.10 19.60 1.10
2019-06-13 0.60 7.40 -0.10 2.30 -22.70 9.40 1.30 -0.30 -9.20
沙特 阿联酋 委内瑞拉 欧佩克产量
2017-01-18 -14.93 -0.63 -4.52 -22.09
2017-02-13 -49.62 -15.93 -3.05 -89.02
2017-03-14 -6.81 -3.69 -1.60 -13.95
2017-04-12 4.16 -3.27 -2.59 -15.27
2017-05-11 4.92 -6.23 -2.60 -1.82
2017-06-13 0.23 -1.80 -0.77 33.61
2017-07-12 5.13 -0.07 -1.36 39.35
2017-08-10 3.18 -0.67 -1.58 17.26
2017-09-12 -1.03 -2.02 -3.19 -7.91
2017-10-11 -0.07 -0.84 -5.19 8.85
2017-11-13 1.69 -0.60 -4.36 -15.09
2017-12-13 -4.54 -3.55 -4.16 -13.35
2018-01-18 -1.09 -0.70 -8.22 4.24
2018-04-12 -4.69 4.49 -5.53 -20.14
2018-05-14 4.65 0.61 -4.17 1.21
2018-06-12 8.55 -0.63 -4.25 3.54
2018-07-11 40.54 3.51 -4.75 17.34
2018-08-13 -5.28 6.92 -4.77 4.07
2018-09-12 3.80 1.20 -3.60 27.80
2018-10-11 10.80 3.00 -4.20 13.20
2018-11-13 12.70 14.20 -4.00 12.70
2018-12-12 37.70 7.10 -5.20 -1.10
2019-03-14 -8.60 -0.40 -14.20 -22.10
2019-04-10 -32.40 -0.90 -28.90 -53.40
2019-06-13 -7.60 0.30 -3.50 -23.60
"""
t = time.time()
big_df = pd.DataFrame()
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_opec_report",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
res = requests.get(f"https://datacenter-api.jin10.com/reports/dates?category=opec&_={str(int(round(t * 1000)))}",
headers=headers)  # date sequence
all_date_list = res.json()["data"]
bar = tqdm(reversed(all_date_list[:-1]))
for item in bar:
bar.set_description(f"Please wait for a moment, now downing {item}'s data")
res = requests.get(
f"https://datacenter-api.jin10.com/reports/list?category=opec&date={item}&_={str(int(round(t * 1000)))}",
headers=headers)
temp_df = pd.DataFrame(res.json()["data"]["values"],
columns=pd.DataFrame(res.json()["data"]["keys"])["name"].tolist()).T
temp_df.columns = temp_df.iloc[0, :]
temp_df = temp_df.iloc[1:, :]
try:
temp_df = temp_df[['阿尔及利亚', '安哥拉', '加蓬', '伊朗', '伊拉克', '科威特', '利比亚', '尼日利亚', '沙特',
'阿联酋', '委内瑞拉', '欧佩克产量']].iloc[-1, :]
except:
temp_df = temp_df[['阿尔及利亚', '安哥拉', '加蓬', '伊朗', '伊拉克', '科威特', '利比亚', '尼日利亚', '沙特',
'阿联酋', '委内瑞拉', '欧佩克产量']].iloc[-1, :]
big_df[temp_df.name] = temp_df
big_df = big_df.T
big_df.columns.name = "日期"
big_df = big_df.astype(float)
return big_df
def _macro_cons_opec_month():
"""
OPEC report - monthly; data available from 2017-01-18 to the present.
For each index date the values refer to the previous month; because some countries have missing data,
only countries with data are returned.
:return: pandas.Series
阿尔及利亚 安哥拉 厄瓜多尔 加蓬 伊朗 伊拉克 科威特 利比亚 尼日利亚 \
2017-01-18 108.0 172.4 54.5 21.3 372.0 463.2 281.2 60.8 154.2
2017-02-13 104.5 165.1 52.7 19.9 377.5 447.6 271.8 67.5 157.6
2017-03-14 105.3 164.1 52.6 19.4 381.4 441.4 270.9 66.9 160.8
2017-04-12 105.6 161.4 52.6 19.8 379.0 440.2 270.2 62.2 154.5
2017-05-11 104.7 169.2 52.4 20.6 375.9 437.3 270.2 55.0 150.8
2017-06-13 105.9 161.3 52.8 20.4 379.5 442.4 270.5 73.0 168.0
2017-07-12 106.0 166.8 52.7 19.7 379.0 450.2 270.9 85.2 173.3
2017-08-10 105.9 164.6 53.6 20.5 382.4 446.8 270.3 100.1 174.8
2017-09-12 106.5 164.6 53.7 17.3 382.8 444.8 270.2 89.0 186.1
2017-10-11 104.6 164.1 53.6 20.1 382.7 449.4 270.0 92.3 185.5
2017-11-13 101.2 171.1 54.1 20.3 382.3 438.3 270.8 96.2 173.8
2017-12-13 101.3 158.1 53.3 19.7 381.8 439.6 270.3 97.3 179.0
2018-01-18 103.7 163.3 52.6 19.7 382.9 440.5 270.0 96.2 186.1
2018-04-12 98.4 152.4 51.8 18.3 381.4 442.6 270.4 96.8 181.0
2018-05-14 99.7 151.5 52.0 18.3 382.3 442.9 270.5 98.2 179.1
2018-06-12 103.1 152.5 51.9 18.9 382.9 445.5 270.1 95.5 171.1
2018-07-11 103.9 143.1 51.9 19.0 379.9 453.3 273.1 70.8 166.0
2018-08-13 106.2 145.6 52.5 18.8 373.7 455.6 279.1 66.4 166.7
2018-09-12 104.5 144.8 52.9 18.7 358.4 464.9 280.2 92.6 172.5
2018-10-11 104.9 151.9 53.1 18.7 344.7 465.0 281.2 105.3 174.8
2018-11-13 105.4 153.3 52.5 18.6 329.6 465.4 276.4 111.4 175.1
2018-12-12 105.2 152.1 52.5 17.6 295.4 463.1 280.9 110.4 173.6
2019-03-14 102.6 145.7 52.2 20.3 274.3 463.3 270.9 90.6 174.1
2019-04-10 101.8 145.4 52.4 21.4 269.8 452.2 270.9 109.8 173.3
2019-06-13 102.9 147.1 52.9 21.1 237.0 472.4 271.0 117.4 173.3
沙特 阿联酋 委内瑞拉 欧佩克产量
2017-01-18 1047.4 307.1 202.1 3308.5
2017-02-13 994.6 293.1 200.4 3213.9
2017-03-14 979.7 292.5 198.7 3195.8
2017-04-12 999.4 289.5 197.2 3192.8
2017-05-11 995.4 284.2 195.6 3173.2
2017-06-13 994.0 288.5 196.3 3213.9
2017-07-12 995.0 289.8 193.8 3261.1
2017-08-10 1006.7 290.5 193.2 3286.9
2017-09-12 1002.2 290.1 191.8 3275.5
2017-10-11 997.5 290.5 189.0 3274.8
2017-11-13 1000.0 291.1 186.3 3258.9
2017-12-13 999.6 288.3 183.4 3244.8
2018-01-18 991.8 287.8 174.5 3241.6
2018-04-12 993.4 286.4 148.8 3195.8
2018-05-14 995.9 287.2 143.6 3193.0
2018-06-12 998.7 286.5 139.2 3186.9
2018-07-11 1042.0 289.7 134.0 3232.7
2018-08-13 1038.7 295.9 127.8 3232.3
2018-09-12 1040.1 297.2 123.5 3256.5
2018-10-11 1051.2 300.4 119.7 3276.1
2018-11-13 1063.0 316.0 117.1 3290.0
2018-12-12 1101.6 324.6 113.7 3296.5
2019-03-14 1008.7 307.2 100.8 3054.9
2019-04-10 979.4 305.9 73.2 3002.2
2019-06-13 969.0 306.1 74.1 2987.6
"""
t = time.time()
res = requests.get(
JS_CONS_OPEC_URL.format(
str(int(round(t * 1000))), str(int(round(t * 1000)) + 90)
)
)
json_data = json.loads(res.text[res.text.find("{"): res.text.rfind("}") + 1])
date_list = [item["date"] for item in json_data["list"]]
big_df = pd.DataFrame()
for country in [item["datas"] for item in json_data["list"]][0].keys():
try:
value_list = [item["datas"][country] for item in json_data["list"]]
value_df = pd.DataFrame(value_list)
value_df.columns = json_data["kinds"]
value_df.index = pd.to_datetime(date_list)
temp_df = value_df["上个月"]
temp_df.name = country
big_df = big_df.append(temp_df)
except:
continue
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_opec_report",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
res = requests.get(f"https://datacenter-api.jin10.com/reports/dates?category=opec&_={str(int(round(t * 1000)))}",
headers=headers)  # date sequence
all_date_list = res.json()["data"]
need_date_list = [item for item in all_date_list if
item.split("-")[0] + item.split("-")[1] + item.split("-")[2] not in date_list]
for item in reversed(need_date_list):
res = requests.get(
f"https://datacenter-api.jin10.com/reports/list?category=opec&date={item}&_={str(int(round(t * 1000)))}",
headers=headers)
temp_df = pd.DataFrame(res.json()["data"]["values"],
columns=pd.DataFrame(res.json()["data"]["keys"])["name"].tolist()).T
temp_df.columns = temp_df.iloc[0, :]
temp_df = temp_df[['阿尔及利亚', '安哥拉', '厄瓜多尔', '加蓬', '伊朗', '伊拉克', '科威特', '利比亚', '尼日利亚', '沙特',
'阿联酋', '委内瑞拉', '欧佩克产量']].iloc[-2, :]
big_df[item] = temp_df
return big_df.T
def macro_cons_opec_month():
"""
OPEC report - monthly; data available from 2017-01-18 to the present.
For each index date the values refer to the previous month; because some countries have missing data,
only countries with data are returned.
2020-03-12 fix: "厄瓜多尔" (Ecuador) has not updated its data for several months and is therefore excluded here.
https://datacenter.jin10.com/reportType/dc_opec_report
:return: pandas.Series
阿尔及利亚 安哥拉 厄瓜多尔 加蓬 伊朗 伊拉克 科威特 利比亚 尼日利亚 \
2017-01-18 108.0 172.4 54.5 21.3 372.0 463.2 281.2 60.8 154.2
2017-02-13 104.5 165.1 52.7 19.9 377.5 447.6 271.8 67.5 157.6
2017-03-14 105.3 164.1 52.6 19.4 381.4 441.4 270.9 66.9 160.8
2017-04-12 105.6 161.4 52.6 19.8 379.0 440.2 270.2 62.2 154.5
2017-05-11 104.7 169.2 52.4 20.6 375.9 437.3 270.2 55.0 150.8
2017-06-13 105.9 161.3 52.8 20.4 379.5 442.4 270.5 73.0 168.0
2017-07-12 106.0 166.8 52.7 19.7 379.0 450.2 270.9 85.2 173.3
2017-08-10 105.9 164.6 53.6 20.5 382.4 446.8 270.3 100.1 174.8
2017-09-12 106.5 164.6 53.7 17.3 382.8 444.8 270.2 89.0 186.1
2017-10-11 104.6 164.1 53.6 20.1 382.7 449.4 270.0 92.3 185.5
2017-11-13 101.2 171.1 54.1 20.3 382.3 438.3 270.8 96.2 173.8
2017-12-13 101.3 158.1 53.3 19.7 381.8 439.6 270.3 97.3 179.0
2018-01-18 103.7 163.3 52.6 19.7 382.9 440.5 270.0 96.2 186.1
2018-04-12 98.4 152.4 51.8 18.3 381.4 442.6 270.4 96.8 181.0
2018-05-14 99.7 151.5 52.0 18.3 382.3 442.9 270.5 98.2 179.1
2018-06-12 103.1 152.5 51.9 18.9 382.9 445.5 270.1 95.5 171.1
2018-07-11 103.9 143.1 51.9 19.0 379.9 453.3 273.1 70.8 166.0
2018-08-13 106.2 145.6 52.5 18.8 373.7 455.6 279.1 66.4 166.7
2018-09-12 104.5 144.8 52.9 18.7 358.4 464.9 280.2 92.6 172.5
2018-10-11 104.9 151.9 53.1 18.7 344.7 465.0 281.2 105.3 174.8
2018-11-13 105.4 153.3 52.5 18.6 329.6 465.4 276.4 111.4 175.1
2018-12-12 105.2 152.1 52.5 17.6 295.4 463.1 280.9 110.4 173.6
2019-03-14 102.6 145.7 52.2 20.3 274.3 463.3 270.9 90.6 174.1
2019-04-10 101.8 145.4 52.4 21.4 269.8 452.2 270.9 109.8 173.3
2019-06-13 102.9 147.1 52.9 21.1 237.0 472.4 271.0 117.4 173.3
沙特 阿联酋 委内瑞拉 欧佩克产量
2017-01-18 1047.4 307.1 202.1 3308.5
2017-02-13 994.6 293.1 200.4 3213.9
2017-03-14 979.7 292.5 198.7 3195.8
2017-04-12 999.4 289.5 197.2 3192.8
2017-05-11 995.4 284.2 195.6 3173.2
2017-06-13 994.0 288.5 196.3 3213.9
2017-07-12 995.0 289.8 193.8 3261.1
2017-08-10 1006.7 290.5 193.2 3286.9
2017-09-12 1002.2 290.1 191.8 3275.5
2017-10-11 997.5 290.5 189.0 3274.8
2017-11-13 1000.0 291.1 186.3 3258.9
2017-12-13 999.6 288.3 183.4 3244.8
2018-01-18 991.8 287.8 174.5 3241.6
2018-04-12 993.4 286.4 148.8 3195.8
2018-05-14 995.9 287.2 143.6 3193.0
2018-06-12 998.7 286.5 139.2 3186.9
2018-07-11 1042.0 289.7 134.0 3232.7
2018-08-13 1038.7 295.9 127.8 3232.3
2018-09-12 1040.1 297.2 123.5 3256.5
2018-10-11 1051.2 300.4 119.7 3276.1
2018-11-13 1063.0 316.0 117.1 3290.0
2018-12-12 1101.6 324.6 113.7 3296.5
2019-03-14 1008.7 307.2 100.8 3054.9
2019-04-10 979.4 305.9 73.2 3002.2
2019-06-13 969.0 306.1 74.1 2987.6
"""
t = time.time()
big_df = pd.DataFrame()
headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate, br",
"accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
"cache-control": "no-cache",
"origin": "https://datacenter.jin10.com",
"pragma": "no-cache",
"referer": "https://datacenter.jin10.com/reportType/dc_opec_report",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36",
"x-app-id": "rU6QIu7JHe2gOUeR",
"x-csrf-token": "",
"x-version": "1.0.0",
}
res = requests.get(f"https://datacenter-api.jin10.com/reports/dates?category=opec&_={str(int(round(t * 1000)))}",
headers=headers)  # date sequence
all_date_list = res.json()["data"]
bar = tqdm(reversed(all_date_list))
for item in bar:
bar.set_description(f"Please wait for a moment, now downing {item}'s data")
res = requests.get(
f"https://datacenter-api.jin10.com/reports/list?category=opec&date={item}&_={str(int(round(t * 1000)))}",
headers=headers)
temp_df = pd.DataFrame(res.json()["data"]["values"],
columns=pd.DataFrame(res.json()["data"]["keys"])["name"].tolist()).T
temp_df.columns = temp_df.iloc[0, :]
temp_df = temp_df.iloc[1:, :]
try:
temp_df = temp_df[['阿尔及利亚', '安哥拉', '加蓬', '伊朗', '伊拉克', '科威特', '利比亚', '尼日利亚', '沙特',
'阿联酋', '委内瑞拉', '欧佩克产量']].iloc[-2, :]
except:
temp_df = temp_df[['阿尔及利亚', '安哥拉', '加蓬', '伊朗', '伊拉克', '科威特', '利比亚', '尼日利亚', '沙特',
'阿联酋', '委内瑞拉', '欧佩克产量']].iloc[-1, :]
big_df[temp_df.name] = temp_df
big_df = big_df.T
big_df.columns.name = "日期"
big_df = big_df.astype(float)
return big_df
if __name__ == "__main__":
macro_cons_gold_volume_df = macro_cons_gold_volume()
print(macro_cons_gold_volume_df)
macro_cons_gold_change_df = macro_cons_gold_change()
print(macro_cons_gold_change_df)
macro_cons_gold_amount_df = macro_cons_gold_amount()
print(macro_cons_gold_amount_df)
print(pd.concat([macro_cons_gold_volume_df, macro_cons_gold_change_df, macro_cons_gold_amount_df], axis=1))
macro_cons_silver_volume_df = macro_cons_silver_volume()
print(macro_cons_silver_volume_df)
macro_cons_silver_change_df = macro_cons_silver_change()
print(macro_cons_silver_change_df)
macro_cons_silver_amount_df = macro_cons_silver_amount()
print(macro_cons_silver_amount_df)
print(pd.concat([macro_cons_silver_volume_df, macro_cons_silver_change_df, macro_cons_silver_amount_df], axis=1))
macro_cons_opec_near_change_df = macro_cons_opec_near_change()
print(macro_cons_opec_near_change_df)
macro_cons_opec_month_df = macro_cons_opec_month()
print(macro_cons_opec_month_df)
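Every function above repeats the same Jin10 request headers and the millisecond-timestamp expression str(int(round(t * 1000))). A small refactor sketch that factors those out; the helper name and constant are hypothetical and not part of akshare's public API, and the headers are an abridged copy of those used above:
import time

JIN10_HEADERS = {
    "accept": "*/*",
    "origin": "https://datacenter.jin10.com",
    "referer": "https://datacenter.jin10.com/reportType/dc_opec_report",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
                  "(KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36",
    "x-app-id": "rU6QIu7JHe2gOUeR",
    "x-version": "1.0.0",
}

def jin10_timestamp():
    """Millisecond timestamp string, matching str(int(round(t * 1000))) used above."""
    return str(int(round(time.time() * 1000)))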
| 42.128916
| 140
| 0.574885
| 6,079
| 34,967
| 3.192795
| 0.086363
| 0.055335
| 0.018548
| 0.026586
| 0.848885
| 0.832191
| 0.830594
| 0.822969
| 0.81591
| 0.811376
| 0
| 0.241673
| 0.258158
| 34,967
| 829
| 141
| 42.179735
| 0.506438
| 0.321846
| 0
| 0.818356
| 0
| 0.028681
| 0.278654
| 0.00915
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017208
| false
| 0
| 0.011472
| 0
| 0.045889
| 0.01912
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|