| repo_name (string, length 9–76) | fname (list, 1–253 items) | context (string, length 377–218k) | question (string, length 31–67) | length (int64, 136–61.2k) | answer (string, length 4–1.29k) | full_len (int64, 158–61k) |
|---|---|---|---|---|---|---|
| thebargaintenor/autocomplete-metadata | [
"repos/thebargaintenor/autocomplete-metadata/src/completion.py",
"repos/thebargaintenor/autocomplete-metadata/src/test_completion.py",
"repos/thebargaintenor/autocomplete-metadata/src/utilities/config.example.py",
"repos/thebargaintenor/autocomplete-metadata/src/utilities/create_metadata_json.py"
] |
from typing import List , Type , Any , Optional
import typing
import src
import builtins
import attr
import json
import re
import sys
from typing import Optional
@ attr . s ( ) class Completion ( ) :
id = attr . ib ( type = str )
name = attr . ib ( type = str )
tokens = attr . ib ( type = list )
@ attr . s ( ) class Repository ( ) :
authors = attr . ib ( type = list )
courses = attr . ib ( type = list )
def tokenize_name ( name ) :
return name . lower ( ) . split ( ) if name else [ ]
def tokenize_tag ( tag ) :
return re . split ( [string] , tag . lower ( ) ) if tag else [ ]
def tokenize_names ( names ) :
tokens = [ ]
for name in names :
tokens . extend ( tokenize_name ( name ) )
return tokens
def parse_course_record ( record ) :
try :
tokens = tokenize_name ( record [ [string] ] )
if record [ [string] ] :
tokens . extend ( tokenize_names ( record [ [string] ] ) )
return Completion ( id = record [ [string] ] , name = record [ [string] ] , tokens = tokens )
except :
return None
def get_courses ( course_records ) :
return list ( filter ( None , ( parse_course_record ( c ) for c in course_records ) ) )
def get_courses_from_json ( repo_blob ) :
return get_courses ( repo_blob [ [string] ] ) if [string] in repo_blob else [ ]
def parse_author_record ( record ) :
try :
return Completion ( id = record [ [string] ] , name = record [ [string] ] , tokens = tokenize_name ( record [ [string] ] ) )
except :
return None
def get_authors ( author_records ) :
return list ( filter ( None , ( parse_author_record ( c ) for c in author_records ) ) )
def get_authors_from_json ( repo_blob ) :
return get_authors ( repo_blob [ [string] ] ) if [string] in repo_blob else [ ]
def load_repository_from_file ( file_name ) :
with open ( file_name , [string] ) as course_file :
repo_blob = json . load ( course_file )
return Repository ( courses = get_courses_from_json ( repo_blob ) , authors = get_authors_from_json ( repo_blob ) )
def main ( ) :
file_name = sys . argv [ [number] ]
courses = load_repository_from_file ( file_name )
print ( [string] . format ( len ( courses ) ) )
if __name__ == [string] :
main ( )
repos/thebargaintenor/autocomplete-metadata/src/test_completion.py
from typing import List , Dict , Optional , Any , Union
import typing
from uuid import uuid4
from completion import ( Completion , parse_author_record , parse_course_record , tokenize_name , tokenize_tag )
def test_name_tokenizes_as_lowercase_list ( ) :
name = [string]
got = tokenize_name ( name )
want = [ [string] , [string] , [string] , [string] ]
assert got == want
def test_no_name_tokenizes_as_empty_list ( ) :
got = tokenize_name ( None )
want = [ ]
assert got == want
def test_tag_tokenizes_as_lowercase_list ( ) :
tag = [string]
got = tokenize_tag ( tag )
want = [ [string] , [string] ]
assert got == want
def test_tag_name_tokenizes_as_empty_list ( ) :
got = tokenize_tag ( None )
want = [ ]
assert got == want
def test_valid_course_record_parses_as_completion_object ( ) :
id = str ( uuid4 ( ) )
valid_record = { [string] : id , [string] : [string] , [string] : [ [string] ] , [string] : [ ] }
course_tokens = [ [string] , [string] , [string] , [string] , [string] , [string] ]
got = parse_course_record ( valid_record )
want = Completion ( id = id , name = [string] , tokens = course_tokens )
assert got == want
def test_invalid_course_record_parses_as_none ( ) :
invalid_record = { [string] : None , [string] : [ ] , [string] : [ ] }
got = parse_course_record ( invalid_record )
want = None
assert got == want
def test_valid_author_record_parses_as_completion_object ( ) :
id = str ( uuid4 ( ) )
valid_record = { [string] : id , [string] : [string] , [string] : [ ] }
author_tokens = [ [string] , [string] ]
got = parse_author_record ( valid_record )
want = Completion ( id = id , name = [string] , tokens = author_tokens )
assert got == want
def test_invalid_author_record_parses_as_none ( ) :
invalid_record = { [string] : None , [string] : [ ] }
got = parse_author_record ( invalid_record )
want = None
assert got == want
repos/thebargaintenor/autocomplete-metadata/src/utilities/config.example.py
host = [string]
username = [string]
password = [string]
database = [string]
repos/thebargaintenor/autocomplete-metadata/src/utilities/create_metadata_json.py
[comment]
from typing import List , Any , Dict
import typing
import sqlalchemy
import builtins
from sqlalchemy import ( create_engine , func )
from sqlalchemy . orm import ( Query , sessionmaker )
from sqlalchemy . util import KeyedTuple
from search_models import ( Author , Course , CourseAuthorMapping , CourseModuleMapping , Module , ModuleClipMapping , Clip )
import config
import json
import sys
[comment]
host = config . host
username = config . username
password = config . password
database = config . database
connection_string = [string] . format ( username , password , host , database )
pg_engine = create_engine ( connection_string )
SessionFactory = sessionmaker ( bind = pg_engine )
session = SessionFactory ( )
def fetch_authors_query ( ) :
return ( session . query ( Author . id , Author . displayName . label ( [string] ) ) . select_from ( Author ) )
def format_author ( author ) :
return { [string] : author . id , [string] : author . name }
def fetch_courses_query ( ) :
return ( session . query ( Course . id , Course . deprecatedCourseId , Course . title , func . array_agg ( Author . displayName ) . label ( [string] ) , Course . description , Course . level , Course . modifiedAt . label ( [string] ) , Course . tags , Course . averageRating . label ( [string] ) , Course . numberOfRatings . label ( [string] ) ) . select_from ( Course ) . filter ( Course . deprecatedCourseId is not None ) . filter ( Course . status != [string] ) . join ( CourseAuthorMapping , Course . id == CourseAuthorMapping . courseId ) . join ( Author , Author . id == CourseAuthorMapping . authorId ) . group_by ( Course . id ) )
[comment]
def format_course ( course ) :
return { [string] : course . id , [string] : course . title , [string] : course . deprecatedCourseId , [string] : course . authors , [string] : course . tags }
def fetch_modules_query ( ) :
return ( session . query ( Module . id , Module . deprecatedModuleId , Module . title , Module . updatedAt . label ( [string] ) , Course . id . label ( [string] ) , Course . title . label ( [string] ) , Course . deprecatedCourseId . label ( [string] ) , Course . modifiedAt . label ( [string] ) , CourseModuleMapping . modulePosition . label ( [string] ) ) . select_from ( Module ) . join ( CourseModuleMapping , CourseModuleMapping . moduleId == Module . id ) . join ( Course , CourseModuleMapping . courseId == Course . id ) . filter ( Course . deprecatedCourseId is not None ) . filter ( Course . status != [string] ) )
def format_module ( index , module ) :
return { [string] : index , [string] : [string] , [string] : module . id , [string] : { [string] : module . deprecatedModuleId , [string] : module . title , [string] : module . updated_on , [string] : module . course_title , [string] : module . course_id , [string] : module . deprecated_course_id , [string] : module . course_modified_on , [string] : module . position } }
def fetch_clips_query ( ) :
return ( session . query ( Clip . id , Clip . title , Course . id . label ( [string] ) , Course . title . label ( [string] ) , Course . deprecatedCourseId . label ( [string] ) , Course . modifiedAt . label ( [string] ) , ModuleClipMapping . moduleId . label ( [string] ) , Module . deprecatedModuleId . label ( [string] ) , Module . title . label ( [string] ) , ModuleClipMapping . clipPosition . label ( [string] ) ) . select_from ( Clip ) . join ( ModuleClipMapping , ModuleClipMapping . clipId == Clip . id ) . join ( Module , Module . id == ModuleClipMapping . moduleId ) . join ( CourseModuleMapping , CourseModuleMapping . moduleId == ModuleClipMapping . moduleId ) . join ( Course , CourseModuleMapping . courseId == Course . id ) . filter ( Course . deprecatedCourseId is not None ) . filter ( Course . status != [string] ) )
def format_clip ( index , clip ) :
return { [string] : index , [string] : [string] , [string] : clip . id , [string] : { [string] : clip . title , [string] : clip . course_title , [string] : clip . deprecated_course_id , [string] : clip . course_id , [string] : clip . course_modified_on , [string] : clip . deprecated_module_id , [string] : clip . module_title , [string] : clip . module_id , [string] : clip . position } }
def main ( ) :
output_file_name = sys . argv [ [number] ]
courses = fetch_courses_query ( ) . yield_per ( [number] )
authors = fetch_authors_query ( ) . yield_per ( [number] )
course_dict = { [string] : list ( format_course ( c ) for c in courses ) , [string] : list ( format_author ( a ) for a in authors ) }
with open ( output_file_name , [string] ) as output_file :
json . dump ( course_dict , output_file )
if __name__ == [string] :
main ( )
| What is the type of variable fetch_clips_query? | 2,469 | sqlalchemy.orm.Query | 2,452 |
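The recorded answer reflects that `fetch_clips_query` returns a `session.query(...)` chain, which SQLAlchemy types as `sqlalchemy.orm.Query`. A minimal sketch of the same pattern, using a hypothetical `Clip` model and an in-memory engine rather than the repo's own:

```python
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Query, declarative_base, sessionmaker  # requires SQLAlchemy 1.4+

Base = declarative_base()

class Clip(Base):  # hypothetical stand-in for the repo's Clip model
    __tablename__ = "clips"
    id = Column(Integer, primary_key=True)

session = sessionmaker(bind=create_engine("sqlite://"))()

def fetch_clips_query() -> Query:
    # session.query(...) builds a Query object lazily; nothing is executed
    # here, so no table needs to exist for this sketch.
    return session.query(Clip.id)

print(type(fetch_clips_query()))  # <class 'sqlalchemy.orm.query.Query'>
```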
| conanfanli/slotomania | [
"repos/conanfanli/slotomania/tests/test_core.py",
"repos/conanfanli/slotomania/tests/test_pyty.py",
"repos/conanfanli/slotomania/tests/phony/phony/wsgi.py",
"repos/conanfanli/slotomania/tests/phony/phony/urls.py",
"repos/conanfanli/slotomania/tests/phony/casino/admin.py",
"repos/conanfanli/slotomania/tests/phony/casino/tests.py",
"repos/conanfanli/slotomania/tests/phony/casino/models.py",
"repos/conanfanli/slotomania/tests/phony/casino/views.py",
"repos/conanfanli/slotomania/tests/phony/casino/apps.py",
"repos/conanfanli/slotomania/pytypegen/debugger.py",
"repos/conanfanli/slotomania/pytypegen/pyty.py",
"repos/conanfanli/slotomania/pytypegen/exceptions.py",
"repos/conanfanli/slotomania/pytypegen/contrib/jwt_auth.py",
"repos/conanfanli/slotomania/pytypegen/contrib/contracts.py"
] |
from typing import List , Optional , Any
import tests
import typing
from dataclasses import asdict , dataclass , is_dataclass
import datetime
from enum import Enum
from typing import List , Optional
from unittest import TestCase
from pytypegen . contrib . contracts import AuthenticateUserRequest
from pytypegen . core import ( Contract , EntityTypes , Instruction , Operation , ReduxAction , contracts_to_typescript , )
class Gender ( Enum ) :
male = [number]
female = [number]
@ dataclass class Address ( Contract ) :
street = ...
@ dataclass class Person ( Contract ) :
name = ...
gender = ...
birth_date = ...
addresses = None
class DataclassConverterTestCase ( TestCase ) :
def test_dataclass_converter ( self ) :
assert is_dataclass ( Person )
man = Person ( [string] , Gender . male , datetime . datetime . utcnow ( ) , [ Address ( [string] ) ] )
woman = Person ( [string] , Gender . female , datetime . datetime . utcnow ( ) )
assert is_dataclass ( man ) and is_dataclass ( woman )
assert ( contracts_to_typescript ( contracts = [ Gender , Address , Person ] ) == [string] )
assert man == man . load_from_dict ( asdict ( man ) )
class InstructorTestCase ( TestCase ) :
def test_instruction_serialize ( self ) :
instruction = Instruction ( [ Operation . OVERWRITE ( EntityTypes . jwt_auth_token , target_value = [ AuthenticateUserRequest ( [string] , [string] ) ] , ) ] )
assert instruction . serialize ( ) == { [string] : None , [string] : [string] , [string] : [ { [string] : [string] , [string] : [string] , [string] : [ { [string] : [string] , [string] : [string] } ] , } ] , }
repos/conanfanli/slotomania/tests/test_pyty.py
from typing import Dict , Any
import typing
from unittest import TestCase
from pytypegen . pyty import Array , Shape , String , parse_type
class PytyTestCase ( TestCase ) :
def test_pyty ( self ) :
actual = Shape . load_from_dict ( [string] , { [string] : { [string] : [string] } , [string] : { [string] : [string] } , } , ) . to_dict ( )
expected = Shape ( identifier = [string] , fields = { [string] : String ( ) , [string] : Array ( [string] ) } ) . to_dict ( )
assert actual == expected
def test_parse_type ( self ) :
assert parse_type ( [string] ) . to_dict ( ) == { [string] : [string] }
assert parse_type ( [string] ) . to_dict ( ) == { [string] : [string] }
repos/conanfanli/slotomania/tests/phony/phony/wsgi.py
from typing import Any
import typing
[docstring]
import os
from django . core . wsgi import get_wsgi_application
os . environ . setdefault ( [string] , [string] )
application = get_wsgi_application ( )
repos/conanfanli/slotomania/tests/phony/phony/urls.py
from typing import List , Any
import typing
[docstring]
from django . contrib import admin
from django . urls import path
from django . views . decorators . csrf import csrf_exempt
from tests . phony . casino . views import InstructorView
urlpatterns = [ path ( [string] , admin . site . urls ) , path ( [string] , csrf_exempt ( InstructorView . as_view ( ) ) , name = [string] ) , ]
repos/conanfanli/slotomania/tests/phony/casino/admin.py
from django . contrib import admin
[comment]
repos/conanfanli/slotomania/tests/phony/casino/tests.py
from typing import Any
import typing
import builtins
import json
from typing import Any
from django . contrib . auth import get_user_model
from django . test import TestCase
from django . urls import reverse
from pytypegen . exceptions import MissingField , NotAuthenticated
class LoginTestCase ( TestCase ) :
def POST ( self , url , data ) :
return self . client . post ( url , data = json . dumps ( data ) , content_type = [string] )
def test_missing_username ( self ) :
url = reverse ( [string] , args = [ [string] ] )
assert self . client . get ( url ) . data == { }
with self . assertRaises ( MissingField ) :
self . POST ( url , data = { } )
def test_login_success ( self ) :
url = reverse ( [string] , args = [ [string] ] )
response = self . POST ( url , data = { [string] : [string] , [string] : [string] } )
assert response . status_code == [number]
assert response . data [ [string] ] == [string]
class ViewTestCase ( TestCase ) :
def setUp ( self ) :
super ( ) . setUp ( )
self . jwt_auth_token = [string]
self . user = get_user_model ( ) . objects . create_user ( username = [string] , password = [string] )
res = self . POST ( reverse ( [string] , kwargs = { [string] : [string] } ) , data = { [string] : [string] , [string] : [string] } , )
assert res . status_code == [number]
self . jwt_auth_token = next ( op for op in res . data [ [string] ] if op [ [string] ] == [string] ) [ [string] ]
def POST ( self , url , data ) :
return self . client . post ( url , data = json . dumps ( data ) , content_type = [string] , HTTP_AUTHORIZATION = f" [string] { self . jwt_auth_token }" , )
def test_not_authenticated ( self ) :
self . jwt_auth_token = [string]
url = reverse ( [string] , args = [ [string] ] )
with self . assertRaises ( NotAuthenticated ) :
self . POST ( url , { } )
[comment]
self . jwt_auth_token = [string]
with self . assertRaises ( NotAuthenticated ) :
self . POST ( url , { } )
self . jwt_auth_token = [string]
with self . assertRaises ( NotAuthenticated ) :
self . client . post ( url , data = json . dumps ( { } ) , content_type = [string] , HTTP_AUTHORIZATION = f" [string] " , )
def test_return_http_response ( self ) :
url = reverse ( [string] , args = [ [string] ] )
response = self . POST ( url , { } )
assert response . content == [string]
def test_return_instruction ( self ) :
url = reverse ( [string] , args = [ [string] ] )
response = self . POST ( url , { } )
assert response . data [ [string] ] [ [number] ] == { [string] : [string] , [string] : [string] , [string] : [ { [string] : [number] , [string] : [string] , [string] : [string] } ] , }
repos/conanfanli/slotomania/tests/phony/casino/models.py
from django . db import models
[comment]
repos/conanfanli/slotomania/tests/phony/casino/views.py
import builtins
from typing import Dict , Union , Type
import tests
import typing
import django
import pytypegen
from dataclasses import dataclass
import datetime
from decimal import Decimal
from enum import Enum
from django . http import HttpResponse
from pytypegen . contrib import contracts
from pytypegen . contrib . jwt_auth import AuthenticateUser
from pytypegen . core import Contract , Instruction
from pytypegen . core import InstructorView as BaseView
from pytypegen . core import Operation , RequestResolver
@ dataclass class Card ( Contract ) :
rank = ...
width = ...
played_at = ...
class PhonyEntityTypes ( Enum ) :
CARD = [number]
class ReturnHttpResponse ( RequestResolver ) :
use_jwt_authentication = False
data = ...
def resolve ( self ) :
return HttpResponse ( [string] )
class ReturnInstruction ( RequestResolver ) :
data = ...
def resolve ( self ) :
return Instruction ( [ Operation . MERGE_APPEND ( PhonyEntityTypes . CARD , target_value = [ Card ( rank = [number] , width = Decimal ( [string] ) , played_at = datetime . datetime ( [number] , [number] , [number] , [number] , [number] , [number] ) , ) ] , ) ] )
class InstructorView ( BaseView ) :
routes = { [string] : AuthenticateUser , [string] : ReturnHttpResponse , [string] : ReturnInstruction , }
repos/conanfanli/slotomania/tests/phony/casino/apps.py
from django . apps import AppConfig
class CasinoConfig ( AppConfig ) :
name = [string]
repos/conanfanli/slotomania/pytypegen/debugger.py
def Debugger ( ) :
import ipdb
return ipdb
repos/conanfanli/slotomania/pytypegen/pyty.py
from typing import Optional , Any , Union , Type , Match , Dict
import typing
import builtins
import pytypegen
[docstring]
import re
from typing import Type
class FieldType :
def __init__ ( self , * __type_args__ ) :
self . __type_args__ = list ( __type_args__ )
def to_dict ( self ) :
raise NotImplementedError ( )
class Primitive ( FieldType ) :
def to_dict ( self ) :
return { [string] : self . __class__ . __name__ }
class Composite ( FieldType ) :
def to_dict ( self ) :
raise NotImplementedError ( )
class String ( Primitive ) :
pass
class Integer ( Primitive ) :
pass
class Boolean ( Primitive ) :
pass
class Decimal ( Primitive ) :
pass
class Array ( Composite ) :
def __init__ ( self , element_type_string ) :
super ( ) . __init__ ( element_type_string )
self . element_type_string = element_type_string
def to_dict ( self ) :
return { [string] : f"{ self . __class__ . __name__ } [string] { self . element_type_string } [string] " }
def get_type_class_by_name ( type_name ) :
lookup = { k . __name__ : k for k in [ String , Integer , Boolean , Decimal , Array ] }
return lookup [ type_name ]
def parse_type ( field_type_string ) :
match = re . match ( [string] , field_type_string )
if not match :
raise Exception ( f" [string] { field_type_string }" )
type_name , type_args = match . groups ( )
return get_type_class_by_name ( type_name ) ( type_args [ [number] : - [number] ] if type_args else [ ] )
class Shape :
def __init__ ( self , identifier , fields ) :
self . identifier = identifier
self . fields = fields
@ classmethod def load_from_dict ( cls , identifier , fields_dict ) :
defined_shapes = { }
fields = { }
for field_name , field in fields_dict . items ( ) :
[comment]
field_type_string = field [ [string] ]
fields [ field_name ] = parse_type ( field_type_string )
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
return Shape ( identifier = identifier , fields = fields )
def to_dict ( self ) :
return { [string] : self . identifier , [string] : { name : field . to_dict ( ) for name , field in self . fields . items ( ) } , }
repos/conanfanli/slotomania/pytypegen/exceptions.py
class NotAuthenticated ( Exception ) :
pass
class UnknowFieldType ( Exception ) :
pass
class BadResolver ( Exception ) :
pass
class ValidationError ( Exception ) :
pass
class MissingField ( ValidationError ) :
pass
repos/conanfanli/slotomania/pytypegen/contrib/jwt_auth.py
from typing import ClassVar , Mapping , Dict , Any
import typing
import builtins
import pytypegen
import datetime
from typing import Any , ClassVar , Mapping
import uuid
from django . conf import settings
from django . contrib . auth import authenticate , get_user_model , login
import jwt
from pytypegen . contrib . contracts import AuthenticateUserRequest
from pytypegen . core import EntityTypes , Instruction , Operation , RequestResolver
from pytypegen . exceptions import NotAuthenticated
def authenticate_request ( request ) :
header = request . META . get ( [string] )
try :
prefix , token = header . split ( [string] )
except ( ValueError , AttributeError ) as e :
raise NotAuthenticated ( f" [string] { e } [string] { header }" )
if not token :
raise NotAuthenticated ( )
try :
payload = jwt_decode_handler ( token )
except jwt . ExpiredSignature as e :
raise NotAuthenticated ( str ( e ) )
except jwt . DecodeError as e :
raise NotAuthenticated ( str ( e ) )
except jwt . InvalidTokenError as e :
raise NotAuthenticated ( str ( e ) )
user = authenticate_credentials ( payload )
request . user = user
def authenticate_credentials ( payload ) :
username = payload . get ( [string] )
if not username :
raise NotAuthenticated ( [string] )
User = get_user_model ( )
try :
user = User . objects . get_by_natural_key ( username )
except User . DoesNotExist :
raise NotAuthenticated ( [string] )
if not user . is_active :
raise NotAuthenticated ( [string] )
return user
def jwt_decode_handler ( token ) :
options = { [string] : True }
secret_key = settings . SECRET_KEY
return jwt . decode ( token , secret_key , True , options = options , leeway = [number] , audience = None , issuer = None , algorithms = [ [string] ] , )
def jwt_encode_handler ( payload ) :
key = settings . SECRET_KEY
return jwt . encode ( payload , key , [string] ) . decode ( [string] )
def jwt_payload_handler ( user ) :
username = user . username
payload = { [string] : user . pk , [string] : username , [string] : datetime . datetime . utcnow ( ) + getattr ( settings , [string] , datetime . timedelta ( seconds = [number] ) ) , }
if hasattr ( user , [string] ) :
payload [ [string] ] = user . email
if isinstance ( user . pk , uuid . UUID ) :
payload [ [string] ] = str ( user . pk )
payload [ [string] ] = username
return payload
class AuthenticateUser ( RequestResolver ) :
[docstring]
data = ...
use_jwt_authentication = False
def resolve ( self ) :
username = self . data . username
password = self . data . password
user = authenticate ( username = username , password = password )
if user :
login ( self . request , user )
payload = jwt_payload_handler ( user )
token = jwt_encode_handler ( payload )
return Instruction ( [ Operation . OVERWRITE ( EntityTypes . jwt_auth_token , token ) ] )
return Instruction ( operations = [ ] , errors = [string] )
repos/conanfanli/slotomania/pytypegen/contrib/contracts.py
from dataclasses import dataclass
from pytypegen . core import Contract
@ dataclass class AuthenticateUserRequest ( Contract ) :
username = ...
password = ...
@ dataclass class EmptyBodySchema ( Contract ) :
pass
| What is the type of variable rank? | 3,725 | builtins.int | 3,583 |
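The `rank` field belongs to the `Card` contract, constructed in `ReturnInstruction.resolve` as `Card(rank=[number], ...)`; the recorded type `builtins.int` says that masked literal is an integer. A minimal sketch with hypothetical field annotations (the dataset masks the real ones as `...`):

```python
import datetime
from dataclasses import dataclass
from decimal import Decimal

@dataclass
class Card:
    rank: int                     # annotation assumed for illustration
    width: Decimal                # annotation assumed for illustration
    played_at: datetime.datetime  # annotation assumed for illustration

card = Card(rank=3, width=Decimal("2.5"),
            played_at=datetime.datetime(2019, 1, 1, 12, 0, 0))
print(type(card.rank))  # <class 'int'>, i.e. builtins.int
```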
| woocart/localizations | [
"repos/woocart/localizations/.circleci/wplang.py",
"repos/woocart/localizations/.circleci/csv2html.py"
] |
from typing import List
import builtins
import typing
[docstring]
[comment]
[comment]
from typing import List
WPLANGS = [ [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , ]
repos/woocart/localizations/.circleci/csv2html.py
from typing import Any , List
import _csv
import argparse
import typing
import builtins
import pathlib
[docstring]
from os import getcwd
from pathlib import Path
from random import randrange
import argparse
import csv
import urllib . request
t = [string]
def main ( csv_file , base ) :
[docstring]
products = [ ]
with open ( csv_file ) as csvfile :
out = csv . reader ( csvfile , delimiter = [string] , quotechar = [string] )
for i , row in enumerate ( out ) :
if i == [number] :
continue
name , long , short , price , images = row
if not price :
price = str ( randrange ( [number] , [number] ) )
links = [ ]
for image in images . split ( [string] ) :
stem = Path ( image . replace ( [string] , [string] ) ) . name
local = Path ( getcwd ( ) ) . joinpath ( base )
Path ( local ) . mkdir ( exist_ok = True )
local = local . joinpath ( stem )
if not Path ( local ) . exists ( ) :
urllib . request . urlretrieve ( image , local )
common = base . replace ( [string] , [string] )
links . append ( f" [string] { common } [string] { stem }" )
images = [string] . join ( links )
products . append ( t . format ( name = name , short = short . replace ( [string] , [string] ) , long = long . replace ( [string] , [string] ) , price = price , images = images , ) )
Path ( getcwd ( ) ) . joinpath ( f"{ base } [string] " ) . write_text ( [string] . join ( products ) )
if __name__ == [string] :
parser = argparse . ArgumentParser ( epilog = __doc__ )
parser . add_argument ( [string] , type = str , help = [string] )
parser . add_argument ( [string] , type = str , help = [string] )
args = parser . parse_args ( )
main ( ** vars ( args ) )
| What is the type of variable t? | 1,043 | builtins.str | 1,050 |
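In `csv2html.py`, `t` is bound once to a (masked) string literal and later used via `t.format(name=..., short=..., ...)`, so the type follows from the literal itself. A minimal sketch with a hypothetical template standing in for the masked one:

```python
# Hypothetical HTML template; the repo's real literal is masked as [string].
t = "<div><h2>{name}</h2><p>{short}</p><p>{long}</p><p>{price}</p>{images}</div>"

rendered = t.format(name="Example", short="s", long="l", price="9", images="")
print(type(t))  # <class 'str'>, i.e. builtins.str
```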
| SmartManoj/CST3 | [
"repos/SmartManoj/CST3/Data/Packages/CodeFormatter/codeformatter/lib/scssbeautifier/css/__init__.py",
"repos/SmartManoj/CST3/Data/Packages/CodeFormatter/codeformatter/lib/coldfusionbeautifier/__version__.py",
"repos/SmartManoj/CST3/Data/Packages/CodeFormatter/codeformatter/lib/coldfusionbeautifier/__init__.py"
] |
[comment]
repos/SmartManoj/CST3/Data/Packages/CodeFormatter/codeformatter/lib/coldfusionbeautifier/__version__.py
__version__ = [string]
repos/SmartManoj/CST3/Data/Packages/CodeFormatter/codeformatter/lib/coldfusionbeautifier/__init__.py
from typing import Any , Pattern , Optional , List , Iterator , Match
import typing
import io
import Data
from __future__ import print_function
import sys
import re
import sublime
try :
[comment]
from . __version__ import __version__
except (ValueError) :
[comment]
from __version__ import __version__
class BeautifierOptions :
def __init__ ( self ) :
self . indent_size = [number]
self . indent_char = [string]
self . indent_with_tabs = False
self . expand_tags = False
[comment]
self . minimum_attribute_count = [number]
self . first_attribute_on_new_line = False
self . reduce_empty_tags = False
self . reduce_whole_word_tags = False
self . exception_on_tag_mismatch = False
self . custom_singletons = [string]
def __repr__ ( self ) :
return [string] % ( self . indent_size , self . indent_char , self . indent_with_tabs , self . expand_tags , self . minimum_attribute_count , self . first_attribute_on_new_line , self . reduce_empty_tags , self . reduce_whole_word_tags , self . exception_on_tag_mismatch , self . custom_singletons )
def default_options ( ) :
return BeautifierOptions ( )
def beautify ( string , opts = default_options ( ) ) :
b = Beautifier ( string , opts )
return b . beautify ( )
def beautify_file ( file_name , opts = default_options ( ) ) :
if file_name == [string] : [comment]
stream = sys . stdin
else :
stream = open ( file_name )
content = [string] . join ( stream . readlines ( ) )
b = Beautifier ( content , opts )
return b . beautify ( )
def usage ( stream = sys . stdout ) :
print ( [string] + __version__ + [string] , file = stream )
return stream == sys . stderr
if stream == sys . stderr : return [number]
else : return [number]
class Beautifier :
def __init__ ( self , source_text , opts = default_options ( ) ) :
self . source_text = source_text
self . opts = opts
self . exception_on_tag_mismatch = opts . exception_on_tag_mismatch
self . expand_tags = opts . expand_tags
self . expand_javascript = opts . expand_javascript
self . minimum_attribute_count = opts . minimum_attribute_count
self . first_attribute_on_new_line = opts . first_attribute_on_new_line
self . reduce_empty_tags = opts . reduce_empty_tags
self . reduce_whole_word_tags = opts . reduce_whole_word_tags
self . indent_size = opts . indent_size
self . indent_char = opts . indent_char
self . indent_with_tabs = opts . indent_with_tabs
if self . indent_with_tabs :
self . indent_char = [string]
self . indent_size = [number]
self . tab_size = sublime . load_settings ( [string] ) . get ( [string] , [number] )
self . indent_level = [number]
[comment]
self . singletons = [string]
if not opts . custom_singletons == [string] :
self . singletons = re . sub ( [string] , [string] + opts . custom_singletons , self . singletons )
else :
self . singletons = re . sub ( [string] , [string] , self . singletons )
self . midle_tags = [string]
[comment]
self . singletons = re . compile ( self . singletons , re . I )
self . removed_css = [ ]
self . removed_js = [ ]
self . removed_comments = [ ]
def expand_tag ( self , str ) :
_str = str . group ( [number] ) [comment]
s = re . findall ( [string] , _str )
[comment]
if len ( s ) <= self . minimum_attribute_count : return _str
tagEnd = re . search ( [string] , _str )
if not tagEnd == None : s += [ tagEnd . group ( [number] ) ] [comment]
tag = [string] + s [ [number] ] [comment]
indent = len ( tag ) + [number] [comment]
s = s [ [number] : ] [comment]
[comment]
if self . first_attribute_on_new_line : [comment]
if self . indent_with_tabs :
indent = [number]
extra_tabs = [number]
else :
indent = self . indent_size
extra_tabs = [number]
else : [comment]
if self . indent_with_tabs :
extra_tabs = int ( indent / self . tab_size )
indent = indent % self . tab_size
else :
extra_tabs = [number]
tag += [string] + s [ [number] ]
s = s [ [number] : ] [comment]
[comment]
for l in s :
tag += [string] + ( ( ( self . indent_level * self . indent_size ) + extra_tabs ) * self . indent_char ) + ( indent * [string] ) + l
return tag
def remove_newlines ( self , ch = [string] ) : return lambda str : re . sub ( [string] , ch , str . group ( [number] ) )
def remove ( self , pattern , replacement , findList , raw ) :
pattern = re . compile ( [string] + pattern , re . S | re . I )
findList . extend ( pattern . findall ( raw ) )
return pattern . sub ( ( lambda match : match . group ( [number] ) [ : - len ( match . group ( [number] ) . lstrip ( ) ) ] + replacement ) , raw ) [comment]
def remove_js ( self , raw ) : return self . remove ( [string] , [string] , self . removed_js , raw )
def remove_css ( self , raw ) : return self . remove ( [string] , [string] , self . removed_css , raw )
def remove_comments ( self , raw ) : return self . remove ( [string] , [string] , self . removed_comments , raw )
def reindent ( self , raw , match ) :
prev_newline = [string]
lowest_indent = - [number]
for l in re . split ( [string] , raw ) :
indent = len ( l ) - len ( l . strip ( ) )
if lowest_indent == - [number] or lowest_indent > indent :
lowest_indent = indent
indent = len ( match . group ( [number] ) ) * self . indent_char
return indent + re . sub ( prev_newline , indent , re . sub ( prev_newline + ( lowest_indent * self . indent_char ) , [string] , raw . lstrip ( ) ) ) ; [comment]
def getNextFrom ( self , _list ) :
it = iter ( _list )
return lambda match : self . reindent ( next ( it ) , match )
def replace ( self , pattern , replaceList , raw ) : return re . compile ( [string] + pattern , re . S | re . I ) . sub ( self . getNextFrom ( replaceList ) , raw )
def replace_comments ( self , raw ) : return self . replace ( [string] , self . removed_comments , raw )
def replace_css ( self , raw ) : return self . replace ( [string] , self . removed_css , raw )
def replace_js ( self , raw ) : return self . replace ( [string] , self . removed_js , raw )
def beautify ( self ) :
beautiful = [string]
replaceWithSpace = self . remove_newlines ( [string] )
raw = self . source_text
[comment]
raw = self . remove_js ( raw )
raw = self . remove_css ( raw )
raw = self . remove_comments ( raw )
[comment]
raw = re . sub ( [string] , [string] , raw )
raw = re . sub ( [string] , [string] , raw )
[comment]
raw = re . sub ( [string] , [string] , re . sub ( [string] , self . remove_newlines ( ) , raw ) )
raw = re . sub ( [string] , replaceWithSpace , raw ) [comment]
[comment]
raw = re . compile ( [string] , re . S ) . sub ( replaceWithSpace , raw )
raw = self . singletons . sub ( [string] , raw ) [comment]
raw = self . singletons . sub ( replaceWithSpace , raw )
raw = re . sub ( [string] , [string] , raw )
raw = re . sub ( [string] , [string] , raw ) [comment]
for l in re . split ( [string] , raw ) :
l = l . strip ( ) [comment]
if l == [string] : continue [comment]
[comment]
if re . match ( [string] , l ) or re . search ( self . midle_tags , l ) : self . indent_level -= [number]
beautiful += ( self . indent_char * self . indent_level * self . indent_size )
if self . expand_tags :
beautiful += re . sub ( [string] , self . expand_tag , l )
else :
beautiful += l
beautiful += [string]
if self . singletons . search ( l ) : pass [comment]
elif re . search ( self . midle_tags , l ) : self . indent_level += [number]
else :
[comment]
if re . match ( [string] , l ) : self . indent_level += [number]
[comment]
if not self . indent_level == [number] and self . exception_on_tag_mismatch :
raise Exception ( [string] )
[comment]
if self . reduce_empty_tags :
beautiful = re . sub ( [string] , [string] , beautiful )
if self . reduce_whole_word_tags :
beautiful = re . sub ( [string] , [string] , beautiful )
[comment]
beautiful = self . replace_comments ( beautiful )
beautiful = self . replace_css ( beautiful )
beautiful = self . replace_js ( beautiful )
return beautiful
| What is the type of variable indent_with_tabs? | 2,360 | builtins.bool | 2,380 |
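`indent_with_tabs` is initialised to the literal `False` in `BeautifierOptions.__init__` and copied from `opts` in `Beautifier.__init__`, so the type comes straight from the boolean literal. A condensed sketch of just those assignments:

```python
class BeautifierOptions:
    def __init__(self) -> None:
        # assigned a boolean literal, so the attribute is builtins.bool
        self.indent_with_tabs = False

class Beautifier:
    def __init__(self, opts: BeautifierOptions) -> None:
        self.indent_with_tabs = opts.indent_with_tabs  # still bool

print(type(Beautifier(BeautifierOptions()).indent_with_tabs))  # <class 'bool'>
```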
| monkut/kippo | [
"repos/monkut/kippo/kippo/projects/management/commands/load_from_db.py"
] |
import builtins
from typing import Type , Tuple , Any , Dict
import kippo
import typing
[docstring]
from django . core . management . base import BaseCommand , CommandError
from django . utils . translation import ugettext as _
from django . conf import settings
import psycopg2
import psycopg2 . extras
from accounts . models import KippoOrganization , KippoUser , OrganizationMembership
from projects . models import KippoProject , KippoProjectStatus , ProjectColumnSet
from octocat . models import GithubRepository , GithubRepositoryLabelSet
from tasks . models import KippoTask , KippoTaskStatus
try :
CLI_USER = KippoUser . objects . get ( username = settings . CLI_MANAGER_USERNAME )
except KippoUser . DoesNotExist :
raise CommandError ( f' [string] { settings . CLI_MANAGER_USERNAME }' )
ADMIN_USER = KippoUser . objects . get ( username = [string] )
GITHUB_USER = KippoUser . objects . get ( username = [string] )
DEFAULT_LABELSET = GithubRepositoryLabelSet . objects . all ( ) [ [number] ]
DEFAULT_COLUMNSET = ProjectColumnSet . objects . all ( ) [ [number] ]
class Command ( BaseCommand ) :
help = __doc__
def add_arguments ( self , parser ) :
parser . add_argument ( [string] , [string] , type = str , default = None , required = True , help = _ ( [string] ) )
parser . add_argument ( [string] , default = [string] , )
parser . add_argument ( [string] , [string] , type = int , default = [number] , help = _ ( [string] ) )
parser . add_argument ( [string] , [string] , type = str , default = [string] )
parser . add_argument ( [string] , default = [string] , )
def handle ( self , * args , ** options ) :
params = { [string] : options [ [string] ] , [string] : options [ [string] ] , [string] : options [ [string] ] , [string] : options [ [string] ] , [string] : options [ [string] ] }
with psycopg2 . connect ( ** params ) as conn :
with conn . cursor ( cursor_factory = psycopg2 . extras . DictCursor ) as cursor :
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name } [string] " )
existing_users = { u . username : u for u in KippoUser . objects . all ( ) }
user_previous_id = { }
for result in cursor :
existing_user = existing_users . get ( result [ [string] ] , None )
if existing_user :
self . stdout . write ( f' [string] { existing_user } [string] { result [ [string] ] } [string] ' )
user_previous_id [ result [ [string] ] ] = existing_user
else :
user = KippoUser ( is_superuser = result [ [string] ] , username = result [ [string] ] , first_name = result [ [string] ] , last_name = result [ [string] ] , is_staff = result [ [string] ] , github_login = result [ [string] ] , holiday_country_id = result [ [string] ] )
self . stdout . write ( f' [string] { user } [string] { result [ [string] ] } [string] ' )
user . save ( )
user_previous_id [ result [ [string] ] ] = user
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
existing_organizations = { o . name : o for o in KippoOrganization . objects . all ( ) }
organization_previous_id = { }
for result in cursor :
existing_organization = existing_organizations . get ( result [ [string] ] , None )
if existing_organization :
self . stdout . write ( f' [string] { existing_organization }' )
organization_previous_id [ result [ [string] ] ] = existing_organization
else :
new_organization = KippoOrganization ( name = result [ [string] ] , github_organization_name = result [ [string] ] , default_task_category = result [ [string] ] , default_task_display_state = result [ [string] ] , day_workhours = result [ [string] ] , created_datetime = result [ [string] ] , updated_datetime = result [ [string] ] , created_by = ADMIN_USER , updated_by = ADMIN_USER , )
self . stdout . write ( f' [string] { new_organization }' )
new_organization . save ( )
organization_previous_id [ result [ [string] ] ] = new_organization
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
for result in cursor :
member = user_previous_id . get ( result [ [string] ] , None )
if member :
result = dict ( result )
result . pop ( [string] )
result [ [string] ] = organization_previous_id [ result [ [string] ] ]
result . pop ( [string] )
result [ [string] ] = user_previous_id [ result [ [string] ] ]
result . pop ( [string] )
result . pop ( [string] )
result . pop ( [string] )
membership = OrganizationMembership ( created_by = ADMIN_USER , updated_by = ADMIN_USER , ** result )
self . stdout . write ( f' [string] { membership }' )
membership . save ( )
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
previous_project_id = { }
existing_projects = { p . name : p for p in KippoProject . objects . all ( ) }
for result in cursor :
existing_project = existing_projects . get ( result [ [string] ] )
if existing_project :
self . stdout . write ( f' [string] { existing_project }' )
previous_project_id [ result [ [string] ] ] = existing_project
else :
organization = organization_previous_id [ result [ [string] ] ]
project = KippoProject ( name = result [ [string] ] , created_datetime = result [ [string] ] , updated_datetime = result [ [string] ] , organization = organization , columnset = DEFAULT_COLUMNSET , created_by = ADMIN_USER , updated_by = ADMIN_USER , )
self . stdout . write ( f' [string] { project }' )
project . save ( )
previous_project_id [ result [ [string] ] ] = project
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
existing_projectstatuses = { p . comment : p for p in KippoProjectStatus . objects . all ( ) }
for result in cursor :
existing_projectstatus = existing_projectstatuses . get ( result [ [string] ] , None )
if not existing_projectstatus :
created_by_user = user_previous_id . get ( result [ [string] ] , ADMIN_USER )
updated_by_user = user_previous_id . get ( result [ [string] ] , ADMIN_USER )
projectstatus = KippoProjectStatus ( created_datetime = result [ [string] ] , updated_datetime = result [ [string] ] , created_by = created_by_user , updated_by = updated_by_user , project = previous_project_id [ result [ [string] ] ] , comment = result [ [string] ] , )
self . stdout . write ( f' [string] { projectstatus }' )
projectstatus . save ( )
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
existing_repos = { r . html_url : r for r in GithubRepository . objects . all ( ) }
for result in cursor :
existing_repo = existing_repos . get ( result [ [string] ] )
if not existing_repo :
organization = organization_previous_id [ result [ [string] ] ]
repo = GithubRepository ( name = result [ [string] ] , api_url = result [ [string] ] , html_url = result [ [string] ] , label_set = DEFAULT_LABELSET , created_datetime = result [ [string] ] , updated_datetime = result [ [string] ] , organization = organization , created_by = GITHUB_USER , updated_by = GITHUB_USER , )
self . stdout . write ( f' [string] { repo }' )
repo . save ( )
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
task_previous_id = { }
existing_tasks = { t . github_issue_html_url : t for t in KippoTask . objects . all ( ) }
for result in cursor :
existing_task = existing_tasks . get ( result [ [string] ] )
if existing_task :
self . stdout . write ( f' [string] { existing_task }' )
task_previous_id [ result [ [string] ] ] = existing_task
else :
result = dict ( result )
previous_id = result . pop ( [string] )
result [ [string] ] = previous_project_id [ result [ [string] ] ] . id
result [ [string] ] = user_previous_id [ result [ [string] ] ] . id
result . pop ( [string] )
result . pop ( [string] )
task = KippoTask ( created_by = GITHUB_USER , updated_by = GITHUB_USER , ** result )
self . stdout . write ( f' [string] { task }' )
task . save ( )
task_previous_id [ previous_id ] = task
[comment]
table_name = [string]
cursor . execute ( f" [string] { table_name }" )
exisiting_taskstatuses = { ( t . effort_date , t . task_id ) : t for t in KippoTaskStatus . objects . all ( ) }
for result in cursor :
key = ( result [ [string] ] , task_previous_id [ result [ [string] ] ] . id )
exisiting_taskstatus = exisiting_taskstatuses . get ( key , None )
if not exisiting_taskstatus :
result = dict ( result )
result . pop ( [string] )
result . pop ( [string] )
result . pop ( [string] )
result [ [string] ] = task_previous_id [ result [ [string] ] ] . id
taskstatus = KippoTaskStatus ( created_by = GITHUB_USER , updated_by = GITHUB_USER , ** result )
self . stdout . write ( f' [string] { taskstatus }' )
taskstatus . save ( )
| What is the type of variable existing_project? | 2,395 | None | 2,416 |
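In the management command, `existing_project = existing_projects.get(result[[string]])`. `dict.get` without a default returns the stored value when the key exists and `None` otherwise, which is presumably how `None` ends up as the recorded type here. A minimal sketch with a hypothetical empty mapping:

```python
from typing import Dict, Optional

# Stand-in for {project_name: KippoProject}; empty so the lookup misses.
existing_projects: Dict[str, object] = {}

existing_project: Optional[object] = existing_projects.get("some project name")
print(existing_project, type(existing_project))  # None <class 'NoneType'>
```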
| lidatong/dataclasses-json | [
"repos/lidatong/dataclasses-json/tests/test_enum.py",
"repos/lidatong/dataclasses-json/tests/hypothesis2/strategies.py",
"repos/lidatong/dataclasses-json/tests/hypothesis2/core.py",
"repos/lidatong/dataclasses-json/tests/hypothesis2/__init__.py",
"repos/lidatong/dataclasses-json/tests/test_annotations.py"
] |
import builtins
from typing import Any
import typing
import tests
import json
from enum import Enum
from typing import Dict , List
import pytest
from dataclasses import dataclass
from dataclasses_json import dataclass_json
from marshmallow . exceptions import ValidationError
class MyEnum ( Enum ) :
STR1 = [string]
STR2 = [string]
STR3 = [string]
INT1 = [number]
FLOAT1 = [number]
class MyStrEnum ( str , Enum ) :
STR1 = [string]
@ dataclass_json @ dataclass ( frozen = True ) class DataWithEnum :
name = ...
my_enum = MyEnum . STR3
d1 = DataWithEnum ( [string] , MyEnum . STR1 )
d1_json = [string]
[comment]
d2_using_default_value = DataWithEnum ( [string] )
d2_json = [string]
d3_int = DataWithEnum ( [string] , MyEnum . INT1 )
d3_int_json = [string]
d4_float = DataWithEnum ( [string] , MyEnum . FLOAT1 )
d4_float_json = [string]
@ dataclass_json @ dataclass ( frozen = True ) class DataWithStrEnum :
my_str_enum = MyEnum . STR1
ds = DataWithStrEnum ( MyStrEnum . STR1 )
ds_json = [string]
@ dataclass_json @ dataclass ( frozen = True ) class EnumContainer :
enum_list = ...
dict_enum_value = ...
container_json = [string]
container = EnumContainer ( enum_list = [ MyEnum . STR3 , MyEnum . INT1 ] , dict_enum_value = { [string] : MyEnum . STR1 , [string] : MyEnum . FLOAT1 } )
class TestEncoder :
def test_data_with_enum ( self ) :
assert d1 . to_json ( ) == d1_json , f' [string] { d1 . to_json ( ) } [string] { d1_json }'
assert d3_int . to_json ( ) == d3_int_json , f' [string] { d3_int . to_json ( ) } [string] { d3_int_json }'
assert d4_float . to_json ( ) == d4_float_json , f' [string] { d4_float . to_json ( ) } [string] { d4_float_json }'
def test_data_with_str_enum ( self ) :
assert ds . to_json ( ) == ds_json , f' [string] { ds . to_json ( ) } [string] { ds_json }'
def test_data_with_enum_default_value ( self ) :
d2_to_json = d2_using_default_value . to_json ( )
assert d2_to_json == d2_json , f" [string] " f" [string] { d2_json } [string] { d2_to_json }"
def test_collection_with_enum ( self ) :
assert container . to_json ( ) == container_json
class TestDecoder :
def test_data_with_enum ( self ) :
d1_from_json = DataWithEnum . from_json ( d1_json )
assert d1 == d1_from_json
assert d1_from_json . to_json ( ) == d1_json
d3_int_from_json = DataWithEnum . from_json ( d3_int_json )
assert d3_int == d3_int_from_json
assert d3_int_from_json . to_json ( ) == d3_int_json
d4_float_from_json = DataWithEnum . from_json ( d4_float_json )
assert d4_float == d4_float_from_json
assert d4_float_from_json . to_json ( ) == d4_float_json
def test_data_with_str_enum ( self ) :
ds_from_json = DataWithStrEnum . from_json ( ds_json )
assert ds == ds_from_json
assert ds_from_json . to_json ( ) == ds_json
def test_data_with_enum_default_value ( self ) :
d2_from_json = DataWithEnum . from_json ( d2_json )
assert d2_using_default_value == d2_from_json
json_from_d2 = d2_from_json . to_json ( )
assert json_from_d2 == d2_json , f" [string] " f" [string] { d2_json } [string] { json_from_d2 }"
def test_collection_with_enum ( self ) :
container_from_json = EnumContainer . from_json ( container_json )
assert container == container_from_json
assert container_from_json . to_json ( ) == container_json
class TestValidator :
@ pytest . mark . parametrize ( [string] , [ ( [string] , True ) , ( [string] , True ) , ( [string] , True ) , ( [number] , False ) , ( [number] , False ) , ( [string] , False ) , ( [number] , False ) , ( [number] , False ) , ] ) def test_data_with_enum ( self , enum_value , is_valid ) :
data = [string] + str ( enum_value ) + [string]
schema = DataWithEnum . schema ( )
res = schema . validate ( json . loads ( data ) )
no_errors = not res
assert no_errors == is_valid , str ( res )
@ pytest . mark . parametrize ( [string] , [ ( [string] , True ) , ( [string] , False ) , ] ) def test_data_with_str_enum ( self , enum_value , is_valid ) :
data = [string] + str ( enum_value ) + [string]
schema = DataWithStrEnum . schema ( )
res = schema . validate ( json . loads ( data ) )
no_errors = not res
assert no_errors == is_valid
class TestLoader :
@ pytest . mark . parametrize ( [string] , [ ( d1_json , d1 ) , ( d2_json , d2_using_default_value ) , ( d3_int_json , d3_int ) , ( d4_float_json , d4_float ) , ] ) def test_data_with_enum ( self , json_data , expected_data ) :
schema = DataWithEnum . schema ( )
assert schema . loads ( json_data ) == expected_data
def test_data_with_enum_exception ( self ) :
schema = DataWithEnum . schema ( )
with pytest . raises ( ValidationError ) :
schema . loads ( [string] )
@ pytest . mark . parametrize ( [string] , [ ( ds_json , ds ) , ] ) def test_data_with_str_enum ( self , json_data , expected_data ) :
schema = DataWithStrEnum . schema ( )
assert schema . loads ( json_data ) == expected_data
def test_data_with_str_enum_exception ( self ) :
schema = DataWithStrEnum . schema ( )
with pytest . raises ( ValidationError ) :
schema . loads ( [string] )
repos/lidatong/dataclasses-json/tests/hypothesis2/strategies.py
from collections import deque
from hypothesis . strategies import lists , none , one_of
def deques ( elements = None , min_size = [number] , max_size = None , unique_by = None , unique = False ) :
return lists ( ** locals ( ) ) . map ( deque )
def optionals ( strategy ) :
return one_of ( strategy , none ( ) )
repos/lidatong/dataclasses-json/tests/hypothesis2/core.py
from typing import Any
import typing
from hypothesis import example
def examples ( * args ) :
[docstring]
def examples_decorator ( f ) :
g = f
for arg in args :
g = example ( arg ) ( g )
return g
return examples_decorator
repos/lidatong/dataclasses-json/tests/hypothesis2/__init__.py
[docstring]
from tests . hypothesis2 . core import examples
repos/lidatong/dataclasses-json/tests/test_annotations.py
from typing import Optional , Any , List , Type , Dict , Tuple
import builtins
import typing
import tests
import dataclasses_json
import json
import logging
import os
[comment]
from dataclasses import dataclass
from io import StringIO
from typing import Any , Dict , List , NewType , Optional , Tuple , Union
from mypy . main import main as mypy_main
from dataclasses_json import DataClassJsonMixin , CatchAll
@ dataclass class User ( DataClassJsonMixin ) :
id = ...
name = [string]
ca = None
Filename = NewType ( [string] , str )
LineNumber = NewType ( [string] , int )
ErrorLevel = NewType ( [string] , str )
ErrorMessage = NewType ( [string] , str )
class TestAnnotations :
u = User ( [string] )
j = u . to_json ( )
u2 = User . from_json ( j )
u2a = User . from_json ( j . encode ( ) )
jMany = [ { [string] : [string] , [string] : [string] } , { [string] : [string] , [string] : [string] } ]
sch = User . schema ( )
users1 = sch . loads ( json . dumps ( jMany ) , many = True )
n = users1 [ [number] ] . name
users2 = sch . load ( jMany , many = True ) [comment]
u3 = sch . load ( jMany [ [number] ] )
j2 = sch . dump ( u )
j3 = sch . dump ( [ u2 , u3 ] , many = True )
j4 = sch . dumps ( u2 )
j4_dict = json . loads ( j4 )
u4a = User . from_json ( j4 )
u4b = User . from_dict ( j4_dict )
def filter_errors ( self , errors ) :
real_errors = list ( )
current_file = __file__
current_path = os . path . split ( current_file )
for line in errors :
line = line . strip ( )
if ( not line ) :
continue
fn , lno , lvl , msg = self . parse_trace_line ( line )
if ( fn is not None ) :
_path = os . path . split ( fn )
if ( _path [ - [number] ] != current_path [ - [number] ] ) :
continue
real_errors . append ( line )
return real_errors
def parse_trace_line ( self , line ) :
[comment]
file_name = ...
line_no = ...
level = ...
msg = ...
where , sep , msg = line . partition ( [string] )
if (sep) :
file_name , sep , line_no = where . rpartition ( [string] )
file_name = Filename ( file_name )
if (sep) :
line_no = LineNumber ( int ( line_no ) )
else :
line_no = None
level , sep , msg = msg . partition ( [string] )
if (sep) :
level = ErrorLevel ( level )
else :
msg = level
level = None
else :
[comment]
[comment]
file_name = Filename ( [string] ) if line . startswith ( [string] ) else None
line_no = None
level = None
msg = line
msg = ErrorMessage ( msg )
return file_name , line_no , level , msg
def test_type_hints ( self ) :
text_io = StringIO ( [string] )
try :
[comment]
[comment]
mypy_main ( None , text_io , text_io , [ __file__ ] )
except SystemExit :
[comment]
[comment]
errors = text_io . getvalue ( ) . splitlines ( )
errors = self . filter_errors ( errors )
else :
errors = None
[comment]
if (errors) :
logging . error ( [string] . join ( errors ) )
raise AssertionError ( [string] )
| What is the type of variable _path? | 2,693 | typing.Tuple[builtins.str,builtins.str] | 2,670 |
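`_path = os.path.split(fn)` (and likewise `current_path = os.path.split(current_file)`); `os.path.split` always returns a `(head, tail)` pair of strings, matching the recorded `Tuple[str, str]`. A short confirmation with a hypothetical path:

```python
import os

_path = os.path.split("/tmp/example/test_annotations.py")  # hypothetical path
print(_path)                    # ('/tmp/example', 'test_annotations.py')
print(type(_path), len(_path))  # <class 'tuple'> 2, i.e. Tuple[str, str]
```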
| topher200/assertion-context | [
"repos/topher200/assertion-context/src/lib/traceback/traceback_formatter.py",
"repos/topher200/assertion-context/src/lib/traceback/test_assertion_regex.py",
"repos/topher200/assertion-context/src/lib/jira/jira_issue_db.py"
] |
from typing import Set , Callable , List , Any , Optional
import logging
import builtins
import lib
import typing
from typing import Optional , Callable , Set
import logging
from common_util import ( config_util , )
from lib . fullstory import fullstory
from lib . traceback . traceback import Traceback
logger = logging . getLogger ( )
KIBANA_REDIRECT_URL = config_util . get ( [string] )
PRODUCT_URL = config_util . get ( [string] )
TIMESTAMP_TEMPLATE = [string]
[docstring]
PAPERTRAIL_LINK_JIRA_TEMPLATE = [string]
PAPERTRAIL_LINK_SLACK_TEMPLATE = [string]
[docstring]
FULLSTORY_LINK_JIRA_TEMPLATE = [string]
FULLSTORY_LINK_SLACK_TEMPLATE = [string]
[docstring]
PROFILE_NAME_JIRA_TEMPLATE = [string]
PROFILE_NAME_SLACK_TEMPLATE = [string]
[docstring]
USERNAME_JIRA_TEMPLATE = [string]
USERNAME_SLACK_TEMPLATE = [string]
[docstring]
def jira_formatted_string ( t , include_profile_link , include_user_link ) :
[docstring]
[comment]
timestamp_str = PAPERTRAIL_LINK_JIRA_TEMPLATE . format ( timestamp = t . origin_timestamp . strftime ( TIMESTAMP_TEMPLATE ) , kibana_redirect_url = KIBANA_REDIRECT_URL , papertrail_id = t . origin_papertrail_id )
[comment]
profile_str = None
if t . profile_name :
if include_profile_link :
profile_str = PROFILE_NAME_JIRA_TEMPLATE . format ( profile_name = t . profile_name , product_url = PRODUCT_URL , )
else :
profile_str = t . profile_name
user_str = None
if t . username :
if include_user_link :
user_str = USERNAME_JIRA_TEMPLATE . format ( username = t . username , product_url = PRODUCT_URL , )
else :
user_str = t . username
[comment]
fullstory_link = fullstory . get_link_to_session_at_traceback_time ( t )
fullstory_str = None
if fullstory_link :
fullstory_str = FULLSTORY_LINK_JIRA_TEMPLATE . format ( fullstory_link = fullstory_link , )
[comment]
combined_str = [string] . join ( s for s in ( timestamp_str , profile_str , user_str , fullstory_str , ) if s is not None )
return [string] % combined_str
def slack_formatted_string ( t , include_profile_link , include_user_link ) :
[docstring]
[comment]
timestamp_str = PAPERTRAIL_LINK_SLACK_TEMPLATE . format ( timestamp = t . origin_timestamp . strftime ( TIMESTAMP_TEMPLATE ) , kibana_redirect_url = KIBANA_REDIRECT_URL , papertrail_id = t . origin_papertrail_id )
[comment]
profile_str = None
if t . profile_name :
if include_profile_link :
profile_str = PROFILE_NAME_SLACK_TEMPLATE . format ( profile_name = t . profile_name , product_url = PRODUCT_URL )
else :
profile_str = t . profile_name
user_str = None
if t . username :
if include_user_link :
user_str = USERNAME_SLACK_TEMPLATE . format ( username = t . username , product_url = PRODUCT_URL )
else :
user_str = t . username
[comment]
fullstory_link = fullstory . get_link_to_session_at_traceback_time ( t )
fullstory_str = None
if fullstory_link :
fullstory_str = FULLSTORY_LINK_SLACK_TEMPLATE . format ( fullstory_link = fullstory_link , )
[comment]
combined_str = [string] . join ( s for s in ( timestamp_str , profile_str , user_str , fullstory_str , ) if s is not None )
return [string] % combined_str
def create_hits_list ( tracebacks , formatter , max_number_hits = None ) :
[docstring]
seen_profile_names = set ( )
seen_usernames = set ( )
hits_list = [ ]
for t in tracebacks :
[comment]
if t . profile_name and t . profile_name not in seen_profile_names :
include_profile_link = True
if t . profile_name . isdigit ( ) :
[comment]
include_profile_link = False
seen_profile_names . add ( t . profile_name )
else :
include_profile_link = False
if t . username and t . username not in seen_usernames :
include_username_link = True
seen_usernames . add ( t . username )
else :
include_username_link = False
[comment]
if t . username and t . username . startswith ( [string] ) :
include_username_link = False
hits_list . append ( formatter ( t , include_profile_link , include_username_link ) )
[comment]
index = len ( hits_list )
if max_number_hits :
index = min ( index , max_number_hits )
comment_string = [string]
for index in range ( index , [number] , - [number] ) :
comment_string = [string] . join ( hits_list [ : index ] )
if len ( comment_string ) < [number] :
break
logger . info ( [string] , len ( tracebacks ) , len ( hits_list ) , index , len ( comment_string ) )
return comment_string
repos/topher200/assertion-context/src/lib/traceback/test_assertion_regex.py
from typing import List
import typing
import unittest
from lib . traceback . parser import Parser
LOG_LINES_THAT_SHOULD_MATCH = [ [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , ]
LOG_LINES_THAT_SHOULD_NOT_MATCH = [ [string] , [string] , [string] , [string] , [string] , [string] , [string] , ]
class TestAssertionRegex ( unittest . TestCase ) :
def test_lines_that_should_match ( self ) :
[docstring]
for line in LOG_LINES_THAT_SHOULD_MATCH :
self . assertTrue ( Parser . log_line_contains_important_error ( line ) , line )
def test_lines_that_should_not_match ( self ) :
[docstring]
for line in LOG_LINES_THAT_SHOULD_NOT_MATCH :
self . assertFalse ( Parser . log_line_contains_important_error ( line ) , line )
repos/topher200/assertion-context/src/lib/jira/jira_issue_db.py
from typing import Any , Union , List , Dict
import logging
import builtins
import lib
import typing
[docstring]
from typing import ( List , )
import logging
import elasticsearch
from opentracing_instrumentation . request_context import get_current_span
import opentracing
from lib . jira . jira_issue import JiraIssue , generate_from_source
from common_util import ( es_util , redis_util , retry , )
DOGPILE_REGION_PREFIX = [string]
DOGPILE_REGION = redis_util . make_dogpile_region ( DOGPILE_REGION_PREFIX )
def invalidate_cache ( ) :
redis_util . force_redis_cache_invalidation ( DOGPILE_REGION_PREFIX )
INDEX = [string]
DOC_TYPE = [string]
logger = logging . getLogger ( )
@ retry . Retry ( exceptions = ( elasticsearch . exceptions . ConnectionTimeout , ) ) def save_jira_issue ( es , jira_issue ) :
[docstring]
assert isinstance ( jira_issue , JiraIssue ) , ( type ( jira_issue ) , jira_issue )
doc = jira_issue . document ( )
res = es . index ( index = INDEX , doc_type = DOC_TYPE , id = jira_issue . key , body = doc )
invalidate_cache ( )
return res
@ retry . Retry ( exceptions = ( elasticsearch . exceptions . ConnectionTimeout , ) ) def remove_jira_issue ( es , issue_key ) :
[docstring]
try :
es . delete ( index = INDEX , doc_type = DOC_TYPE , id = issue_key )
except elasticsearch . exceptions . NotFoundError :
return [comment]
else :
invalidate_cache ( )
@ retry . Retry ( exceptions = ( elasticsearch . exceptions . ConnectionTimeout , ) ) def refresh ( es ) :
[docstring]
es . indices . refresh ( index = INDEX )
@ DOGPILE_REGION . cache_on_arguments ( ) @ retry . Retry ( exceptions = ( elasticsearch . exceptions . ConnectionTimeout , ) ) def get_matching_jira_issues ( es , tracer , traceback_text , match_level ) :
[docstring]
assert isinstance ( traceback_text , str ) , ( type ( traceback_text ) , traceback_text )
assert match_level in es_util . ALL_MATCH_LEVELS , ( match_level , es_util . ALL_MATCH_LEVELS )
tracer = tracer or opentracing . tracer
body = es_util . generate_text_match_payload ( traceback_text , [ [string] , [string] ] , match_level )
root_span = get_current_span ( )
with tracer . start_span ( [string] , child_of = root_span ) :
try :
raw_es_response = es . search ( index = INDEX , doc_type = DOC_TYPE , body = body , size = [number] )
except elasticsearch . exceptions . NotFoundError :
logger . warning ( [string] )
return [ ]
res = [ ]
for raw_jira_issue in raw_es_response [ [string] ] [ [string] ] :
res . append ( generate_from_source ( raw_jira_issue [ [string] ] ) )
return res
def search_jira_issues ( es , search_phrase , max_count ) :
[docstring]
body = { [string] : { [string] : { [string] : search_phrase , [string] : [ [string] , [string] , [string] , [string] ] , [string] : [string] , } } }
raw_es_response = es . search ( index = INDEX , doc_type = DOC_TYPE , body = body , size = max_count )
res = [ ]
for raw_jira_issue in raw_es_response [ [string] ] [ [string] ] :
res . append ( generate_from_source ( raw_jira_issue [ [string] ] ) )
return res
@ retry . Retry ( exceptions = ( elasticsearch . exceptions . ConnectionTimeout , ) ) def get_num_jira_issues ( es ) :
[docstring]
try :
return es . count ( index = INDEX , doc_type = DOC_TYPE , body = { } , ) [ [string] ]
except elasticsearch . exceptions . NotFoundError :
logger . warning ( [string] )
return [number]

question: What is the type of variable INDEX?
length: 2,344
answer: builtins.str
full_len: 2,341
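The answer follows from the assignment INDEX = [string] in jira_issue_db.py above: a module-level name bound to a string literal gets the type builtins.str. A minimal sketch, with hypothetical placeholder values since the dataset masks the literals:

INDEX: str = "jira-issue-index"  # hypothetical value; any string literal makes INDEX a builtins.str
DOC_TYPE: str = "jira-issue"     # the neighbouring constant is masked the same way
assert type(INDEX) is str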

repo_name: tikatoo/tikatwo
fname: [
"repos/tikatoo/tikatwo/tikatwo/__main__.py",
"repos/tikatoo/tikatwo/tikatwo/__init__.py",
"repos/tikatoo/tikatwo/tikatwo/channel.py",
"repos/tikatoo/tikatwo/tikatwo/modules/__init__.py",
"repos/tikatoo/tikatwo/tikatwo/modules/counter.py",
"repos/tikatoo/tikatwo/tikatwo/modules/module.py"
]
context:
import pathlib
import tikatwo
from os import walk
from pathlib import Path
from . import TikaBot
confpath = Path ( [string] )
if not confpath . is_dir ( ) :
if confpath . exists ( ) :
raise OSError ( f" [string] { confpath !r} [string] " )
else :
raise OSError ( f" [string] { confpath !r} [string] " )
connpath = confpath / [string]
if not connpath . is_file ( ) :
raise OSError ( f" [string] { connpath !r}" )
bot = TikaBot ( confpath )
bot . run ( )
repos/tikatoo/tikatwo/tikatwo/__init__.py
import builtins
from typing import Any , List , Optional , MutableMapping
import twitchio
import pathlib
import tikatwo
import typing
from pathlib import Path
from typing import Any , List , MutableMapping , NamedTuple , Optional
import aiosqlite
import toml
import twitchio
from twitchio . ext import commands
from . channel import Channel
class _ChannelInfo ( NamedTuple ) :
name = ...
config = ...
class TikaBot ( commands . Bot ) :
_channelinfo = ...
_channels = ...
def __init__ ( self , confpath ) :
with ( confpath / [string] ) . open ( ) as rf :
connection = toml . load ( rf )
con_twitch = connection [ [string] ]
super ( ) . __init__ ( irc_token = con_twitch [ [string] ] , client_id = con_twitch [ [string] ] , nick = con_twitch [ [string] ] , prefix = [string] , )
channels = confpath / [string]
if not channels . is_dir ( ) :
raise OSError ( f" [string] { channels !r}" )
self . _channelinfo = [ ]
self . _channels = { }
for chanpath in channels . iterdir ( ) :
channame = chanpath . stem
if chanpath . is_file ( ) and chanpath . suffix == [string] :
with chanpath . open ( ) as rf :
chanconf = toml . load ( rf )
else :
raise OSError ( f" [string] { chanpath !r}" )
self . _channelinfo . append ( _ChannelInfo ( channame , chanconf ) )
self . _channels [ channame ] = Channel ( chanconf , self . nick , self . get_channel ( channame ) )
async def event_ready ( self ) :
for info in self . _channelinfo :
await self . join_channels ( ( info . name , ) )
self . _channels [ info . name ] = Channel ( info . config , self . nick , self . get_channel ( info . name ) )
print ( f' [string] { self . nick }' )
async def event_message ( self , message ) :
print ( [string] , message . author . name + [string] , message . content )
if message . author . name != self . nick :
await self . _channels [ message . channel . name ] . handle_message ( message )
await self . handle_commands ( message )
repos/tikatoo/tikatwo/tikatwo/channel.py
from typing import Match , List , MutableMapping , Dict , Optional , Any
import modules
import tikatwo
import builtins
import typing
import twitchio
import functools
import re
import time
from dataclasses import dataclass
from typing import Any , Awaitable , Callable , List , MutableMapping , Match , Optional
import twitchio
from . modules import MessageHandler , Module , mapping as modules
@ dataclass class _PatternsEntry :
matcher = ...
handlers = ...
timeout = ...
last_invoke = [number]
class _RegexMatcher :
class _UserFetch :
def __init__ ( self , me ) :
self . me = self [ me ]
def __getitem__ ( self , key ) :
return f" [string] { key } [string] "
def __init__ ( self , nick , re_src , flags = re . RegexFlag . IGNORECASE ) :
self . _re = re . compile ( self . _preprocess ( re_src , nick ) , flags = flags )
def _preprocess ( self , re_src , nick ) :
return re_src . format ( u = self . _UserFetch ( nick ) )
def __call__ ( self , msg ) :
return self . _re . search ( msg . content )
class Channel :
_channel = ...
_patterns = ...
modules = ...
def __init__ ( self , config , _nick , _channel ) :
self . _channel = _channel
self . _patterns = [ ]
self . modules = { }
default_timeout = [number]
if [string] in config and [string] in config [ [string] ] :
default_timeout = config [ [string] ] [ [string] ]
for pattern in config [ [string] ] :
if [string] in pattern :
matcher = _RegexMatcher ( _nick , pattern [ [string] ] )
handlers = [ ]
if [string] in pattern :
for modname in pattern [ [string] ] :
if modname in self . modules :
module = self . modules [ modname ]
else :
if [string] in config and modname in config [ [string] ] :
modconf = config [ [string] ] [ modname ]
else :
modconf = { }
module = modules [ modname ] ( _channel , modconf )
self . modules [ modname ] = module
handlers . append ( module . handler ( pattern ) )
response = ( ( [string] + pattern [ [string] ] )
if [string] in pattern
else pattern [ [string] ]
if [string] in pattern
else None )
if response is not None :
handlers . append ( functools . partial ( self . handle_respond , response ) )
if handlers :
self . _patterns . append ( _PatternsEntry ( matcher , handlers , pattern [ [string] ] if [string] in pattern
else default_timeout ) )
async def handle_message ( self , message ) :
for pattern in self . _patterns :
[comment]
match = pattern . matcher ( message ) [comment]
if match :
now = time . monotonic ( )
if now < ( pattern . last_invoke + pattern . timeout ) :
continue
pattern . last_invoke = now
for handler in pattern . handlers :
await handler ( message , match )
async def handle_respond ( self , response , message , match ) :
await self . _channel . send ( response . format ( m = message , a = match , ** self . modules ) )
repos/tikatoo/tikatwo/tikatwo/modules/__init__.py
from typing import Type , Mapping
import module
import typing
import builtins
from typing import Callable , Mapping , Type
from . module import MessageHandler , Module
from . counter import Counter
mapping = { [string] : Counter }
repos/tikatoo/tikatwo/tikatwo/modules/counter.py
from typing import MutableMapping , List
import twitchio
import module
import typing
import builtins
import itertools
import functools
from typing import List , MutableMapping
import twitchio
from . module import MessageHandler , Module
class Counter ( Module ) :
counters = ...
def __init__ ( self , channel , options ) :
super ( ) . __init__ ( channel , options )
self . counters = { }
def __getitem__ ( self , name ) :
return self . counters [ name ]
def handler ( self , options ) :
incrs = [ ]
decrs = [ ]
if [string] in options :
incrs = options [ [string] ]
if isinstance ( incrs , str ) :
incrs = [ incrs ]
if [string] in options :
decrs = options [ [string] ]
if isinstance ( decrs , str ) :
decrs = [ decrs ]
for counter in itertools . chain ( incrs , decrs ) :
if counter not in self . counters :
self . counters [ counter ] = [number]
return functools . partial ( self . count , incrs , decrs )
async def count ( self , incrs , decrs , message , match ) :
for counter in incrs :
self . counters [ counter ] += [number]
for counter in decrs :
self . counters [ counter ] -= [number]
repos/tikatoo/tikatwo/tikatwo/modules/module.py
from typing import Any , Type , Callable , Dict
import twitchio
import typing
import builtins
from typing import Any , Awaitable , Callable , Optional
import twitchio
MessageHandler = Callable [ [ twitchio . Message , Any ] , Awaitable [ None ] ]
class Module :
options = ...
channel = ...
def __init__ ( self , channel , options ) :
self . options = options
self . channel = channel
def handler ( self , options ) :
raise NotImplementedError ( )

question: What is the type of variable matcher?
length: 2,127
answer: typing.Callable[[twitchio.Message],typing.Any]
full_len: 2,081
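The answer reflects that matcher above is bound to a _RegexMatcher instance whose __call__ accepts a message and returns the result of a regex search, so the instance behaves as a Callable[[twitchio.Message], typing.Any]. A minimal, self-contained sketch of that shape; FakeMessage and the pattern are hypothetical stand-ins for twitchio.Message and the configured regex:

import re
from typing import Any, Callable, Optional

class FakeMessage:
    # hypothetical stand-in for twitchio.Message; only .content is used here
    def __init__(self, content: str) -> None:
        self.content = content

class DemoMatcher:
    # condensed from _RegexMatcher: compile once, search the message content on call
    def __init__(self, re_src: str, flags: re.RegexFlag = re.IGNORECASE) -> None:
        self._re = re.compile(re_src, flags)

    def __call__(self, msg: FakeMessage) -> Optional[re.Match]:
        return self._re.search(msg.content)

# an instance can be used anywhere a Callable[[FakeMessage], Any] is expected
matcher: Callable[[FakeMessage], Any] = DemoMatcher(r"hello")
print(bool(matcher(FakeMessage("Hello chat"))))  # True: the search matched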

repo_name: SUNET/cnaas-nac
fname: [
"repos/SUNET/cnaas-nac/setyp.py",
"repos/SUNET/cnaas-nac/alembic/env.py",
"repos/SUNET/cnaas-nac/alembic/versions/538170dfe877_added_freeradius_tables.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/run.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/__init__.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/version.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tools/log.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tools/db_replicator.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tools/helpers.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tools/rad_db.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tools/db_cleanup.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/api/auth.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/api/app.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/api/generic.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/tests/test_api.py",
"repos/SUNET/cnaas-nac/src/cnaas_nac/db/session.py"
]
context:
from typing import List
import typing
import os
import setuptools
from src . cnaas_nms import version
here = os . path . abspath ( os . path . dirname ( __file__ ) )
README = open ( os . path . join ( here , [string] ) ) . read ( )
version = version . __version__
requires = open ( os . path . join ( here , [string] ) , [string] ) . read ( ) . split ( )
setuptools . setup ( name = [string] , author = [string] , author_email = [string] , version = version , description = [string] , long_description = README , long_description_content_type = [string] , url = [string] , classifiers = [ [string] , [string] , [string] , ] , package_dir = { [string] : [string] } , packages = setuptools . find_packages ( [string] , exclude = [ [string] ] ) , include_package_data = True , install_requires = requires , )
repos/SUNET/cnaas-nac/alembic/env.py
from typing import List , Any
import typing
import sys
sys . path . append ( [string] )
from logging . config import fileConfig
from sqlalchemy . ext . declarative import declarative_base
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
[comment]
[comment]
config = context . config
from cnaas_nac . db . session import get_sqlalchemy_conn_str
config . set_main_option ( [string] , get_sqlalchemy_conn_str ( ) )
[comment]
[comment]
fileConfig ( config . config_file_name )
[comment]
[comment]
[comment]
[comment]
from cnaas_nac . db . user import User , Reply
Base = declarative_base ( )
target_metadata = Base . metadata
[comment]
[comment]
[comment]
[comment]
def include_object ( object , name , type_ , reflected , compare_to ) :
ignore_names = [ [string] ]
if type_ == [string] and name in ignore_names :
return False
return True
def run_migrations_offline ( ) :
[docstring]
url = config . get_main_option ( [string] )
context . configure ( url = url , target_metadata = target_metadata , include_object = include_object , literal_binds = True )
with context . begin_transaction ( ) :
context . run_migrations ( )
def run_migrations_online ( ) :
[docstring]
connectable = engine_from_config ( config . get_section ( config . config_ini_section ) , prefix = [string] , poolclass = pool . NullPool , )
with connectable . connect ( ) as connection :
context . configure ( connection = connection , target_metadata = target_metadata , include_object = include_object )
with context . begin_transaction ( ) :
context . run_migrations ( )
if context . is_offline_mode ( ) :
run_migrations_offline ( )
else :
run_migrations_online ( )
repos/SUNET/cnaas-nac/alembic/versions/538170dfe877_added_freeradius_tables.py
[docstring]
from alembic import op
import sqlalchemy as sa
[comment]
revision = [string]
down_revision = [string]
branch_labels = None
depends_on = None
def upgrade ( ) :
pass
def downgrade ( ) :
pass
repos/SUNET/cnaas-nac/src/cnaas_nac/run.py
from typing import Any
import typing
import os
from cnaas_nac . api import app
os . environ [ [string] ] = os . getcwd ( )
def get_app ( ) :
return app . app
if __name__ == [string] :
get_app ( ) . run ( debug = True , host = [string] , port = [number] )
else :
cnaas_app = get_app ( )
repos/SUNET/cnaas-nac/src/cnaas_nac/__init__.py
[comment]
__import__ ( [string] ) . declare_namespace ( __name__ )
repos/SUNET/cnaas-nac/src/cnaas_nac/version.py
from typing import Tuple
import typing
__version__ = [string]
__version_info__ = tuple ( [ int ( num ) for num in __version__ . split ( [string] ) ] )
__api_version__ = [string]
repos/SUNET/cnaas-nac/src/cnaas_nac/tools/log.py
import logging
import logging
from flask import current_app
def get_logger ( ) :
if current_app :
logger = current_app . logger
else :
logger = logging . getLogger ( [string] )
if not logger . handlers :
formatter = logging . Formatter ( [string] )
[comment]
handler = logging . StreamHandler ( )
handler . setFormatter ( formatter )
logger . addHandler ( handler )
logger . setLevel ( logging . DEBUG ) [comment]
return logger
repos/SUNET/cnaas-nac/src/cnaas_nac/tools/db_replicator.py
from typing import List , Any
import typing
import builtins
import sys
import getopt
from cnaas_nac . db . session import sqla_session
from cnaas_nac . db . user import User , Reply , UserInfo
from cnaas_nac . db . nas import NasPort
from cnaas_nac . db . accounting import Accounting
from cnaas_nac . tools . log import get_logger
from cnaas_nac . tools . rad_db import edit_nas , edit_replies , edit_users , get_connstrs , get_rows , copy_accounting , edit_userinfo
logger = get_logger ( )
def diff_rows ( list_a , list_b ) :
diff = [ ]
for x in list_a :
if [string] in x :
del x [ [string] ]
for x in list_b :
if [string] in x :
del x [ [string] ]
for item in list_a :
if item not in list_b :
diff . append ( item )
return diff
def rad_replicate ( db_source , db_target , username , password , table ) :
connstr_source , connstr_target = get_connstrs ( db_source , db_target , username , password )
[comment]
[comment]
source = get_rows ( connstr_source , table = table )
if table == User :
logger . info ( [string] )
edit_users ( source , connstr_target )
elif table == Reply :
logger . info ( [string] )
edit_replies ( source , connstr_target )
elif table == NasPort :
logger . info ( [string] )
edit_nas ( source , connstr_target )
elif table == Accounting :
logger . info ( [string] )
copy_accounting ( source , connstr_target )
elif table == UserInfo :
logger . info ( [string] )
edit_userinfo ( source , connstr_target )
[comment]
[comment]
target = get_rows ( connstr_target , table = table )
diff = diff_rows ( target , source )
if table == User :
logger . info ( [string] )
edit_users ( diff , connstr_target , remove = True )
elif table == Reply :
logger . info ( [string] )
edit_replies ( diff , connstr_target , remove = True )
elif table == NasPort :
logger . info ( [string] )
edit_nas ( diff , connstr_target , remove = True )
elif table == UserInfo :
logger . info ( [string] )
edit_userinfo ( diff , connstr_target , remove = True )
def usage ( ) :
print ( [string] )
sys . exit ( [number] )
def main ( argv ) :
source = None
target = None
username = None
password = None
try :
opts , args = getopt . getopt ( argv , [string] )
except getopt . GetoptError as e :
print ( str ( e ) )
usage ( )
for opt , arg in opts :
if opt == [string] :
source = arg
if opt == [string] :
target = arg
if opt == [string] :
username = arg
if opt == [string] :
password = arg
if source is None or target is None or username is None or password is None :
usage ( )
try :
logger . info ( [string] . format ( source , target ) )
logger . info ( [string] )
rad_replicate ( source , target , username , password , User )
logger . info ( [string] )
rad_replicate ( source , target , username , password , Reply )
logger . info ( [string] )
rad_replicate ( source , target , username , password , NasPort )
logger . info ( [string] )
rad_replicate ( source , target , username , password , UserInfo )
[comment]
[comment]
[comment]
logger . info ( [string] )
except Exception as e :
logger . warning ( [string] + str ( e ) )
if __name__ == [string] :
main ( sys . argv [ [number] : ] )
repos/SUNET/cnaas-nac/src/cnaas_nac/tools/helpers.py
from typing import List , Any
import typing
def get_user_replies ( username , replies ) :
result = [ ]
for user_reply in replies :
if [string] not in user_reply :
continue
if user_reply [ [string] ] != username :
continue
del user_reply [ [string] ]
del user_reply [ [string] ]
result . append ( user_reply )
return result
def get_user_port ( username , nas_ports ) :
nas_port = None
for port in nas_ports :
if port [ [string] ] == username :
nas_port = port
return nas_port
def get_is_active ( username , users ) :
for user in users :
if [string] not in user :
continue
if user [ [string] ] != username :
continue
if user [ [string] ] == [string] :
return True
return False
def get_last_seen ( username , last_seen ) :
for user in last_seen :
if [string] not in user :
continue
if user [ [string] ] != username :
continue
return str ( user [ [string] ] )
return None
def get_last_reply ( username , last_seen ) :
for user in last_seen :
if [string] not in user :
continue
if user [ [string] ] != username :
continue
return user [ [string] ]
return None
repos/SUNET/cnaas-nac/src/cnaas_nac/tools/rad_db.py
from typing import List , Any
import typing
import builtins
from cnaas_nac . db . session import sqla_session
from cnaas_nac . db . user import User , Reply , UserInfo
from cnaas_nac . db . nas import NasPort
from cnaas_nac . db . accounting import Accounting
def get_connstrs ( source , target , username , password ) :
connstr_source = [string] . format ( username , password , source )
connstr_target = [string] . format ( username , password , target )
return ( connstr_source , connstr_target )
def get_rows ( connstr , table ) :
with sqla_session ( connstr ) as session :
rows_list = [ ]
if table == Accounting :
rows = session . query ( table ) . order_by ( table . radacctid ) . all ( )
else :
rows = session . query ( table ) . order_by ( table . id ) . all ( )
for row in rows :
rows_list . append ( row . as_dict ( ) )
return rows_list
def edit_users ( diff , connstr , remove = False ) :
if diff is None :
return
with sqla_session ( connstr ) as session :
for diff_user in diff :
username = diff_user [ [string] ]
attribute = diff_user [ [string] ]
op = diff_user [ [string] ]
value = diff_user [ [string] ]
user = session . query ( User ) . filter ( User . username == username ) . one_or_none ( )
if remove and user is not None :
print ( [string] . format ( username ) )
session . delete ( user )
elif user is None and not remove :
print ( [string] . format ( username ) )
new_user = User ( )
new_user . username = username
new_user . attribute = attribute
new_user . op = op
new_user . value = value
session . add ( new_user )
elif user is not None :
if attribute != user . attribute or op != user . op or value != user . value :
print ( [string] . format ( username ) )
user . attribute = attribute
user . op = op
user . value = value
session . commit ( )
def edit_replies ( diff , connstr , remove = False ) :
if diff is None :
return
with sqla_session ( connstr ) as session :
for diff_user in diff :
username = diff_user [ [string] ]
attribute = diff_user [ [string] ]
op = diff_user [ [string] ]
value = diff_user [ [string] ]
reply = session . query ( Reply ) . filter ( Reply . username == username ) . filter ( Reply . attribute == attribute ) . one_or_none ( )
if remove and reply is not None :
print ( [string] . format ( username ) )
session . delete ( reply )
elif reply is None and not remove :
print ( [string] . format ( username ) )
new_reply = Reply ( )
new_reply . username = username
new_reply . attribute = attribute
new_reply . op = op
new_reply . value = value
session . add ( new_reply )
elif reply is not None :
if attribute != reply . attribute or op != reply . op or value != reply . value :
print ( [string] . format ( username ) )
reply . attribute = attribute
reply . op = op
reply . value = value
session . commit ( )
def edit_nas ( nas_diff , connstr , table = NasPort , remove = False ) :
if nas_diff is None :
return
with sqla_session ( connstr ) as session :
for diff_user in nas_diff :
username = diff_user [ [string] ]
nas_identifier = diff_user [ [string] ]
nas_port_id = diff_user [ [string] ]
nas_ip_address = diff_user [ [string] ]
calling_station_id = diff_user [ [string] ]
called_station_id = diff_user [ [string] ]
nas = session . query ( table ) . filter ( NasPort . username == username ) . one_or_none ( )
if remove and nas is not None :
print ( [string] . format ( username ) )
session . delete ( nas )
elif nas is None and not remove :
print ( [string] . format ( username ) )
new_nas = table ( )
new_nas . username = username
new_nas . nas_identifier = nas_identifier
new_nas . nas_port_id = nas_port_id
new_nas . nas_ip_address = nas_ip_address
new_nas . calling_station_id = calling_station_id
new_nas . called_station_id = called_station_id
session . add ( new_nas )
elif nas is not None :
if nas_identifier != nas . nas_identifier or nas_port_id != nas . nas_port_id or nas_ip_address != nas . nas_ip_address or calling_station_id != nas . calling_station_id or called_station_id != nas . called_station_id :
print ( [string] . format ( username ) )
nas . nas_identifier = nas_identifier
nas . nas_port_id = nas_port_id
nas . nas_ip_address = nas_ip_address
nas . calling_station_id = calling_station_id
nas . called_station_id = called_station_id
session . commit ( )
def copy_accounting ( diffs , connstr , table = Accounting ) :
if diffs is None or diffs == [ ] :
return
with sqla_session ( connstr ) as session :
for diff in diffs :
if diff is None :
continue
radacctid = diff [ [string] ]
acctsessionid = diff [ [string] ]
acctuniqueid = diff [ [string] ]
username = diff [ [string] ]
groupname = diff [ [string] ]
realm = diff [ [string] ]
nasipaddress = diff [ [string] ]
nasportid = diff [ [string] ]
nasporttype = diff [ [string] ]
acctstarttime = diff [ [string] ]
acctupdatetime = diff [ [string] ]
acctstoptime = diff [ [string] ]
acctinterval = diff [ [string] ]
acctsessiontime = diff [ [string] ]
acctauthentic = diff [ [string] ]
connectinfo_start = diff [ [string] ]
connectinfo_stop = diff [ [string] ]
acctinputoctets = diff [ [string] ]
acctoutputoctets = diff [ [string] ]
calledstationid = diff [ [string] ]
callingstationid = diff [ [string] ]
acctterminatecause = diff [ [string] ]
servicetype = diff [ [string] ]
framedprotocol = diff [ [string] ]
framedipaddress = diff [ [string] ]
acct = session . query ( table ) . filter ( Accounting . acctuniqueid == acctuniqueid ) . one_or_none ( )
if acct is None :
print ( [string] . format ( acctsessionid ) )
new_acct = table ( )
new_acct . radacctid = radacctid
new_acct . acctsessionid = acctsessionid
new_acct . acctuniqueid = acctuniqueid
new_acct . username = username
new_acct . groupname = groupname
new_acct . realm = realm
new_acct . nasipaddress = nasipaddress
new_acct . nasportid = nasportid
new_acct . nasporttype = nasporttype
new_acct . acctstarttime = acctstarttime
new_acct . acctupdatetime = acctupdatetime
new_acct . acctstoptime = acctstoptime
new_acct . acctinterval = acctinterval
new_acct . acctsessiontime = acctsessiontime
new_acct . acctauthentic = acctauthentic
new_acct . connectinfo_start = connectinfo_start
new_acct . connectinfo_stop = connectinfo_stop
new_acct . acctinputoctets = acctinputoctets
new_acct . acctoutputoctets = acctoutputoctets
new_acct . calledstationid = calledstationid
new_acct . callingstationid = callingstationid
new_acct . acctterminatecause = acctterminatecause
new_acct . servicetype = servicetype
new_acct . framedprotocol = framedprotocol
new_acct . framedipaddress = framedipaddress
session . add ( new_acct )
session . commit ( )
def edit_userinfo ( userinfo_diff , connstr , table = UserInfo , remove = False ) :
if userinfo_diff is None :
return
with sqla_session ( connstr ) as session :
for diff_user in userinfo_diff :
username = diff_user [ [string] ]
comment = diff_user [ [string] ]
reason = diff_user [ [string] ]
userinfo = session . query ( table ) . filter ( UserInfo . username == username ) . one_or_none ( )
if remove and userinfo is not None :
print ( [string] . format ( username ) )
session . delete ( userinfo )
elif userinfo is None and not remove :
print ( [string] . format ( username ) )
new_userinfo = table ( )
new_userinfo . username = username
new_userinfo . comment = comment
new_userinfo . reason = reason
session . add ( new_userinfo )
elif userinfo is not None :
if comment != userinfo . comment or reason != userinfo . reason :
print ( [string] . format ( username ) )
userinfo . comment = comment
userinfo . reason = reason
session . commit ( )
repos/SUNET/cnaas-nac/src/cnaas_nac/tools/db_cleanup.py
from typing import Any
import typing
import os
import time
from cnaas_nac . tools . log import get_logger
from cnaas_nac . db . user import User , PostAuth
from cnaas_nac . db . nas import NasPort
logger = get_logger ( )
ONE_MONTH = [number]
def db_cleanup ( ) :
if [string] in os . environ :
logger . info ( [string] )
return [string]
users = User . get ( )
pattern = [string]
logger . info ( [string] )
for user in users :
[comment]
if user [ [string] ] != [string] :
logger . info ( [string] . format ( user [ [string] ] ) )
continue
postauth = PostAuth . get_last_seen ( username = user [ [string] ] , last = True )
if len ( postauth ) > [number] :
logger . info ( [string] . format ( user [ [string] ] ) )
continue
last_seen = postauth [ [number] ] [ [string] ]
last_seen_epoch = int ( time . mktime ( time . strptime ( last_seen , pattern ) ) )
current_epoch = int ( time . time ( ) )
if current_epoch - last_seen_epoch >= ONE_MONTH :
logger . info ( [string] . format ( user [ [string] ] ) )
User . delete ( user [ [string] ] )
User . reply_delete ( user [ [string] ] )
NasPort . delete ( user [ [string] ] )
return [string]
if __name__ == [string] :
[comment]
db_cleanup ( )
repos/SUNET/cnaas-nac/src/cnaas_nac/api/auth.py
from typing import List , Any
import typing
import os
from flask import request
from flask_restplus import Resource , Namespace , fields
from flask_jwt_extended import jwt_required
from cnaas_nac . api . generic import empty_result
from cnaas_nac . tools . log import get_logger
from cnaas_nac . db . user import User , get_users , UserInfo
from cnaas_nac . db . oui import DeviceOui
from cnaas_nac . db . nas import NasPort
from cnaas_nac . version import __api_version__
logger = get_logger ( )
api = Namespace ( [string] , description = [string] , prefix = [string] . format ( __api_version__ ) )
user_add = api . model ( [string] , { [string] : fields . String ( required = True ) , [string] : fields . String ( required = True ) , [string] : fields . Integer ( required = True ) , [string] : fields . String ( required = True ) , [string] : fields . String ( required = True ) , [string] : fields . String ( required = True ) , [string] : fields . String ( required = True ) , [string] : fields . String ( required = True ) } )
user_enable = api . model ( [string] , { [string] : fields . Boolean ( required = True ) } )
def accept ( username , data = { } ) :
UserInfo . add ( username , reason = [string] )
return empty_result ( status = [string] , data = data )
def reject ( username , errstr = [string] ) :
UserInfo . add ( username , reason = errstr )
return empty_result ( status = [string] , data = errstr ) , [number]
class AuthApi ( Resource ) :
def validate ( self , json_data ) :
if [string] not in json_data :
raise ValueError ( [string] )
else :
username = json_data [ [string] ]
if [string] in json_data :
password = json_data [ [string] ]
else :
password = username
if [string] in json_data :
vlan = json_data [ [string] ]
else :
vlan = [number]
if [string] not in json_data :
nas_identifier = None
else :
nas_identifier = json_data [ [string] ]
if [string] not in json_data :
nas_port_id = None
else :
nas_port_id = json_data [ [string] ]
if [string] not in json_data :
nas_ip_address = None
else :
nas_ip_address = json_data [ [string] ]
if [string] not in json_data :
calling_station_id = None
else :
calling_station_id = json_data [ [string] ]
if [string] not in json_data :
called_station_id = None
else :
called_station_id = json_data [ [string] ]
if nas_identifier == [string] or nas_identifier is None :
nas_identifier = username
return username , password , vlan , nas_identifier , nas_port_id , calling_station_id , called_station_id , nas_identifier , nas_ip_address
[comment]
def get ( self ) :
return empty_result ( status = [string] , data = get_users ( ) )
@ api . expect ( user_add ) def post ( self ) :
errors = [ ]
json_data = request . get_json ( )
try :
username , password , vlan , nas_identifier , nas_port_id , calling_station_id , called_station_id , nas_identifier , nas_ip_address = self . validate ( json_data )
except Exception as e :
return reject ( username , str ( e ) )
for user in User . get ( username ) :
[comment]
if user [ [string] ] != username :
continue
logger . info ( [string] . format ( user [ [string] ] ) )
[comment]
nas_ports = NasPort . get ( username )
if nas_ports is not None :
[comment]
[comment]
[comment]
for port in nas_ports :
if port [ [string] ] == nas_port_id and port [ [string] ] == called_station_id :
logger . info ( [string] . format ( nas_port_id , called_station_id , username ) )
[comment]
if User . is_enabled ( username ) :
logger . info ( [string] )
return accept ( username )
else :
[comment]
[comment]
logger . info ( [string] . format ( nas_port_id , called_station_id , port [ [string] ] , port [ [string] ] ) )
return reject ( username , [string] . format ( called_station_id , nas_port_id , port [ [string] ] , port [ [string] ] ) )
[comment]
[comment]
[comment]
if [string] in os . environ :
if os . environ [ [string] ] == [string] :
if User . is_enabled ( username ) :
return accept ( username )
else :
logger . info ( [string] )
return reject ( username , [string] )
[comment]
[comment]
if User . add ( username , password ) != [string] :
logger . info ( [string] . format ( username ) )
if User . reply_add ( username , vlan ) != [string] :
logger . info ( [string] . format ( username ) )
else :
if DeviceOui . exists ( username ) :
logger . info ( [string] )
oui_vlan = DeviceOui . get_vlan ( username )
User . reply_vlan ( username , oui_vlan )
User . enable ( username )
res = NasPort . add ( username , nas_ip_address , nas_identifier , nas_port_id , calling_station_id , called_station_id )
if res != [string] :
logger . info ( res )
if User . is_enabled ( username ) :
return accept ( username )
if errors != [ ] :
logger . info ( [string] . format ( errors ) )
return reject ( username , errors )
logger . info ( [string] )
return reject ( username , [string] )
class AuthApiByName ( Resource ) :
def error ( self , errstr ) :
return empty_result ( status = [string] , data = errstr ) , [number]
[comment]
def get ( self , username ) :
return empty_result ( status = [string] , data = get_users ( username ) )
[comment]
@ api . expect ( user_enable ) def put ( self , username ) :
json_data = request . get_json ( )
result = [string]
if [string] in json_data :
if json_data [ [string] ] is True :
result = User . enable ( username )
else :
result = User . disable ( username )
if [string] in json_data :
result = User . reply_vlan ( username , json_data [ [string] ] )
if result != [string] :
return empty_result ( status = [string] , data = result ) , [number]
return empty_result ( status = [string] )
[comment]
def delete ( self , username ) :
errors = [ ]
result = User . delete ( username )
if result != [string] :
errors . append ( result )
result = User . reply_delete ( username )
if result != [string] :
errors . append ( result )
result = NasPort . delete ( username )
if result != [string] :
errors . append ( result )
result = UserInfo . delete ( username )
if result != [string] :
errors . append ( result )
if errors != [ ] :
return reject ( errors )
return empty_result ( status = [string] )
api . add_resource ( AuthApi , [string] )
api . add_resource ( AuthApiByName , [string] )
repos/SUNET/cnaas-nac/src/cnaas_nac/api/app.py
from typing import Dict , Any
import flask
import typing
import src
import os
from flask import Flask , request , jsonify
from flask_restplus import Api
from flask_jwt_extended import JWTManager , decode_token
from flask_jwt_extended . exceptions import NoAuthorizationError
from cnaas_nac . api . auth import api as auth_api
from cnaas_nac . version import __api_version__
from cnaas_nac . tools . log import get_logger
from jwt . exceptions import DecodeError , InvalidSignatureError , InvalidTokenError
logger = get_logger ( )
authorizations = { [string] : { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] } }
class CnaasApi ( Api ) :
def handle_error ( self , e ) :
if isinstance ( e , DecodeError ) :
data = { [string] : [string] , [string] : [string] }
elif isinstance ( e , InvalidTokenError ) :
data = { [string] : [string] , [string] : [string] }
elif isinstance ( e , InvalidSignatureError ) :
data = { [string] : [string] , [string] : [string] }
elif isinstance ( e , IndexError ) :
[comment]
[comment]
data = { [string] : [string] , [string] : [string] }
elif isinstance ( e , NoAuthorizationError ) :
data = { [string] : [string] , [string] : [string] }
else :
return super ( CnaasApi , self ) . handle_error ( e )
return jsonify ( data )
app = Flask ( __name__ )
app . config [ [string] ] = os . urandom ( [number] )
app . config [ [string] ] = open ( [string] ) . read ( )
app . config [ [string] ] = [string]
app . config [ [string] ] = [string]
app . config [ [string] ] = False
jwt = JWTManager ( app )
api = CnaasApi ( app , prefix = [string] . format ( __api_version__ ) , authorizations = authorizations , security = [string] )
api . add_namespace ( auth_api )
[comment]
@ app . after_request def log_request ( response ) :
try :
token = request . headers . get ( [string] ) . split ( [string] ) [ - [number] ]
user = decode_token ( token ) . get ( [string] )
except Exception :
user = [string]
logger . info ( [string] . format ( user , request . method , response . status_code , request . url , request . json ) )
return response
repos/SUNET/cnaas-nac/src/cnaas_nac/api/generic.py
from typing import Dict , Any
import typing
import werkzeug
import builtins
from flask import request
def limit_results ( ) :
[docstring]
limit = [number]
args = request . args
if [string] in args :
try :
r_limit = int ( args [ [string] ] )
limit = max ( [number] , min ( [number] , r_limit ) )
except Exception :
pass
return limit
def build_filter ( f_class , query ) :
args = request . args
if [string] not in args :
return query
split = args [ [string] ] . split ( [string] )
if not len ( split ) == [number] :
[comment]
return query
attribute , value = split
if attribute not in f_class . __table__ . _columns . keys ( ) :
[comment]
return query
kwargs = { attribute : value }
return query . filter_by ( ** kwargs )
def empty_result ( status = [string] , data = None ) :
if status == [string] :
return { [string] : status , [string] : data }
elif status == [string] :
return { [string] : status , [string] : data if data else [string] }
repos/SUNET/cnaas-nac/src/cnaas_nac/tests/test_api.py
from typing import Dict , Any
import typing
import unittest
import cnaas_nac . api . app
class ApiTests ( unittest . TestCase ) :
def setUp ( self ) :
self . app = cnaas_nac . api . app . app
self . client = self . app . test_client ( )
def tearDown ( self ) :
pass
def test_01_add_user ( self ) :
json = { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] }
res = self . client . post ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_02_enable_user ( self ) :
json = { [string] : True }
res = self . client . put ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_03_authenticate_user ( self ) :
json = { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] }
res = self . client . post ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
res = self . client . get ( [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
def test_04_set_vlan ( self ) :
json = { [string] : [string] }
res = self . client . put ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_05_authenticate_user_new_vlan ( self ) :
res = self . client . get ( [string] )
self . assertEqual ( res . status_code , [number] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
def test_06_wrong_port ( self ) :
json = { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] }
res = self . client . post ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_06_wrong_station ( self ) :
json = { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] }
res = self . client . post ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_07_disable_user ( self ) :
json = { [string] : False }
res = self . client . put ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_08_authenticate_user ( self ) :
json = { [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] , [string] : [string] }
res = self . client . post ( [string] , json = json )
self . assertEqual ( res . status_code , [number] )
def test_09_verify_user_data ( self ) :
res = self . client . get ( [string] )
self . assertEqual ( res . status_code , [number] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
self . assertEqual ( res . json [ [string] ] [ [number] ] [ [string] ] , [string] )
def test_99_delete_user ( self ) :
res = self . client . delete ( [string] )
self . assertEqual ( res . status_code , [number] )
res = self . client . get ( [string] )
self . assertEqual ( res . status_code , [number] )
self . assertEqual ( res . json [ [string] ] , [ ] )
repos/SUNET/cnaas-nac/src/cnaas_nac/db/session.py
from typing import Any
import typing
import builtins
import os
import yaml
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy . orm import sessionmaker
def get_dbdata ( config = [string] ) :
with open ( config , [string] ) as db_file :
return yaml . safe_load ( db_file )
def get_sqlalchemy_conn_str ( ** kwargs ) :
db_data = get_dbdata ( ** kwargs )
if [string] in os . environ :
db_data [ [string] ] = os . environ [ [string] ]
if [string] in os . environ :
db_data [ [string] ] = os . environ [ [string] ]
if [string] in os . environ :
db_data [ [string] ] = os . environ [ [string] ]
if [string] in os . environ :
db_data [ [string] ] = os . environ [ [string] ]
if [string] in os . environ :
db_data [ [string] ] = os . environ [ [string] ]
return ( f"{ db_data [ [string] ] } [string] { db_data [ [string] ] } [string] { db_data [ [string] ] } [string] " f"{ db_data [ [string] ] } [string] { db_data [ [string] ] } [string] { db_data [ [string] ] }" )
@ contextmanager def sqla_session ( conn_str = [string] , ** kwargs ) :
if conn_str == [string] :
conn_str = get_sqlalchemy_conn_str ( )
engine = create_engine ( conn_str , pool_size = [number] , max_overflow = [number] )
connection = engine . connect ( )
Session = sessionmaker ( bind = engine )
session = Session ( )
try :
yield session
session . commit ( )
except Exception :
session . rollback ( )
raise
finally :
session . close ( )

question: What is the type of variable depends_on?
length: 9,434
answer: None
full_len: 9,288
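The answer comes from the Alembic revision header in 538170dfe877_added_freeradius_tables.py above, where depends_on is bound to the literal None. A short sketch of such a header; the revision identifiers are hypothetical placeholders for the values the dataset masks as [string]:

revision = "538170dfe877"       # hypothetical, echoing the migration file name
down_revision = "0123456789ab"  # hypothetical placeholder
branch_labels = None
depends_on = None               # bound to None, hence the inferred type None
assert depends_on is None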

repo_name: gwax/mtg_ssm
fname: [
"repos/gwax/mtg_ssm/setup.py",
"repos/gwax/mtg_ssm/mtg_ssm/__init__.py",
"repos/gwax/mtg_ssm/mtg_ssm/version.py",
"repos/gwax/mtg_ssm/mtg_ssm/scryfall/models.py",
"repos/gwax/mtg_ssm/mtg_ssm/scryfall/__init__.py",
"repos/gwax/mtg_ssm/mtg_ssm/scryfall/fetcher.py",
"repos/gwax/mtg_ssm/mtg_ssm/scryfall/third_party/__init__.py",
"repos/gwax/mtg_ssm/mtg_ssm/scryfall/third_party/marshmallow_fields.py"
]
context:
from typing import Dict , List
import typing
[docstring]
import sys
import setuptools
if sys . version_info < ( [number] , [number] ) :
raise Exception ( [string] )
[comment]
__version__ = None
exec ( open ( [string] , [string] , encoding = [string] ) . read ( ) )
SHORT_DESCRIPTION = [string]
LONG_DESCRIPTION = open ( [string] , [string] , encoding = [string] ) . read ( )
DEPENDENCIES = [ l . strip ( ) for l in open ( [string] , [string] , encoding = [string] ) ]
TEST_DEPENDENCIES = [ l . strip ( ) for l in open ( [string] , [string] , encoding = [string] ) ]
if sys . version_info < ( [number] , [number] ) :
DEPENDENCIES . append ( [string] )
CLASSIFIERS = [ [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , [string] , ]
EXTRAS = { [string] : [string] }
setuptools . setup ( name = [string] , version = __version__ , description = SHORT_DESCRIPTION , long_description = LONG_DESCRIPTION , author = [string] , author_email = [string] , url = [string] , packages = setuptools . find_packages ( exclude = ( [string] , ) ) , package_data = { [string] : [ [string] ] } , license = [string] , platforms = [ [string] ] , keywords = [string] , classifiers = CLASSIFIERS , install_requires = DEPENDENCIES , tests_require = TEST_DEPENDENCIES , extras_require = EXTRAS , entry_points = { [string] : [ [string] ] } , )
repos/gwax/mtg_ssm/mtg_ssm/__init__.py
[docstring]
from mtg_ssm . version import __version__
repos/gwax/mtg_ssm/mtg_ssm/version.py
[docstring]
__version__ = [string]
repos/gwax/mtg_ssm/mtg_ssm/scryfall/models.py
from typing import ClassVar
import typing
import builtins
[docstring]
from dataclasses import dataclass
import datetime as dt
from decimal import Decimal
from enum import Enum
from typing import ClassVar
from typing import Dict
from typing import NewType
from typing import Optional
from typing import Sequence
from uuid import UUID
URI = NewType ( [string] , str )
class ScryObject :
[docstring]
object = [string]
class ScryColor ( str , Enum ) :
[docstring]
WHITE = [string]
BLUE = [string]
BLACK = [string]
RED = [string]
GREEN = [string]
COLORLESS = [string]
class ScrySetType ( str , Enum ) :
[docstring]
CORE = [string]
EXPANSION = [string]
MASTERS = [string]
MASTERPIECE = [string]
FROM_THE_VAULT = [string]
SPELLBOOK = [string]
PREMIUM_DECK = [string]
DUEL_DECK = [string]
DRAFT_INNOVATION = [string]
TREASURE_CHEST = [string]
COMMANDER = [string]
PLANECHASE = [string]
ARCHENEMY = [string]
VANGUARD = [string]
FUNNY = [string]
STARTER = [string]
BOX = [string]
PROMO = [string]
TOKEN = [string]
MEMORABILIA = [string]
class ScryCardLayout ( str , Enum ) :
[docstring]
NORMAL = [string]
SPLIT = [string]
FLIP = [string]
TRANSFORM = [string]
MODAL_DFC = [string]
MELD = [string]
LEVELER = [string]
SAGA = [string]
ADVENTURE = [string]
PLANAR = [string]
SCHEME = [string]
VANGUARD = [string]
TOKEN = [string]
DOUBLE_FACED_TOKEN = [string]
EMBLEM = [string]
AUGMENT = [string]
HOST = [string]
ART_SERIES = [string]
DOUBLE_SIDED = [string]
class ScryCardFrame ( str , Enum ) :
[docstring]
Y1993 = [string]
Y1997 = [string]
Y2003 = [string]
Y2015 = [string]
FUTURE = [string]
class ScryFrameEffect ( str , Enum ) :
[docstring]
NONE = [string]
LEGENDARY = [string]
MIRACLE = [string]
NYXBORN = [string]
NYXTOUCHED = [string]
DRAFT = [string]
DEVOID = [string]
TOMBSTONE = [string]
COLORSHIFTED = [string]
INVERTED = [string]
SUNMOONDFC = [string]
COMPASSLANDDFC = [string]
ORIGINPWDFC = [string]
MOONELDRAZIDFC = [string]
MOONREVERSEMOONDFC = [string]
WAXINGANDWANINGMOONDFC = [string]
SHOWCASE = [string]
EXTENDEDART = [string]
COMPANION = [string]
FULLART = [string]
class ScryBorderColor ( str , Enum ) :
[docstring]
BLACK = [string]
BORDERLESS = [string]
GOLD = [string]
SILVER = [string]
WHITE = [string]
class ScryGame ( str , Enum ) :
[docstring]
PAPER = [string]
ARENA = [string]
MTGO = [string]
SEGA = [string]
ASTRAL = [string]
class ScryRarity ( str , Enum ) :
[docstring]
COMMON = [string]
UNCOMMON = [string]
RARE = [string]
MYTHIC = [string]
class ScryFormat ( str , Enum ) :
[docstring]
BRAWL = [string]
COMMANDER = [string]
DUEL = [string]
FRONTIER = [string]
FUTURE = [string]
LEGACY = [string]
MODERN = [string]
OLDSCHOOL = [string]
PAUPER = [string]
PENNY = [string]
STANDARD = [string]
VINTAGE = [string]
HISTORIC = [string]
PIONEER = [string]
class ScryLegality ( str , Enum ) :
[docstring]
LEGAL = [string]
NOT_LEGAL = [string]
RESTRICTED = [string]
BANNED = [string]
@ dataclass ( frozen = True ) class ScryObjectList ( ScryObject ) :
[docstring]
object = [string]
data = ...
has_more = ...
next_page = ...
total_cards = ...
warnings = ...
@ dataclass ( frozen = True ) class ScrySet ( ScryObject ) :
[docstring]
object = [string]
id = ...
code = ...
mtgo_code = ...
arena_code = ...
tcgplayer_id = ...
name = ...
set_type = ...
released_at = ...
block_code = ...
block = ...
parent_set_code = ...
card_count = ...
digital = ...
foil_only = ...
nonfoil_only = ...
icon_svg_uri = ...
search_uri = ...
scryfall_uri = ...
uri = ...
@ dataclass ( frozen = True ) class ScryRelatedCard ( ScryObject ) :
[docstring]
object = [string]
id = ...
component = ...
name = ...
type_line = ...
uri = ...
@ dataclass ( frozen = True ) class ScryCardFace ( ScryObject ) :
[docstring]
object = [string]
artist = ...
artist_id = ...
color_indicator = ...
colors = ...
flavor_text = ...
illustration_id = ...
image_uris = ...
loyalty = ...
mana_cost = ...
name = ...
oracle_text = ...
power = ...
printed_name = ...
printed_text = ...
printed_type_line = ...
toughness = ...
type_line = ...
watermark = ...
@ dataclass ( frozen = True ) class CardPreviewBlock :
[docstring]
source = ...
source_uri = ... [comment]
previewed_at = ...
@ dataclass ( frozen = True ) class ScryCard ( ScryObject ) :
[docstring]
object = [string]
[comment]
arena_id = ...
id = ...
lang = ...
mtgo_id = ...
mtgo_foil_id = ...
multiverse_ids = ...
tcgplayer_id = ...
oracle_id = ...
prints_search_uri = ...
rulings_uri = ...
scryfall_uri = ...
uri = ...
[comment]
all_parts = ...
card_faces = ...
cmc = ...
colors = ...
color_identity = ...
color_indicator = ...
edhrec_rank = ...
foil = ...
hand_modifier = ...
keywords = ...
layout = ...
legalities = ...
life_modifier = ...
loyalty = ...
mana_cost = ...
name = ...
nonfoil = ...
oracle_text = ...
oversized = ...
power = ...
produced_mana = ...
reserved = ...
toughness = ...
type_line = ...
[comment]
artist = ...
artist_ids = ...
booster = ...
border_color = ...
card_back_id = ...
collector_number = ...
content_warning = ...
digital = ...
flavor_name = ...
flavor_text = ...
frame_effect = ...
frame_effects = ...
frame = ...
full_art = ...
games = ...
highres_image = ...
illustration_id = ...
image_uris = ...
prices = ... [comment]
printed_name = ...
printed_text = ...
printed_type_line = ...
promo = ...
promo_types = ...
purchase_uris = ...
rarity = ...
related_uris = ...
released_at = ...
reprint = ...
scryfall_set_uri = ...
set_name = ...
set_search_uri = ...
set_type = ...
set_uri = ...
set = ...
story_spotlight = ...
textless = ...
variation = ...
variation_of = ...
watermark = ...
preview = ...
@ dataclass ( frozen = True ) class ScryBulkData ( ScryObject ) :
[docstring]
object = [string]
id = ...
uri = ...
type = ...
name = ...
description = ...
download_uri = ...
updated_at = ...
compressed_size = ...
content_type = ...
content_encoding = ...
repos/gwax/mtg_ssm/mtg_ssm/scryfall/__init__.py
[docstring]
repos/gwax/mtg_ssm/mtg_ssm/scryfall/fetcher.py
from typing import Iterator , Literal , Mapping , List , Any , Union
import requests
import uuid
import mtg_ssm
import builtins
import typing
import typing_extensions
[docstring]
from concurrent . futures import ProcessPoolExecutor
import gzip
import json
import os
import pickle
from typing import Any
from typing import List
from typing import Mapping
from typing import Union
from typing import cast
import uuid
import appdirs
import requests
from mtg_ssm . containers . bundles import ScryfallDataSet
from mtg_ssm . scryfall import schema
from mtg_ssm . scryfall . models import ScryBulkData
from mtg_ssm . scryfall . models import ScryCard
from mtg_ssm . scryfall . models import ScryObject
from mtg_ssm . scryfall . models import ScryObjectList
from mtg_ssm . scryfall . models import ScrySet
DEBUG = os . getenv ( [string] , [string] )
APP_AUTHOR = [string]
APP_NAME = [string]
CACHE_DIR = appdirs . user_cache_dir ( APP_NAME , APP_AUTHOR )
BULK_DATA_ENDPOINT = [string]
SETS_ENDPOINT = [string]
BULK_TYPE = [string]
OBJECT_CACHE_URL = [string]
CHUNK_SIZE = [number] * [number] * [number]
DESERIALIZE_BATCH_SIZE = [number]
_OBJECT_SCHEMA = schema . ScryfallUberSchema ( )
JSON = Union [ str , int , float , bool , None , Mapping [ str , Any ] , List [ Any ] ]
def _cache_path ( endpoint ) :
cache_id = uuid . uuid5 ( uuid . NAMESPACE_URL , endpoint )
return os . path . join ( CACHE_DIR , str ( cache_id ) )
def _fetch_endpoint ( endpoint , * , dirty , write_cache = True ) :
print ( f" [string] { endpoint }" )
os . makedirs ( CACHE_DIR , exist_ok = True )
cache_path = _cache_path ( endpoint )
if not os . path . exists ( cache_path ) :
dirty = True
if dirty :
print ( f" [string] { endpoint }" )
response = requests . get ( endpoint , stream = True )
response . raise_for_status ( )
if not write_cache :
return response . json ( )
print ( f" [string] { endpoint }" )
with gzip . open ( cache_path , [string] ) as cache_file :
for chunk in response . iter_content ( chunk_size = CHUNK_SIZE ) :
cache_file . write ( chunk )
else :
print ( [string] )
with gzip . open ( cache_path , [string] , encoding = [string] ) as cache_file :
return json . load ( cache_file )
def _deserialize_object ( obj_json ) :
return _OBJECT_SCHEMA . load ( obj_json ) . data
def _deserialize_cards ( card_jsons ) :
cards_data = ...
if DEBUG == [string] :
print ( [string] )
cards_data = [ ]
for card_json in card_jsons :
try :
cards_data . append ( cast ( ScryCard , _deserialize_object ( card_json ) ) )
except Exception :
print ( [string] , repr ( card_json ) )
raise
else :
with ProcessPoolExecutor ( ) as executor :
cards_futures = executor . map ( _deserialize_object , card_jsons , chunksize = DESERIALIZE_BATCH_SIZE )
cards_data = cast ( List [ ScryCard ] , list ( cards_futures ) )
return cards_data
def scryfetch ( ) :
[docstring]
cached_bulk_json = None
if os . path . exists ( _cache_path ( BULK_DATA_ENDPOINT ) ) :
cached_bulk_json = _fetch_endpoint ( BULK_DATA_ENDPOINT , dirty = False )
bulk_json = _fetch_endpoint ( BULK_DATA_ENDPOINT , dirty = True , write_cache = False )
cache_dirty = bulk_json != cached_bulk_json
bulk_list = cast ( ScryObjectList , _deserialize_object ( bulk_json ) )
sets_list = cast ( ScryObjectList , _deserialize_object ( _fetch_endpoint ( SETS_ENDPOINT , dirty = cache_dirty ) ) , )
sets_data = cast ( List [ ScrySet ] , sets_list . data )
while sets_list . has_more :
sets_list = cast ( ScryObjectList , _deserialize_object ( _fetch_endpoint ( str ( sets_list . next_page ) , dirty = cache_dirty ) ) , )
sets_data += cast ( List [ ScrySet ] , sets_list . data )
bulk_data = cast ( List [ ScryBulkData ] , bulk_list . data )
[ cards_endpoint ] = [ bd . download_uri for bd in bulk_data if bd . type == BULK_TYPE ]
cards_json = cast ( List [ JSON ] , _fetch_endpoint ( cards_endpoint , dirty = cache_dirty ) )
_fetch_endpoint ( BULK_DATA_ENDPOINT , dirty = cache_dirty , write_cache = True )
object_cache_path = _cache_path ( OBJECT_CACHE_URL )
if os . path . exists ( object_cache_path ) :
if cache_dirty or DEBUG == [string] :
os . remove ( object_cache_path )
else :
try :
with gzip . open ( object_cache_path , [string] ) as object_cache :
loaded_data = pickle . load ( object_cache )
if isinstance ( loaded_data , ScryfallDataSet ) :
return loaded_data
except ( OSError , pickle . UnpicklingError ) :
pass
print ( [string] )
print ( [string] )
cards_data = _deserialize_cards ( cards_json )
scryfall_data = ScryfallDataSet ( sets = sets_data , cards = cards_data )
with gzip . open ( object_cache_path , [string] ) as object_cache :
pickle . dump ( scryfall_data , object_cache )
return scryfall_data
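Note: the object cache written above is simply a gzip-compressed pickle. A minimal round-trip sketch of that storage format (hypothetical path):

import gzip
import pickle

def save_object(path: str, obj) -> None:
    with gzip.open(path, "wb") as fh:   # binary mode for pickle
        pickle.dump(obj, fh)

def load_object(path: str):
    with gzip.open(path, "rb") as fh:
        return pickle.load(fh)          # only unpickle files you wrote yourself

# usage: save_object("/tmp/cards.pickle.gz", {"sets": [], "cards": []})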
repos/gwax/mtg_ssm/mtg_ssm/scryfall/third_party/__init__.py
[docstring]
repos/gwax/mtg_ssm/mtg_ssm/scryfall/third_party/marshmallow_fields.py
from typing import List , Dict , Any
import typing
[docstring]
[comment]
import collections
import datetime as dt
import decimal
import numbers
import uuid
import warnings
from marshmallow import class_registry
from marshmallow import utils
from marshmallow import validate
from marshmallow . base import FieldABC
from marshmallow . base import SchemaABC
from marshmallow . compat import basestring
from marshmallow . compat import text_type
from marshmallow . exceptions import ValidationError
from marshmallow . fields import Field
from marshmallow . utils import missing as missing_
from marshmallow . validate import Validator
class Dict ( Field ) :
[docstring]
default_error_messages = { [string] : [string] }
def __init__ ( self , values = None , keys = None , ** kwargs ) :
super ( Dict , self ) . __init__ ( ** kwargs )
if values is None :
self . value_container = None
elif isinstance ( values , type ) :
if not issubclass ( values , FieldABC ) :
raise ValueError ( [string] [string] )
self . value_container = values ( )
else :
if not isinstance ( values , FieldABC ) :
raise ValueError ( [string] [string] )
self . value_container = values
if keys is None :
self . key_container = None
elif isinstance ( keys , type ) :
if not issubclass ( keys , FieldABC ) :
raise ValueError ( [string] [string] )
self . key_container = keys ( )
else :
if not isinstance ( keys , FieldABC ) :
raise ValueError ( [string] [string] )
self . key_container = keys
def _serialize ( self , value , attr , obj ) :
if value is None :
return None
if not self . value_container and not self . key_container :
return value
if isinstance ( value , collections . Mapping ) :
values = value . values ( )
if self . value_container :
values = [ self . value_container . _serialize ( item , attr , obj ) for item in values ]
keys = value . keys ( )
if self . key_container :
keys = [ self . key_container . _serialize ( key , attr , obj ) for key in keys ]
return dict ( zip ( keys , values ) )
self . fail ( [string] )
def _deserialize ( self , value , attr , data ) :
if not isinstance ( value , collections . Mapping ) :
self . fail ( [string] )
if not self . value_container and not self . key_container :
return value
errors = { }
values = list ( value . values ( ) )
keys = list ( value . keys ( ) )
if self . key_container :
for idx , key in enumerate ( keys ) :
try :
keys [ idx ] = self . key_container . deserialize ( key )
except ValidationError as e :
errors [ key ] = [ [string] . format ( message ) for message in e . messages ]
if self . value_container :
for idx , item in enumerate ( values ) :
try :
values [ idx ] = self . value_container . deserialize ( item )
except ValidationError as e :
values [ idx ] = e . data
key = keys [ idx ]
if key not in errors :
errors [ key ] = [ ]
errors [ key ] . extend ( [ [string] . format ( message ) for message in e . messages ] )
result = dict ( zip ( keys , values ) )
if errors :
raise ValidationError ( errors , data = result )
return result
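Note: the field above coerces keys and values independently and accumulates error messages per key. A generic sketch of that pattern using plain callables (not the marshmallow API):

def coerce_mapping(value, coerce_key=None, coerce_value=None):
    if not isinstance(value, dict):
        raise TypeError("expected a mapping")
    errors = {}
    result = {}
    for key, item in value.items():
        out_key, out_val = key, item
        if coerce_key is not None:
            try:
                out_key = coerce_key(key)
            except ValueError as exc:
                errors.setdefault(key, []).append(str(exc))
        if coerce_value is not None:
            try:
                out_val = coerce_value(item)
            except ValueError as exc:
                errors.setdefault(key, []).append(str(exc))
        result[out_key] = out_val
    if errors:
        # mirror the per-key error dict raised above
        raise ValueError(errors)
    return result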
|
What is the type of variable games?
| 4,738
|
typing.Sequence[ScryGame]
| 4,701
|
kelsos/test-environment-scripts
|
[
"repos/kelsos/test-environment-scripts/setup.py",
"repos/kelsos/test-environment-scripts/scripts/start_private_chain.py",
"repos/kelsos/test-environment-scripts/scripts/open_channels.py",
"repos/kelsos/test-environment-scripts/scripts/transfers.py",
"repos/kelsos/test-environment-scripts/scripts/check_direct_connections.py",
"repos/kelsos/test-environment-scripts/scripts/deploy_testnet.py",
"repos/kelsos/test-environment-scripts/raiden_api/api.py",
"repos/kelsos/test-environment-scripts/raiden_api/model/data.py",
"repos/kelsos/test-environment-scripts/raiden_api/model/exceptions.py",
"repos/kelsos/test-environment-scripts/raiden_api/model/requests.py",
"repos/kelsos/test-environment-scripts/raiden_api/model/responses.py"
] |
[comment]
from setuptools import find_packages , setup
with open ( [string] , encoding = [string] ) as readme_file :
readme = readme_file . read ( )
history = [string]
version = [string] [comment]
setup ( name = [string] , description = [string] , version = version , long_description = readme , long_description_content_type = [string] , author = [string] , author_email = [string] , url = [string] , packages = find_packages ( ) , include_package_data = True , license = [string] , zip_safe = False , keywords = [string] , classifiers = [ [string] , [string] , [string] , [string] , [string] , [string] , ] , use_scm_version = True , setup_requires = [ [string] ] , python_requires = [string] , )
repos/kelsos/test-environment-scripts/scripts/start_private_chain.py
[comment]
from typing import Any , List
import threading
import scripts
import builtins
import typing
import web3
import signal
import subprocess
import sys
import threading
import time
from typing import List
import click
from web3 import HTTPProvider , Web3
WEI_TO_ETH = [number] ** [number]
class ServiceExit ( Exception ) :
pass
class MineJob ( threading . Thread ) :
def __init__ ( self , web3 , sender , receiver , block_time ) :
threading . Thread . __init__ ( self )
self . terminate = threading . Event ( )
self . web3 = web3
self . sender = sender
self . receiver = receiver
self . block_time = block_time
def run ( self ) :
while not self . terminate . is_set ( ) :
time . sleep ( self . block_time )
try :
send ( self . web3 , self . sender , self . receiver , [number] )
except ConnectionError as err :
print ( f' [string] { err }' )
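Note: MineJob is the usual stoppable-worker pattern: a Thread subclass that polls a threading.Event between sleeps. A minimal sketch with a hypothetical work step:

import threading
import time

class PeriodicJob(threading.Thread):
    def __init__(self, interval: float):
        super().__init__()
        self.terminate = threading.Event()
        self.interval = interval

    def run(self):
        while not self.terminate.is_set():
            time.sleep(self.interval)
            # do one unit of work here (MineJob sends a transaction to force a new block)

# usage: job = PeriodicJob(1.0); job.start(); ...; job.terminate.set(); job.join()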
def start_parity ( ) :
cmd = [string]
return subprocess . Popen ( cmd , shell = True , stdout = subprocess . PIPE )
def service_shutdown ( signum , frame ) :
print ( f' [string] { signum }' )
raise ServiceExit
def send ( web3 , sender , receiver , wei_value ) :
unlock_account ( sender , web3 )
transaction = web3 . eth . sendTransaction ( { [string] : receiver , [string] : sender , [string] : wei_value } )
print ( transaction )
print ( f'{ receiver } [string] { web3 . eth . getBalance ( receiver ) }' )
def unlock_account ( address , web3 ) :
if not web3 . personal . unlockAccount ( address , [string] ) :
print ( [string] )
sys . exit ( [number] )
@ click . command ( ) @ click . option ( [string] , default = [string] ) @ click . option ( [string] , envvar = [string] , required = True ) @ click . option ( [string] , type = int , default = [number] ) def main ( rpc_url , accounts , block_time ) :
testing_accounts = accounts . split ( [string] )
web3 = Web3 ( HTTPProvider ( rpc_url ) )
sender = web3 . toChecksumAddress ( [string] )
eth = [number] * WEI_TO_ETH
try :
chain = start_parity ( )
print ( f' [string] { chain . pid }' )
time . sleep ( [number] )
fund_accounts ( web3 , sender , testing_accounts , eth )
job = MineJob ( web3 , sender , testing_accounts [ [number] ] , block_time )
job . start ( )
while True :
time . sleep ( [number] )
except ServiceExit :
job . terminate . set ( )
job . join ( )
chain . terminate ( )
def fund_accounts ( web3 , sender , testing_accounts , eth ) :
print ( f' [string] { testing_accounts }' )
for receiver in testing_accounts :
balance = web3 . eth . getBalance ( receiver )
if balance > [number] :
continue
send ( web3 , sender , receiver , eth )
if __name__ == [string] :
signal . signal ( signal . SIGTERM , service_shutdown )
signal . signal ( signal . SIGINT , service_shutdown )
main ( ) [comment]
repos/kelsos/test-environment-scripts/scripts/open_channels.py
[comment]
from typing import List , Dict , Any , Iterator , Optional
import scripts
import builtins
import typing
import raiden_api
import io
import threading
import typing
from typing import List
import click
import yaml
from raiden_api . api import Api
from raiden_api . model . exceptions import HttpErrorException
from raiden_api . model . requests import OpenChannelRequest , ManageChannelRequest
class NodeConfig :
def __init__ ( self , address , port , funds , targets = None , ) :
self . address = address
self . port = port
self . funds = funds
self . targets = targets
@ classmethod def from_dict ( cls , data ) :
targets = None
if [string] in data :
targets = data [ [string] ]
response = cls ( address = str ( data [ [string] ] ) , port = int ( data [ [string] ] ) , funds = int ( data [ [string] ] ) , targets = targets , )
return response
class OpenJob ( threading . Thread ) :
def __init__ ( self , api , node , token_address ) :
threading . Thread . __init__ ( self )
self . __api = api
self . __node = node
self . __token_address = token_address
def run ( self ) :
address_response = self . __api . address ( )
address = address_response . our_address
if self . __node . targets is None :
print ( f' [string] { address } [string] ' )
return
for partner_address in self . __node . targets :
try :
request = OpenChannelRequest ( partner_address , self . __token_address , self . __node . funds , )
response = self . __api . open_channel ( request )
print ( f' [string] { address } [string] { response . partner_address }' )
except HttpErrorException as e :
print ( f' [string] { address } [string] { partner_address } [string] { str ( e ) } [string] ' )
@ click . command ( ) @ click . option ( [string] , required = True , type = str ) @ click . option ( [string] , required = True , type = click . Path ( exists = True , dir_okay = False ) ) def main ( token , config ) :
configuration_file = open ( config , [string] )
configuration = yaml . load ( configuration_file )
nodes_ = configuration [ [string] ]
if not nodes_ :
print ( [string] )
exit ( [number] )
token_address = token
nodes = list ( map ( lambda x : NodeConfig . from_dict ( x ) , nodes_ ) )
apis = list ( map ( lambda x : Api ( x . port ) , nodes ) )
jobs = [ ]
number_of_nodes = len ( nodes )
for index in range ( number_of_nodes ) :
job = OpenJob ( apis [ index ] , nodes [ index ] , token_address )
jobs . append ( job )
job . start ( )
for job in jobs :
job . join ( )
for index in range ( number_of_nodes ) :
node = nodes [ index ]
api = apis [ index ]
funds = node . funds
all_channels = filter ( lambda x : x . total_deposit == [number] , api . channels ( ) )
without_deposit = list ( map ( lambda x : x . partner_address , all_channels ) )
print ( f' [string] { len ( without_deposit ) } [string] { node . port }' )
for no_funds_partner_address in without_deposit :
try :
deposit = ManageChannelRequest ( total_deposit = funds )
channel = api . manage_channel ( deposit , token_address , no_funds_partner_address )
print ( f' [string] { channel . total_deposit } [string] { channel . partner_address }' )
except HttpErrorException as e :
print ( f' [string] { no_funds_partner_address } [string] { str ( e ) }' )
if __name__ == [string] :
main ( )
repos/kelsos/test-environment-scripts/scripts/transfers.py
[comment]
from typing import Any , Iterator
import threading
import scripts
import builtins
import typing
import raiden_api
import io
import signal
import threading
import time
import click
import tqdm
import yaml
from raiden_api . api import Api
from raiden_api . model . exceptions import HttpErrorException
from raiden_api . model . requests import PaymentRequest
class ServiceExit ( Exception ) :
pass
class TransferJob ( threading . Thread ) :
def __init__ ( self , port , receiver , position , total , single , errors_allowed , token , timeout , ) :
[docstring]
threading . Thread . __init__ ( self )
self . terminate = threading . Event ( )
self . __receiver = receiver
self . __position = position
self . __total = total
self . __single = single
self . __errors_allowed = errors_allowed
self . __token = token
self . __api = Api ( port , timeout )
def transfer ( self ) :
address_response = self . __api . address ( )
sender = address_response . our_address
pending_amount = self . __total
print ( f'{ pending_amount } [string] { self . __single } [string] { sender } [string] { self . __receiver }' )
time . sleep ( [number] )
trange = tqdm . trange ( int ( self . __total / self . __single ) , desc = f" [string] { self . __position } [string] { sender [ [number] : [number] ] . lower ( ) } [string] { self . __receiver [ [number] : [number] ] . lower ( ) }" , position = self . __position , )
total_time = [number]
performed_iterations = [number]
errors = [number]
while pending_amount > [number] and not self . terminate . is_set ( ) :
secs = [number] + ( [number] * errors )
try :
start_time = time . time ( )
identifier = int ( start_time )
payment_request = PaymentRequest ( amount = self . __single , identifier = identifier , )
payment_response = self . __api . payment ( self . __receiver , payment_request , self . __token , )
duration = time . time ( ) - start_time
response_identifier = payment_response . identifier
if response_identifier != identifier :
message = f' [string] ' f' [string] { identifier } [string] { response_identifier }'
raise Exception ( message )
pending_amount -= self . __single
total_time += duration
performed_iterations += [number]
trange . update ( )
except Exception as e :
print ( f' [string] { sender } [string] { secs } [string] { errors } [string] { e }' )
time . sleep ( secs )
errors += [number]
if isinstance ( e , HttpErrorException ) :
print ( f'{ e . code } [string] { str ( e ) }' )
if errors > self . __errors_allowed :
break
errors = [number]
throughput = [number]
if performed_iterations > [number] :
throughput = total_time / performed_iterations
print ( f' [string] { sender } [string] ' f' [string] { performed_iterations } [string] { total_time } [string] { throughput } [string] ' )
def run ( self ) :
self . transfer ( )
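Note: the transfer loop above retries failed payments, sleeping longer as consecutive errors accumulate and giving up once an allowed count is exceeded. A simplified, hedged sketch of that backoff shape (not the exact Raiden flow):

import time

def run_with_backoff(action, errors_allowed: int, base_delay: float = 1.0, step: float = 1.0):
    errors = 0
    while True:
        try:
            return action()
        except Exception:
            errors += 1
            if errors > errors_allowed:
                raise
            # delay grows with the number of consecutive failures
            time.sleep(base_delay + step * errors)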
@ click . command ( ) @ click . option ( [string] , type = int , default = [number] ) @ click . option ( [string] , type = int , default = [number] ) @ click . option ( [string] , type = int , default = [number] ) @ click . option ( [string] , type = str , required = True ) @ click . option ( [string] , required = True , type = click . Path ( exists = True , dir_okay = False ) ) @ click . option ( [string] , type = int , default = [number] ) def main ( transfer_amount , per_transfer , allowed_errors , token , config , timeout , ) :
configuration_file = open ( config , [string] )
configuration = yaml . load ( configuration_file )
nodes_ = configuration [ [string] ]
if not nodes_ :
print ( [string] )
exit ( [number] )
jobs = [ ]
for position in range ( [number] , len ( nodes_ ) ) :
node = nodes_ [ position ]
port = node [ [string] ]
receiver = node [ [string] ]
job = TransferJob ( port , receiver , position , transfer_amount , per_transfer , allowed_errors , token , timeout , )
jobs . append ( job )
try :
for j in jobs :
j . start ( )
while True :
time . sleep ( [number] )
alive = filter ( lambda j : j . is_alive ( ) , jobs )
if len ( list ( alive ) ) == [number] :
break
except ServiceExit :
for j in jobs :
j . terminate . set ( )
j . join ( )
print ( [string] )
exit ( [number] )
print ( [string] )
def shutdown_handler ( _signo , _stackframe ) :
raise ServiceExit
if __name__ == [string] :
signal . signal ( signal . SIGTERM , shutdown_handler )
signal . signal ( signal . SIGINT , shutdown_handler )
main ( )
repos/kelsos/test-environment-scripts/scripts/check_direct_connections.py
[comment]
from typing import List
import typing
import builtins
import raiden_api
import time
import click
import requests
from raiden_api . api import Api
from raiden_api . model . requests import PaymentRequest
@ click . command ( ) @ click . option ( [string] , required = True , type = str ) @ click . option ( [string] , required = True , type = int ) @ click . option ( [string] , required = True , type = int ) def main ( token , start_port , end_port ) :
for port in range ( start_port , end_port ) :
api = Api ( port )
address_response = api . address ( )
our_address = address_response . our_address
channels = api . channels ( )
for channel in channels :
partner_address = channel . partner_address
try :
payment_request = PaymentRequest ( amount = [number] , identifier = int ( time . time ( ) ) )
payment_response = api . payment ( receiver = partner_address , request = payment_request , token = token , )
print ( f'{ our_address } [string] { partner_address } [string] { payment_response . identifier } [string] ' )
except requests . exceptions . RequestException :
print ( f'{ our_address } [string] { partner_address } [string] ' )
pass
if __name__ == [string] :
main ( )
repos/kelsos/test-environment-scripts/scripts/deploy_testnet.py
[comment]
from typing import Any , Dict
import builtins
import typing
import json
import click
from eth_account import Account
from raiden_contracts . constants import CONTRACT_TOKEN_NETWORK_REGISTRY
from raiden_contracts . contract_manager import contract_version_string
from raiden_contracts . deploy . __main__ import ( ContractDeployer , deploy_raiden_contracts , deploy_token_contract , register_token_network , )
from web3 import HTTPProvider , Web3
from web3 . middleware import geth_poa_middleware
TOKEN_DECIMALS = [number]
TOKEN_SUPPLY = [number]
GAS_LIMIT = [number]
GAS_PRICE = [number]
UNLIMITED = [number]
@ click . command ( ) @ click . option ( [string] , required = True , type = click . Path ( exists = True , dir_okay = False ) ) @ click . password_option ( [string] , envvar = [string] , confirmation_prompt = False , ) @ click . option ( [string] , default = [string] ) @ click . option ( [string] , is_flag = True ) def main ( keystore_file , password , rpc_url , development ) :
web3 = Web3 ( HTTPProvider ( rpc_url , request_kwargs = { [string] : [number] } ) )
web3 . middleware_stack . inject ( geth_poa_middleware , layer = [number] )
contract_version = [string]
channel_participant_deposit_limit = None
token_network_deposit_limit = None
max_num_of_networks = None
if development :
contract_version = None
channel_participant_deposit_limit = UNLIMITED
token_network_deposit_limit = UNLIMITED
max_num_of_networks = [number]
with open ( keystore_file , [string] ) as keystore :
encrypted_key = keystore . read ( )
private_key = web3 . eth . account . decrypt ( encrypted_key , password )
account = Account . privateKeyToAccount ( private_key )
if private_key is None :
print ( [string] )
exit ( [number] )
owner = account . address
if web3 . eth . getBalance ( owner ) == [number] :
print ( [string] )
exit ( [number] )
deployer = ContractDeployer ( web3 = web3 , private_key = private_key , gas_limit = GAS_LIMIT , gas_price = GAS_PRICE , wait = [number] , contracts_version = contract_version , )
print ( [string] )
deployed_contract_info = deploy_raiden_contracts ( deployer = deployer , max_num_of_token_networks = max_num_of_networks , )
deployed_contracts = { contract_name : info [ [string] ] for contract_name , info in deployed_contract_info [ [string] ] . items ( ) }
print ( [string] )
tokens = TOKEN_SUPPLY * ( [number] ** TOKEN_DECIMALS )
deployed_token = deploy_token_contract ( deployer , tokens , TOKEN_DECIMALS , [string] , [string] )
abi = deployer . contract_manager . get_contract_abi ( CONTRACT_TOKEN_NETWORK_REGISTRY )
token_address = deployed_token [ [string] ]
expected_version = contract_version_string ( deployer . contract_version_string ( ) )
print ( [string] )
register_token_network ( web3 = web3 , caller = deployer . owner , token_registry_abi = abi , token_registry_address = deployed_contracts [ CONTRACT_TOKEN_NETWORK_REGISTRY ] , token_address = token_address , token_registry_version = expected_version , channel_participant_deposit_limit = channel_participant_deposit_limit , token_network_deposit_limit = token_network_deposit_limit , contracts_version = deployer . contract_version_string ( ) , )
print ( f' [string] { token_address }' )
print ( json . dumps ( deployed_contracts , indent = [number] ) )
print ( [string] )
if __name__ == [string] :
main ( )
repos/kelsos/test-environment-scripts/raiden_api/api.py
from typing import Any , List , Dict
import raiden_api
import requests
import builtins
import typing
from typing import List
import requests
from raiden_api . model . data import Channel
from raiden_api . model . exceptions import HttpErrorException
from raiden_api . model . requests import ManageChannelRequest , OpenChannelRequest , PaymentRequest
from raiden_api . model . responses import AddressResponse , PaymentResponse
def get_errors ( json ) :
result = [string]
if [string] in json :
result = json [ [string] ]
return result
class Api :
__headers = { [string] : [string] }
def __init__ ( self , port , timeout = [number] ) :
self . port = port
self . timeout = timeout
self . __api_base = f' [string] { self . port } [string] '
def address ( self ) :
url = f'{ self . __api_base } [string] '
response = requests . get ( url , headers = self . __headers , timeout = self . timeout , )
json = response . json ( )
if response . status_code != [number] :
raise HttpErrorException ( response . status_code , get_errors ( json ) )
return AddressResponse . from_dict ( json )
def payment ( self , receiver , request , token ) :
url = f'{ self . __api_base } [string] { token } [string] { receiver }'
response = requests . post ( url , headers = self . __headers , json = request . to_dict ( ) , timeout = self . timeout , )
json = response . json ( )
if response . status_code != [number] :
raise HttpErrorException ( response . status_code , get_errors ( json ) )
return PaymentResponse . from_dict ( json )
def channels ( self ) :
url = f'{ self . __api_base } [string] '
response = requests . get ( url , headers = self . __headers , timeout = self . timeout , )
json = response . json ( )
if response . status_code != [number] :
raise HttpErrorException ( response . status_code , get_errors ( json ) )
channels = [ ]
for channel in json :
channels . append ( Channel . from_dict ( channel ) )
return channels
def open_channel ( self , request ) :
url = f'{ self . __api_base } [string] '
response = requests . put ( url , headers = self . __headers , json = request . to_dict ( ) , timeout = self . timeout , )
json = response . json ( )
if response . status_code != [number] :
raise HttpErrorException ( response . status_code , get_errors ( json ) )
return Channel . from_dict ( json )
def manage_channel ( self , request , token_address , partner_address , ) :
url = f'{ self . __api_base } [string] { token_address } [string] { partner_address }'
response = requests . patch ( url , headers = self . __headers , json = request . to_dict ( ) , timeout = self . timeout , )
json = response . json ( )
if response . status_code != [number] :
raise HttpErrorException ( response . status_code , get_errors ( json ) )
return Channel . from_dict ( json )
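Note: every Api method above follows the same shape: send the request, raise HttpErrorException on an unexpected status, and build a typed model from the JSON via from_dict. That is why a call like Api.payment(...) yields a PaymentResponse. A hedged, generic sketch of the pattern (Model stands for any class exposing from_dict):

import requests

class ApiError(Exception):
    def __init__(self, code, message):
        super().__init__(message)
        self.code = code

def get_as(model, url: str, timeout: float = 5.0):
    response = requests.get(url, timeout=timeout)
    payload = response.json()
    if response.status_code != 200:
        # assumes an error body shaped like {"errors": "..."}
        raise ApiError(response.status_code, payload.get("errors", ""))
    # the return type is whatever model.from_dict constructs (e.g. PaymentResponse)
    return model.from_dict(payload)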
repos/kelsos/test-environment-scripts/raiden_api/model/data.py
from typing import Any , Dict
import raiden_api
import builtins
import typing
import typing
class Channel :
def __init__ ( self , token_network_identifier , channel_identifier , partner_address , token_address , balance , total_deposit , state , settle_timeout , reveal_timeout , ) :
self . token_network_identifier = token_network_identifier
self . channel_identifier = channel_identifier
self . partner_address = partner_address
self . token_address = token_address
self . balance = balance
self . total_deposit = total_deposit
self . state = state
self . settle_timeout = settle_timeout
self . reveal_timeout = reveal_timeout
@ classmethod def from_dict ( cls , data ) :
response = cls ( token_network_identifier = str ( data [ [string] ] ) , channel_identifier = int ( data [ [string] ] ) , partner_address = str ( data [ [string] ] ) , token_address = str ( data [ [string] ] ) , balance = int ( data [ [string] ] ) , total_deposit = int ( data [ [string] ] ) , state = str ( data [ [string] ] ) , settle_timeout = int ( data [ [string] ] ) , reveal_timeout = int ( data [ [string] ] ) , )
return response
repos/kelsos/test-environment-scripts/raiden_api/model/exceptions.py
import builtins
class HttpErrorException ( Exception ) :
def __init__ ( self , code , message ) :
self . code = code
self . message = message
repos/kelsos/test-environment-scripts/raiden_api/model/requests.py
from typing import Any , Union , Dict
import builtins
import typing
import time
import typing
class PaymentRequest :
def __init__ ( self , amount , identifier = None ) :
self . amount = amount
self . identifier = identifier
if identifier is None :
self . identifier = int ( time . time ( ) )
def to_dict ( self ) :
result = { [string] : self . amount , [string] : self . identifier , }
return result
class OpenChannelRequest :
def __init__ ( self , partner_address , token_address , total_deposit , settle_timeout = [number] , ) :
self . partner_address = partner_address
self . token_address = token_address
self . total_deposit = total_deposit
self . settle_timeout = settle_timeout
def to_dict ( self ) :
result = { [string] : self . partner_address , [string] : self . token_address , [string] : self . total_deposit , [string] : self . settle_timeout , }
return result
class ManageChannelRequest :
def __init__ ( self , total_deposit = None , state = None ) :
assert state is None or state == [string]
self . total_deposit = total_deposit
self . state = state
def to_dict ( self ) :
result = { }
if self . total_deposit :
result [ [string] ] = self . total_deposit
if self . state :
result [ [string] ] = self . state
return result
repos/kelsos/test-environment-scripts/raiden_api/model/responses.py
from typing import Any , Dict
import raiden_api
import builtins
import typing
import typing
class AddressResponse :
def __init__ ( self , our_address ) :
self . our_address = our_address
@ classmethod def from_dict ( cls , data ) :
return cls ( our_address = str ( data [ [string] ] ) )
class PaymentResponse :
def __init__ ( self , initiator_address , target_address , token_address , amount , identifier , ) :
self . initiator_address = initiator_address
self . target_address = target_address
self . token_address = token_address
self . amount = amount
self . identifier = identifier
@ classmethod def from_dict ( cls , data ) :
response = cls ( initiator_address = str ( data [ [string] ] ) , target_address = str ( data [ [string] ] ) , token_address = str ( data [ [string] ] ) , amount = int ( data [ [string] ] ) , identifier = int ( data [ [string] ] ) , )
return response
|
What is the type of variable payment_response?
| 6,140
|
raiden_api.model.responses.PaymentResponse
| 6,018
|
B2W-BIT/asgard-api
|
[
"repos/B2W-BIT/asgard-api/hollowman/plugins/__init__.py",
"repos/B2W-BIT/asgard-api/asgard/app.py",
"repos/B2W-BIT/asgard-api/asgard/conf.py",
"repos/B2W-BIT/asgard-api/asgard/math/__init__.py",
"repos/B2W-BIT/asgard-api/asgard/exceptions/__init__.py",
"repos/B2W-BIT/asgard-api/asgard/workers/converters/converter_interface.py",
"repos/B2W-BIT/asgard-api/asgard/workers/converters/asgard_converter.py",
"repos/B2W-BIT/asgard-api/asgard/workers/models/decision.py",
"repos/B2W-BIT/asgard-api/asgard/workers/models/app_stats.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/__main__.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/decision_component_interface.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/asgard_cloudinterface.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/app.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/simple_decision_component.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/periodicstatechecker.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/decision_events.py",
"repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/cloudinterface.py",
"repos/B2W-BIT/asgard-api/asgard/backends/users.py",
"repos/B2W-BIT/asgard-api/asgard/backends/accounts.py",
"repos/B2W-BIT/asgard-api/asgard/backends/__init__.py",
"repos/B2W-BIT/asgard-api/asgard/backends/base.py",
"repos/B2W-BIT/asgard-api/asgard/backends/jobs.py",
"repos/B2W-BIT/asgard-api/asgard/backends/mesos/impl.py",
"repos/B2W-BIT/asgard-api/asgard/backends/mesos/__init__.py",
"repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/agent.py",
"repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/app.py",
"repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/task.py",
"repos/B2W-BIT/asgard-api/asgard/backends/models/converters.py",
"repos/B2W-BIT/asgard-api/asgard/clients/mesos/client.py",
"repos/B2W-BIT/asgard-api/asgard/clients/mesos/models/spec.py",
"repos/B2W-BIT/asgard-api/asgard/clients/apps/client.py",
"repos/B2W-BIT/asgard-api/asgard/clients/apps/dtos/app_stats_dto.py"
] |
[comment]
from typing import Dict , Any , Union
import typing
import logging
import sys
import traceback
from enum import Enum
import pkg_resources
from simple_json_logger import JsonLogger
from hollowman import conf
from hollowman . log import logger
class API_PLUGIN_TYPES ( Enum ) :
API_METRIC_PLUGIN = [string]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
[comment]
PLUGIN_REGISTRY = { }
PLUGINS_LOAD_STATUS = { [string] : { } , [string] : { [string] : [number] , [string] : [number] , [string] : [number] } , }
def register_plugin ( plugin_id ) :
plugin_data = { [string] : plugin_id , [string] : { [string] : [ [string] ] } }
PLUGIN_REGISTRY [ plugin_id ] = plugin_data
def get_plugin_registry_data ( ) :
return { [string] : list ( PLUGIN_REGISTRY . values ( ) ) }
def get_plugin_load_status_data ( ) :
return PLUGINS_LOAD_STATUS
def load_entrypoint_group ( groupname ) :
return list ( pkg_resources . iter_entry_points ( group = groupname ) )
def get_plugin_logger_instance ( plugin_id ) :
json_logger = JsonLogger ( flatten = True , extra = { [string] : plugin_id } )
return json_logger
def load_all_metrics_plugins ( flask_application , get_plugin_logger_instance = get_plugin_logger_instance ) :
all_metric_plugins = load_entrypoint_group ( API_PLUGIN_TYPES . API_METRIC_PLUGIN . value )
for entrypoint in all_metric_plugins :
try :
package_name = entrypoint . dist . project_name
if package_name not in PLUGINS_LOAD_STATUS [ [string] ] :
PLUGINS_LOAD_STATUS [ [string] ] [ package_name ] = [ ]
entrypoint_function = entrypoint . load ( )
plugin_logger_instance = get_plugin_logger_instance ( plugin_id = package_name )
plugin_logger_instance . setLevel ( getattr ( logging , conf . LOGLEVEL , logging . INFO ) )
plugin_data = entrypoint_function ( logger = plugin_logger_instance )
url_prefix = f" [string] { package_name }"
flask_application . register_blueprint ( plugin_data [ [string] ] , url_prefix = url_prefix )
logger . info ( { [string] : [string] , [string] : entrypoint , [string] : package_name , [string] : url_prefix , } )
PLUGINS_LOAD_STATUS [ [string] ] [ package_name ] . append ( { [string] : [string] , [string] : package_name , [string] : { [string] : entrypoint . module_name , [string] : entrypoint . attrs [ [number] ] , } , } )
PLUGINS_LOAD_STATUS [ [string] ] [ [string] ] += [number]
PLUGINS_LOAD_STATUS [ [string] ] [ [string] ] += [number]
except Exception as e :
formatted_traceback = traceback . format_exc ( )
exception_type = sys . exc_info ( ) [ [number] ] . __name__
logger . error ( { [string] : [string] , [string] : entrypoint , [string] : package_name , [string] : formatted_traceback , [string] : exception_type , } )
PLUGINS_LOAD_STATUS [ [string] ] [ package_name ] . append ( { [string] : [string] , [string] : exception_type , [string] : formatted_traceback , [string] : package_name , [string] : { [string] : entrypoint . module_name , [string] : entrypoint . attrs [ [number] ] , } , } )
PLUGINS_LOAD_STATUS [ [string] ] [ [string] ] += [number]
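Note: plugin discovery relies on setuptools entry points: each installed distribution advertises callables under a group name, and iter_entry_points enumerates them. A minimal sketch with a hypothetical group name:

import pkg_resources

def load_group(group_name: str) -> dict:
    loaded = {}
    for entrypoint in pkg_resources.iter_entry_points(group=group_name):
        # .load() imports the module and returns the referenced attribute
        loaded[entrypoint.dist.project_name] = entrypoint.load()
    return loaded

# usage (hypothetical group name): plugins = load_group("example.plugins")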
repos/B2W-BIT/asgard-api/asgard/app.py
from typing import Any
import typing
import aiohttp_cors
from aiohttp import web
from asyncworker import App , RouteTypes
from asyncworker . conf import settings
from asyncworker . connections import AMQPConnection
from asgard import conf
conn = AMQPConnection ( hostname = conf . ASGARD_RABBITMQ_HOST , username = conf . ASGARD_RABBITMQ_USER , password = conf . ASGARD_RABBITMQ_PASS , prefetch = conf . ASGARD_RABBITMQ_PREFETCH , )
app = App ( connections = [ conn ] )
async def patched_startup ( app ) :
app [ RouteTypes . HTTP ] = { }
routes = app . routes_registry . http_routes
app [ RouteTypes . HTTP ] [ [string] ] = http_app = web . Application ( )
for route in routes :
for route_def in route . aiohttp_routes ( ) :
route_def . register ( http_app . router )
cors = aiohttp_cors . setup ( http_app , defaults = { [string] : aiohttp_cors . ResourceOptions ( allow_credentials = True , expose_headers = [string] , allow_headers = [string] ) } , )
[comment]
for route in list ( http_app . router . routes ( ) ) :
cors . add ( route )
app [ RouteTypes . HTTP ] [ [string] ] = web . AppRunner ( http_app )
await app [ RouteTypes . HTTP ] [ [string] ] . setup ( )
app [ RouteTypes . HTTP ] [ [string] ] = web . TCPSite ( runner = app [ RouteTypes . HTTP ] [ [string] ] , host = settings . HTTP_HOST , port = settings . HTTP_PORT , )
await app [ RouteTypes . HTTP ] [ [string] ] . start ( )
app . _on_startup . clear ( )
app . _on_startup . append ( patched_startup )
repos/B2W-BIT/asgard-api/asgard/conf.py
from typing import Optional , Literal , List , Type , Union
import builtins
import typing_extensions
import typing
import asgard
import os
from typing import List , Optional
from pydantic import BaseSettings , BaseModel
from asgard . models . spec . fetch import FetchURLSpec
ASGARD_RABBITMQ_HOST = [string]
ASGARD_RABBITMQ_USER = [string]
ASGARD_RABBITMQ_PASS = [string]
ASGARD_RABBITMQ_PREFETCH = [number]
ASGARD_HTTP_CLIENT_CONNECT_TIMEOUT = int ( os . getenv ( [string] , [number] ) )
ASGARD_HTTP_CLIENT_TOTAL_TIMEOUT = int ( os . getenv ( [string] , [number] ) )
[comment]
[comment]
SECRET_KEY = os . getenv ( [string] , [string] )
TASK_FILEREAD_MAX_OFFSET = int ( os . getenv ( [string] , [number] ) )
class AuthSpec ( BaseModel ) :
user = ...
password = ...
class Settings ( BaseSettings ) :
MESOS_API_URLS = ...
DB_URL = ...
STATS_API_URL = ...
ASGARD_API_ADDRESS = ...
SCHEDULED_JOBS_SERVICE_ADDRESS = ...
SCHEDULED_JOBS_SERVICE_AUTH = AuthSpec ( )
AUTOSCALER_AUTH_TOKEN = ...
AUTOSCALER_MARGIN_THRESHOLD = ...
SCHEDULED_JOBS_DEFAULT_FETCH_URIS = [ ]
MIN_CPU_SCALE_LIMIT = [number]
MAX_CPU_SCALE_LIMIT = float ( [string] )
MIN_MEM_SCALE_LIMIT = [number]
MAX_MEM_SCALE_LIMIT = float ( [string] )
class Config :
env_prefix = os . getenv ( [string] , [string] ) + [string]
settings = Settings ( )
repos/B2W-BIT/asgard-api/asgard/math/__init__.py
import builtins
import decimal
from decimal import ROUND_UP , Decimal
def round_up ( n , prec = [number] ) :
return n . quantize ( Decimal ( [string] + [string] * prec ) , rounding = ROUND_UP )
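Note: round_up quantizes a Decimal to a fixed number of places, always rounding away from zero. A worked example assuming a two-decimal exponent pattern (the literal above is redacted):

from decimal import Decimal, ROUND_UP

value = Decimal("10.1234")
rounded = value.quantize(Decimal("1.00"), rounding=ROUND_UP)
assert rounded == Decimal("10.13")  # ROUND_UP always rounds away from zero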
repos/B2W-BIT/asgard-api/asgard/exceptions/__init__.py
class DuplicateEntity ( Exception ) :
pass
class NotFoundEntity ( Exception ) :
pass
repos/B2W-BIT/asgard-api/asgard/workers/converters/converter_interface.py
from typing import List , TypeVar
import typing
from abc import ABC , abstractclassmethod
from typing import Generic , TypeVar , List
ModelObject = TypeVar ( [string] )
DtoObject = TypeVar ( [string] )
class Converter ( Generic [ ModelObject , DtoObject ] , ABC ) :
[docstring]
@ abstractclassmethod def to_model ( cls , dto_object ) :
[docstring]
raise NotImplementedError
@ abstractclassmethod def to_dto ( cls , model_object ) :
[docstring]
raise NotImplementedError
@ classmethod def all_to_model ( cls , dto_objects ) :
return list ( map ( cls . to_model , dto_objects ) )
@ classmethod def all_to_dto ( cls , model_objects ) :
return list ( map ( cls . to_dto , model_objects ) )
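Note: Converter is a generic two-way mapper between domain models and DTOs; subclasses only implement to_model/to_dto and inherit the bulk helpers. A self-contained sketch with invented types:

from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Generic, List, TypeVar

M = TypeVar("M")  # domain model
D = TypeVar("D")  # transport DTO

class TwoWayConverter(Generic[M, D], ABC):
    @classmethod
    @abstractmethod
    def to_model(cls, dto: D) -> M: ...

    @classmethod
    @abstractmethod
    def to_dto(cls, model: M) -> D: ...

    @classmethod
    def all_to_model(cls, dtos: List[D]) -> List[M]:
        return [cls.to_model(d) for d in dtos]

@dataclass
class UserModel:
    name: str

@dataclass
class UserDto:
    display_name: str

class UserConverter(TwoWayConverter[UserModel, UserDto]):
    @classmethod
    def to_model(cls, dto: UserDto) -> UserModel:
        return UserModel(name=dto.display_name)

    @classmethod
    def to_dto(cls, model: UserModel) -> UserDto:
        return UserDto(display_name=model.name)

assert UserConverter.all_to_model([UserDto("ada")]) == [UserModel("ada")]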
repos/B2W-BIT/asgard-api/asgard/workers/converters/asgard_converter.py
from typing import Optional
import asgard
import typing
from typing import Optional
from asgard . clients . apps . dtos . app_dto import AppDto
from asgard . clients . apps . dtos . app_stats_dto import AppStatsDto
from asgard . clients . apps . dtos . decision_dto import DecisionDto
from asgard . workers . models . app_stats import AppStats
from asgard . workers . models . decision import Decision
from asgard . workers . models . scalable_app import ScalableApp
from . converter_interface import Converter
class AppConverter ( Converter [ ScalableApp , AppDto ] ) :
@ classmethod def to_model ( cls , dto_object ) :
if dto_object . id [ [number] ] == [string] :
appid = dto_object . id [ [number] : ]
else :
appid = dto_object . id
scalable_app = ScalableApp ( appid )
scalable_app . cpu_allocated = dto_object . cpus
scalable_app . mem_allocated = dto_object . mem
if dto_object . labels is not None :
if [string] in dto_object . labels :
scalable_app . cpu_threshold = float ( dto_object . labels [ [string] ] )
if [string] in dto_object . labels :
scalable_app . mem_threshold = float ( dto_object . labels [ [string] ] )
if [string] in dto_object . labels :
if [string] in dto_object . labels [ [string] ] :
scalable_app . cpu_threshold = None
scalable_app . mem_threshold = None
else :
if [string] in dto_object . labels [ [string] ] :
scalable_app . cpu_threshold = None
if [string] in dto_object . labels [ [string] ] :
scalable_app . mem_threshold = None
if [string] in dto_object . labels :
scalable_app . max_cpu_scale_limit = float ( dto_object . labels [ [string] ] )
if [string] in dto_object . labels :
scalable_app . min_cpu_scale_limit = float ( dto_object . labels [ [string] ] )
if [string] in dto_object . labels :
scalable_app . max_mem_scale_limit = float ( dto_object . labels [ [string] ] )
if [string] in dto_object . labels :
scalable_app . min_mem_scale_limit = float ( dto_object . labels [ [string] ] )
return scalable_app
@ classmethod def to_dto ( cls , model_object ) :
[comment]
raise NotImplementedError
class AppStatsConverter ( Converter [ AppStats , AppStatsDto ] ) :
@ classmethod def to_model ( cls , dto_object ) :
if dto_object . was_not_found ( ) :
return None
app_stats = AppStats ( )
app_stats . cpu_usage = float ( dto_object . stats . cpu_pct ) / [number]
app_stats . mem_usage = float ( dto_object . stats . ram_pct ) / [number]
return app_stats
@ classmethod def to_dto ( cls , model_object ) :
[comment]
raise NotImplementedError
class DecisionConverter ( Converter [ Decision , DecisionDto ] ) :
@ classmethod def to_model ( cls , dto_object ) :
[comment]
raise NotImplementedError
@ classmethod def to_dto ( cls , model_object ) :
dto_object = DecisionDto ( id = model_object . id , cpus = model_object . cpu , mem = model_object . mem )
return dto_object
repos/B2W-BIT/asgard-api/asgard/workers/models/decision.py
import builtins
class Decision :
def __init__ ( self , appid , cpu = None , mem = None ) :
self . id = appid
self . cpu = cpu
self . mem = mem
repos/B2W-BIT/asgard-api/asgard/workers/models/app_stats.py
from typing import Optional
import builtins
import typing
from typing import Optional
class AppStats :
def __init__ ( self , cpu_usage = None , mem_usage = None , ) :
self . cpu_usage = cpu_usage
self . mem_usage = mem_usage
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/__main__.py
from asgard . workers . autoscaler . app import app
app . run ( )
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/decision_component_interface.py
from typing import List
import asgard
import typing
from abc import ABC , abstractmethod
from typing import List
from asgard . workers . models . decision import Decision
from asgard . workers . models . scalable_app import ScalableApp
class DecisionComponentInterface ( ABC ) :
@ abstractmethod def decide_scaling_actions ( self , apps_stats ) :
raise NotImplementedError
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/asgard_cloudinterface.py
from typing import Dict , List , Any , Optional
import asgard
import typing
from typing import List , Dict
from asgard . clients . apps . client import AppsClient
from asgard . workers . autoscaler . cloudinterface import CloudInterface
from asgard . workers . converters . asgard_converter import ( AppConverter , AppStatsConverter , DecisionConverter , )
from asgard . workers . models . app_stats import AppStats
from asgard . workers . models . decision import Decision
from asgard . workers . models . scalable_app import ScalableApp
class AsgardInterface ( CloudInterface ) :
def __init__ ( self ) :
self . _asgard_client = AppsClient ( )
async def fetch_all_apps ( self ) :
app_dtos = await self . _asgard_client . get_all_apps ( )
apps = AppConverter . all_to_model ( app_dtos )
return apps
async def get_all_scalable_apps ( self ) :
all_apps = await self . fetch_all_apps ( )
if all_apps :
return list ( filter ( ScalableApp . is_set_to_scale , all_apps ) )
return list ( )
async def get_app_stats ( self , app ) :
app_stats_dto = await self . _asgard_client . get_app_stats ( app . id )
app_stats = AppStatsConverter . to_model ( app_stats_dto )
return app_stats
async def apply_decisions ( self , scaling_decisions ) :
if scaling_decisions :
decision_dtos = DecisionConverter . all_to_dto ( scaling_decisions )
post_body = await self . _asgard_client . post_scaling_decisions ( decision_dtos )
return post_body
return [ ]
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/app.py
from typing import List , Any
import asyncworker
import asgard
import typing
from asyncworker import App
from asgard . workers . autoscaler . asgard_cloudinterface import AsgardInterface
from asgard . workers . autoscaler . periodicstatechecker import PeriodicStateChecker
from asgard . workers . autoscaler . simple_decision_component import ( DecisionComponent , )
from hollowman . log import logger
app = App ( )
@ app . run_every ( [number] * [number] ) async def scale_all_apps ( app ) :
cloud_interface = AsgardInterface ( )
state_checker = PeriodicStateChecker ( cloud_interface )
decision_maker = DecisionComponent ( )
logger . debug ( { [string] : [string] } )
apps_stats = await state_checker . get_scalable_apps_stats ( )
logger . debug ( { [string] : [ app . id for app in apps_stats ] } )
scaling_decisions = decision_maker . decide_scaling_actions ( apps_stats )
await cloud_interface . apply_decisions ( scaling_decisions )
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/simple_decision_component.py
from typing import List
import builtins
import asgard
import typing
from typing import List
from asgard . conf import settings
from asgard . workers . autoscaler . decision_component_interface import ( DecisionComponentInterface , )
from asgard . workers . autoscaler . decision_events import DecisionEvents
from asgard . workers . models . decision import Decision
from asgard . workers . models . scalable_app import ScalableApp
from hollowman . log import logger as default_logger
def _limit_number ( number , min_value , max_value ) :
return max ( min ( number , max_value ) , min_value )
class DecisionComponent ( DecisionComponentInterface ) :
def __init__ ( self , logger = default_logger ) :
self . logger = logger
def decide_scaling_actions ( self , apps ) :
decisions = [ ]
for app in apps :
if app . app_stats :
decision = Decision ( app . id )
if app . cpu_needs_scaling ( ) :
new_cpu = ( app . get_cpu_usage ( ) * app . cpu_allocated ) / app . cpu_threshold
new_cpu = _limit_number ( new_cpu , app . min_cpu_scale_limit , app . max_cpu_scale_limit , )
if new_cpu != app . cpu_allocated :
decision . cpu = new_cpu
event = ( DecisionEvents . CPU_SCALE_DOWN if app . cpu_allocated > decision . cpu else DecisionEvents . CPU_SCALE_UP )
self . logger . info ( { [string] : app . id , [string] : event , [string] : app . cpu_allocated , [string] : decision . cpu , } )
if app . is_set_to_scale_cpu ( ) and decision . cpu is None :
self . logger . debug ( { [string] : app . id , [string] : DecisionEvents . CPU_SCALE_NONE , [string] : [string] , [string] : app . get_cpu_usage ( ) , [string] : app . cpu_threshold , [string] : settings . AUTOSCALER_MARGIN_THRESHOLD , } )
if app . mem_needs_scaling ( ) :
new_mem = ( app . get_mem_usage ( ) * app . mem_allocated ) / app . mem_threshold
new_mem = _limit_number ( new_mem , app . min_mem_scale_limit , app . max_mem_scale_limit , )
if new_mem != app . mem_allocated :
decision . mem = new_mem
event = ( DecisionEvents . MEM_SCALE_DOWN if app . mem_allocated > decision . mem else DecisionEvents . MEM_SCALE_UP )
self . logger . info ( { [string] : app . id , [string] : event , [string] : app . mem_allocated , [string] : decision . mem , } )
if app . is_set_to_scale_mem ( ) and decision . mem is None :
self . logger . debug ( { [string] : app . id , [string] : DecisionEvents . MEM_SCALE_NONE , [string] : [string] , [string] : app . get_mem_usage ( ) , [string] : app . mem_threshold , [string] : settings . AUTOSCALER_MARGIN_THRESHOLD , } )
if decision . mem is not None or decision . cpu is not None :
decisions . append ( decision )
return decisions
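Note: the proposed allocation above is usage * allocated / threshold, then clamped to the configured scale limits. A worked sketch with made-up numbers:

def limit(value: float, lo: float, hi: float) -> float:
    # same clamp as _limit_number above
    return max(min(value, hi), lo)

cpu_usage = 0.9       # fraction of the current allocation in use
cpu_allocated = 2.0   # CPUs currently allocated
cpu_threshold = 0.5   # target utilisation

new_cpu = (cpu_usage * cpu_allocated) / cpu_threshold   # 3.6 CPUs proposed
new_cpu = limit(new_cpu, 0.1, 4.0)                      # hypothetical min/max scale limits
assert new_cpu == 3.6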
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/periodicstatechecker.py
from typing import List , Optional
import asgard
import typing
from typing import List
from asgard . workers . autoscaler . cloudinterface import CloudInterface
from asgard . workers . models . scalable_app import ScalableApp
class PeriodicStateChecker :
def __init__ ( self , cloudinterface ) :
self . cloud_interface = cloudinterface
async def get_scalable_apps_stats ( self ) :
apps = await self . cloud_interface . get_all_scalable_apps ( )
if apps :
for app in apps :
app . app_stats = await self . cloud_interface . get_app_stats ( app )
return apps
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/decision_events.py
import builtins
class DecisionEvents :
CPU_SCALE_UP = [string]
CPU_SCALE_DOWN = [string]
CPU_SCALE_NONE = [string]
MEM_SCALE_UP = [string]
MEM_SCALE_DOWN = [string]
MEM_SCALE_NONE = [string]
repos/B2W-BIT/asgard-api/asgard/workers/autoscaler/cloudinterface.py
from typing import List
import asgard
import typing
from abc import ABC , abstractmethod
from typing import List
from asgard . workers . models . app_stats import AppStats
from asgard . workers . models . decision import Decision
from asgard . workers . models . scalable_app import ScalableApp
class CloudInterface ( ABC ) :
@ abstractmethod async def fetch_all_apps ( self ) :
raise NotImplementedError
@ abstractmethod async def get_all_scalable_apps ( self ) :
raise NotImplementedError
@ abstractmethod async def get_app_stats ( self , app ) :
raise NotImplementedError
@ abstractmethod async def apply_decisions ( self , scaling_decisions ) :
raise NotImplementedError
repos/B2W-BIT/asgard-api/asgard/backends/users.py
from typing import List , Any , Optional
import builtins
import asgard
import typing
from typing import List , Optional
import psycopg2
from sqlalchemy . orm . exc import NoResultFound
from asgard . db import AsgardDBSession
from asgard . exceptions import DuplicateEntity
from asgard . models . account import Account , AccountDB
from asgard . models . user import User , UserDB
from asgard . models . user_has_account import UserHasAccount
class UsersBackend :
async def get_alternate_accounts ( self , user , current_account ) :
_ , UserTable = await user . to_alchemy_obj ( )
_ , AccountTable = await current_account . to_alchemy_obj ( )
_join = UserTable . __table__ . join ( UserHasAccount , UserTable . id == UserHasAccount . c . user_id , isouter = True , ) . join ( AccountTable . __table__ , AccountTable . id == UserHasAccount . c . account_id , isouter = True , )
async with AsgardDBSession ( ) as s :
accounts = ( await s . query ( AccountTable ) . join ( _join ) . filter ( UserTable . tx_email == user . email ) . filter ( AccountTable . id != current_account . id ) . all ( ) )
all_acc = [ await Account . from_alchemy_obj ( acc ) for acc in accounts ]
return all_acc
async def get_accounts_from_user ( self , user ) :
async with AsgardDBSession ( ) as s :
_join = UserDB . __table__ . join ( UserHasAccount , UserDB . id == UserHasAccount . c . user_id ) . join ( AccountDB . __table__ , AccountDB . __table__ . c . id == UserHasAccount . c . account_id , )
accounts = ( await s . query ( AccountDB . __table__ ) . join ( _join ) . filter ( UserHasAccount . c . user_id == user . id ) . all ( ) )
return [ await Account . from_alchemy_obj ( a ) for a in accounts ]
async def get_user_by_id ( self , user_id ) :
try :
async with AsgardDBSession ( ) as s :
user = await s . query ( UserDB ) . filter ( UserDB . id == user_id ) . one ( )
return await User . from_alchemy_obj ( user )
except NoResultFound :
return None
async def get_users ( self ) :
[docstring]
async with AsgardDBSession ( ) as s :
return [ await User . from_alchemy_obj ( u ) for u in await s . query ( UserDB ) . all ( ) ]
async def create_user ( self , user ) :
user_db , userTable = await user . to_alchemy_obj ( )
try :
async with AsgardDBSession ( ) as s :
returned_values = await s . execute ( userTable . __table__ . insert ( ) . values ( tx_name = user . name , tx_email = user . email ) . return_defaults ( userTable . id ) )
created_id = list ( returned_values ) [ [number] ] . id
return User ( id = created_id , name = user . name , email = user . email )
except psycopg2 . IntegrityError as e :
raise DuplicateEntity ( e . pgerror )
async def delete_user ( self , user ) :
async with AsgardDBSession ( ) as s :
_ , userTable = await user . to_alchemy_obj ( )
delete_acc_relation = UserHasAccount . delete ( ) . where ( UserHasAccount . c . user_id == user . id )
delete_user = userTable . __table__ . delete ( ) . where ( userTable . id == user . id )
await s . execute ( delete_acc_relation )
await s . execute ( delete_user )
return user
async def update_user ( self , user ) :
async with AsgardDBSession ( ) as s :
user_db , userTable = await user . to_alchemy_obj ( )
update = ( userTable . __table__ . update ( ) . where ( userTable . id == user . id ) . values ( tx_name = user . name , tx_email = user . email ) )
try :
await s . execute ( update )
except psycopg2 . IntegrityError as e :
raise DuplicateEntity ( e . pgerror )
return user
repos/B2W-BIT/asgard-api/asgard/backends/accounts.py
from typing import List , Optional
import sqlalchemy
import asgard
import typing
import builtins
from typing import Optional , List
from sqlalchemy . orm . exc import NoResultFound
from sqlalchemy . sql . expression import Delete , Insert
from asgard . db import AsgardDBSession
from asgard . models . account import Account , AccountDB
from asgard . models . user import User , UserDB
from asgard . models . user_has_account import UserHasAccount
class AccountsBackend :
async def get_account_by_id ( self , acc_id ) :
try :
async with AsgardDBSession ( ) as s :
result = ( await s . query ( AccountDB ) . filter ( AccountDB . id == acc_id ) . one ( ) )
return await Account . from_alchemy_obj ( result )
except NoResultFound :
return None
async def get_accounts ( self ) :
async with AsgardDBSession ( ) as s :
result = await s . query ( AccountDB ) . all ( )
accounts = [ await Account . from_alchemy_obj ( item ) for item in result ]
return accounts
async def get_users_from_account ( self , account ) :
async with AsgardDBSession ( ) as s :
users = ( await s . query ( UserDB ) . join ( UserHasAccount . join ( UserDB , UserHasAccount . c . user_id == UserDB . id ) ) . filter ( UserHasAccount . c . account_id == account . id ) . all ( ) )
return [ await User . from_alchemy_obj ( u ) for u in users ]
async def add_user ( self , user , account ) :
if not await account . user_has_permission ( user ) :
async with AsgardDBSession ( ) as s :
insert = UserHasAccount . insert ( ) . values ( user_id = user . id , account_id = account . id )
await s . execute ( insert )
async def remove_user ( self , user , account ) :
async with AsgardDBSession ( ) as s :
delete = ( UserHasAccount . delete ( ) . where ( UserHasAccount . c . account_id == account . id ) . where ( UserHasAccount . c . user_id == user . id ) )
await s . execute ( delete )
repos/B2W-BIT/asgard-api/asgard/backends/__init__.py
import asgard
from asgard . backends . marathon . impl import MarathonAppsBackend
from asgard . backends . mesos . impl import MesosOrchestrator , MesosAgentsBackend
mesos = MesosOrchestrator ( MesosAgentsBackend ( ) , MarathonAppsBackend ( ) )
repos/B2W-BIT/asgard-api/asgard/backends/base.py
from typing import List , Optional
import builtins
import asgard
import typing
import abc
from enum import Enum
from typing import List , Optional
from asgard . models . account import Account
from asgard . models . agent import Agent
from asgard . models . app import App , AppStats
from asgard . models . user import User
class Interval ( str , Enum ) :
ONE_HOUR = [string]
ONE_MINUTE = [string]
class AppsBackend ( abc . ABC ) :
@ abc . abstractmethod async def get_app_stats ( self , app , interval , user , account ) :
raise NotImplementedError
class AgentsBackend ( abc . ABC ) :
@ abc . abstractmethod async def get_agents ( self , user , account ) :
[docstring]
raise NotImplementedError
@ abc . abstractmethod async def get_by_id ( self , agentd_id , user , account ) :
[docstring]
raise NotImplementedError
@ abc . abstractmethod async def get_apps_running ( self , user , agent ) :
[docstring]
raise NotImplementedError
class Orchestrator ( metaclass = abc . ABCMeta ) :
[docstring]
def __init__ ( self , agents_backend , apps_backend ) :
self . agents_backend = agents_backend
self . apps_backend = apps_backend
@ abc . abstractmethod async def get_agents ( self , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def get_apps_running_for_agent ( self , user , agent ) :
[docstring]
raise NotImplementedError
@ abc . abstractmethod async def get_agent_by_id ( self , agent_id , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def get_app_stats ( self , app , interval , user , account ) :
raise NotImplementedError
repos/B2W-BIT/asgard-api/asgard/backends/jobs.py
from typing import List , Optional
import builtins
import asgard
import typing
import abc
from typing import Optional , List
from asgard . models . account import Account
from asgard . models . job import ScheduledJob
from asgard . models . user import User
class ScheduledJobsBackend ( abc . ABC ) :
@ abc . abstractmethod async def get_job_by_id ( self , job_id , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def list_jobs ( self , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def create_job ( self , job , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def update_job ( self , job , user , account ) :
raise NotImplementedError
@ abc . abstractmethod async def delete_job ( self , job , user , account ) :
raise NotImplementedError
repos/B2W-BIT/asgard-api/asgard/backends/mesos/impl.py
from typing import List , Any , Optional
import builtins
import asgard
import typing
from typing import List , Optional
from asgard . backends . base import Orchestrator , AgentsBackend , Interval
from asgard . backends . mesos . models . agent import MesosAgent
from asgard . backends . mesos . models . converters import MesosAgentConverter
from asgard . clients . mesos . client import MesosClient
from asgard . conf import settings
from asgard . models . account import Account
from asgard . models . agent import Agent
from asgard . models . app import App , AppStats
from asgard . models . user import User
from hollowman import log
async def populate_apps ( agent ) :
try :
agent . applications = await agent . apps ( )
agent . total_apps = len ( agent . applications )
except Exception as e :
agent . add_error ( field_name = [string] , error_msg = [string] )
log . logger . exception ( { [string] : [string] , [string] : agent . id , [string] : agent . hostname , } )
class MesosAgentsBackend ( AgentsBackend ) :
async def get_agents ( self , user , account ) :
async with MesosClient ( * settings . MESOS_API_URLS ) as mesos :
filtered_agents = [ ]
client_agents = await mesos . get_agents ( )
for client_agent in client_agents :
agent = MesosAgentConverter . to_asgard_model ( client_agent )
if not agent . attr_has_value ( [string] , account . owner ) :
continue
await populate_apps ( agent )
await agent . calculate_stats ( )
filtered_agents . append ( agent )
return filtered_agents
async def get_by_id ( self , agent_id , user , account ) :
async with MesosClient ( * settings . MESOS_API_URLS ) as mesos :
client_agent = await mesos . get_agent_by_id ( agent_id = agent_id )
if client_agent :
agent = MesosAgentConverter . to_asgard_model ( client_agent )
if agent . attr_has_value ( [string] , account . owner ) :
await populate_apps ( agent )
await agent . calculate_stats ( )
return agent
return None
async def get_apps_running ( self , user , agent ) :
if agent :
return agent . applications
return [ ]
class MesosOrchestrator ( Orchestrator ) :
async def get_agents ( self , user , account ) :
return await self . agents_backend . get_agents ( user , account )
async def get_agent_by_id ( self , agent_id , user , account ) :
return await self . agents_backend . get_by_id ( agent_id , user , account )
async def get_apps_running_for_agent ( self , user , agent ) :
return await self . agents_backend . get_apps_running ( user , agent )
async def get_app_stats ( self , app , interval , user , account ) :
return await self . apps_backend . get_app_stats ( app , interval , user , account )
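A hedged wiring sketch for the orchestrator above. The keyword-argument constructor is an assumption about the Orchestrator base class (its __init__ is not shown here), as is the no-argument construction of MesosAgentsBackend; the apps backend placeholder is hypothetical.

async def list_account_agents(user, account):
    # MesosOrchestrator only delegates: get_agents() goes to the agents
    # backend, get_app_stats() to the apps backend.
    orchestrator = MesosOrchestrator(
        agents_backend=MesosAgentsBackend(),
        apps_backend=None,  # hypothetical: a concrete apps backend implementation
    )
    return await orchestrator.get_agents(user, account)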
repos/B2W-BIT/asgard-api/asgard/backends/mesos/__init__.py
from asgard . backends . mesos . impl import MesosOrchestrator
repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/agent.py
from typing import Any , Set , Optional , Dict , List
import builtins
import asgard
import typing
import decimal
from collections import defaultdict
from decimal import Decimal
from typing import Dict , List , Set
from asgard . backends . mesos . models . app import MesosApp
from asgard . backends . mesos . models . task import MesosTask
from asgard . http . client import HttpClient
from asgard . math import round_up
from asgard . models . agent import Agent
_http_client = HttpClient ( )
class MesosAgent ( Agent ) :
type = [string]
def filter_by_attrs ( self , kv ) :
pass
async def calculate_stats ( self ) :
[docstring]
cpu_pct = ( Decimal ( self . used_resources [ [string] ] ) / Decimal ( self . resources [ [string] ] ) * [number] )
ram_pct = ( Decimal ( self . used_resources [ [string] ] ) / Decimal ( self . resources [ [string] ] ) * [number] )
self . stats = { [string] : str ( round_up ( cpu_pct ) ) , [string] : str ( round_up ( ram_pct ) ) , }
async def apps ( self ) : [comment]
self_address = f" [string] { self . hostname } [string] { self . port }"
containers_url = f"{ self_address } [string] "
apps = [ ]
response = await _http_client . get ( containers_url )
data = await response . json ( )
all_apps = set ( )
for container_info in data :
app_id = MesosApp . transform_to_asgard_app_id ( container_info [ [string] ] )
if app_id not in all_apps :
apps . append ( MesosApp ( ** { [string] : app_id } ) )
all_apps . add ( app_id )
return apps
async def tasks ( self , app_id ) :
self_address = f" [string] { self . hostname } [string] { self . port }"
containers_url = f"{ self_address } [string] "
response = await _http_client . get ( containers_url )
data = await response . json ( )
tasks_per_app = defaultdict ( list )
for container_info in data :
app_id_ = MesosApp . transform_to_asgard_app_id ( container_info [ [string] ] )
tasks_per_app [ app_id_ ] . append ( MesosTask ( ** { [string] : MesosTask . transform_to_asgard_task_id ( container_info [ [string] ] ) } ) )
return tasks_per_app [ app_id ]
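A standalone illustration of the percentage math in calculate_stats above. The resource keys ("cpus"/"mem"), the stats keys, and the scale factor of 100 stand in for values that are elided in the listing, so treat them all as assumptions.

from decimal import Decimal
from asgard.math import round_up

used = {"cpus": Decimal("1.5"), "mem": Decimal("2048")}
total = {"cpus": Decimal("4"), "mem": Decimal("8192")}
cpu_pct = used["cpus"] / total["cpus"] * 100   # 37.5%
ram_pct = used["mem"] / total["mem"] * 100     # 25%
stats = {"cpu_pct": str(round_up(cpu_pct)), "ram_pct": str(round_up(ram_pct))}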
repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/app.py
import builtins
from asgard . models . app import App
class MesosApp ( App ) :
type = [string]
@ classmethod def transform_to_asgard_app_id ( cls , executor_id ) :
task_name_part = executor_id . split ( [string] ) [ [number] ]
return [string] . join ( task_name_part . split ( [string] ) [ [number] : ] )
repos/B2W-BIT/asgard-api/asgard/backends/mesos/models/task.py
from typing import List
import builtins
import typing
from asgard . models . task import Task
class MesosTask ( Task ) :
type = [string]
name = ...
@ classmethod def transform_to_asgard_task_id ( cls , executor_id ) :
task_name_part = executor_id . split ( [string] ) [ [number] : ]
return [string] . join ( task_name_part )
repos/B2W-BIT/asgard-api/asgard/backends/models/converters.py
from typing import TypeVar
import typing
from abc import ABC , abstractclassmethod
from typing import Generic , TypeVar
AsgardModel = TypeVar ( [string] )
ClientModel = TypeVar ( [string] )
class ModelConverterInterface ( Generic [ AsgardModel , ClientModel ] , ABC ) :
[docstring]
@ abstractclassmethod def to_asgard_model ( cls , other ) :
[docstring]
raise NotImplementedError
@ abstractclassmethod def to_client_model ( cls , other ) :
[docstring]
raise NotImplementedError
repos/B2W-BIT/asgard-api/asgard/clients/mesos/client.py
from typing import Dict , List , Any , Optional
import builtins
import asgard
import typing
from typing import List , Optional , Dict
from asgard . clients . mesos . models . agent import MesosAgent
from asgard . http . client import HttpClient
class MesosClient :
def __init__ ( self , mesos_api_url , * aditional_api_urls ) :
self . mesos_adresses = tuple ( [ mesos_api_url ] ) + aditional_api_urls
self . http_client = HttpClient ( )
async def __aenter__ ( self , * args , ** kwargs ) :
return self
async def __aexit__ ( self , * args , ** kwargs ) :
pass
async def _json_response ( self , path ) :
for addr in self . mesos_adresses :
try :
response = await self . http_client . get ( f"{ addr }{ path }" )
return await response . json ( )
except Exception :
pass
raise Exception ( f" [string] { self . mesos_adresses }" )
async def get_agent_by_id ( self , agent_id ) :
data = await self . _json_response ( f" [string] { agent_id }" )
if data [ [string] ] :
agent = MesosAgent ( ** data [ [string] ] [ [number] ] )
return agent
return None
async def get_agents ( self ) :
data = await self . _json_response ( f" [string] " )
if [string] in data :
agents = [ MesosAgent ( ** agent_info ) for agent_info in data [ [string] ] ]
return agents
return [ ]
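Usage sketch mirroring MesosAgentsBackend above: the client takes one or more master addresses, and _json_response() falls back to the next address when a request fails, raising only after every address has been tried. The `id` attribute on the client-side MesosAgent is an assumption here.

import asyncio
from asgard.conf import settings

async def dump_agent_ids():
    async with MesosClient(*settings.MESOS_API_URLS) as mesos:
        agents = await mesos.get_agents()
        return [agent.id for agent in agents]  # assumes the client model exposes `id`

# asyncio.run(dump_agent_ids())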
repos/B2W-BIT/asgard-api/asgard/clients/mesos/models/spec.py
from typing import Dict , Type
import typing
from typing import Dict , Optional
from pydantic import BaseModel
class MesosUsedResourcesSpec ( BaseModel ) :
disk = ...
mem = ...
gpus = ...
cpus = ...
ports = ...
class MesosResourcesSpec ( BaseModel ) :
disk = ...
mem = ...
gpus = ...
cpus = ...
ports = ...
MesosAttributesSpec = Dict [ str , str ]
repos/B2W-BIT/asgard-api/asgard/clients/apps/client.py
from typing import Dict , List , Any , Optional
import builtins
import asgard
import typing
from typing import List , Optional , Dict
from asgard . clients . apps . dtos . app_dto import AppDto
from asgard . clients . apps . dtos . app_stats_dto import AppStatsDto
from asgard . clients . apps . dtos . decision_dto import DecisionDto
from asgard . conf import settings
from asgard . http . client import HttpClient
def _truncate_decision ( decisionDto ) :
truncated_decision = decisionDto . copy ( )
if decisionDto . cpus :
truncated_decision . cpus = round ( decisionDto . cpus , [number] )
if decisionDto . mem :
truncated_decision . mem = round ( decisionDto . mem , [number] )
return truncated_decision
def _truncate_and_convert_to_dict ( decisionDto ) :
return _truncate_decision ( decisionDto ) . dict ( )
class AppsClient :
def __init__ ( self ) :
self . _http_client = HttpClient ( headers = { [string] : [string] , [string] : f" [string] { settings . AUTOSCALER_AUTH_TOKEN }" , } )
async def get_all_apps ( self ) :
response = await self . _http_client . get ( url = f"{ settings . ASGARD_API_ADDRESS } [string] " )
all_apps_data = await response . json ( )
app_dtos = [ AppDto ( ** app_data ) for app_data in all_apps_data [ [string] ] ]
return app_dtos
async def get_app_stats ( self , app_id ) :
response = await self . _http_client . get ( url = f"{ settings . ASGARD_API_ADDRESS } [string] { app_id } [string] " )
app_stats_data = await response . json ( )
app_stats_dto = AppStatsDto ( ** { [string] : app_id , ** app_stats_data } )
return app_stats_dto
async def post_scaling_decisions ( self , decisions ) :
post_body = list ( map ( _truncate_and_convert_to_dict , decisions ) )
await self . _http_client . put ( url = f"{ settings . ASGARD_API_ADDRESS } [string] " , json = post_body )
return list ( post_body )
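Illustration of the truncation helpers above, assuming the elided rounding precision is 2 decimal places and that DecisionDto accepts exactly the fields shown; both are assumptions made only for this sketch.

from asgard.clients.apps.dtos.decision_dto import DecisionDto

decision = DecisionDto(id="my-app", cpus=0.123456, mem=512.987654)  # hypothetical field values
payload = _truncate_and_convert_to_dict(decision)
# Under the 2-digit assumption: payload["cpus"] == 0.12, payload["mem"] == 512.99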
repos/B2W-BIT/asgard-api/asgard/clients/apps/dtos/app_stats_dto.py
import builtins
from pydantic import BaseModel
class StatsSpec ( BaseModel ) :
type = ...
cpu_pct = ...
ram_pct = ...
cpu_thr_pct = ...
class AppStatsDto ( BaseModel ) :
id = ...
stats = ...
def was_not_found ( self ) :
return ( self . stats . cpu_pct == [string]
and self . stats . ram_pct == [string]
and self . stats . cpu_thr_pct == [string] )
question: What is the type of variable get_user_by_id?
length: 10,203
answer: typing.Optional[asgard.models.user.User]
full_len: 9,896
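The answer above corresponds to a return annotation along the lines of the sketch below; the interface name and parameter list are hypothetical, modelled on the backend classes in the context.

import abc
from typing import Optional

from asgard.models.account import Account
from asgard.models.user import User

class UsersBackend(abc.ABC):  # hypothetical interface
    @abc.abstractmethod
    async def get_user_by_id(self, user_id: str, account: Account) -> Optional[User]:
        raise NotImplementedError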