hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bc710066cef80510ad81cb68d1ed3e70cec4fc2d
| 905
|
py
|
Python
|
bert_e/workflow/gitwaterflow/utils.py
|
tcarmet/bert-e
|
8e0623d9a8c7bd111790d72307862167eca18a23
|
[
"Apache-2.0"
] | null | null | null |
bert_e/workflow/gitwaterflow/utils.py
|
tcarmet/bert-e
|
8e0623d9a8c7bd111790d72307862167eca18a23
|
[
"Apache-2.0"
] | 35
|
2020-08-26T09:25:56.000Z
|
2022-01-10T20:38:15.000Z
|
bert_e/workflow/gitwaterflow/utils.py
|
tcarmet/bert-e
|
8e0623d9a8c7bd111790d72307862167eca18a23
|
[
"Apache-2.0"
] | 2
|
2021-08-17T15:56:50.000Z
|
2022-01-05T19:26:48.000Z
|
def bypass_incompatible_branch(job):
return (job.settings.bypass_incompatible_branch or
job.author_bypass.get('bypass_incompatible_branch', False))
def bypass_peer_approval(job):
return (job.settings.bypass_peer_approval or
job.author_bypass.get('bypass_peer_approval', False))
def bypass_leader_approval(job):
return (job.settings.bypass_leader_approval or
job.author_bypass.get('bypass_leader_approval', False))
def bypass_author_approval(job):
return (job.settings.bypass_author_approval or
job.author_bypass.get('bypass_author_approval', False))
def bypass_build_status(job):
return (job.settings.bypass_build_status or
job.author_bypass.get('bypass_build_status', False))
def bypass_jira_check(job):
return (job.settings.bypass_jira_check or
job.author_bypass.get('bypass_jira_check', False))
| 30.166667
| 71
| 0.742541
| 120
| 905
| 5.25
| 0.15
| 0.085714
| 0.114286
| 0.190476
| 0.571429
| 0.447619
| 0.161905
| 0
| 0
| 0
| 0
| 0
| 0.164641
| 905
| 29
| 72
| 31.206897
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.139381
| 0.077434
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 1
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
bc73bb29476960582e88da68ad24bf687cb2dd0e
| 65
|
py
|
Python
|
healthy_candies/load/__init__.py
|
striantafyllouEPFL/healthy-candies
|
fc7d9e05d54ba207e15d997acea44ff0bf9edb13
|
[
"BSD-2-Clause"
] | 1
|
2018-11-04T21:46:29.000Z
|
2018-11-04T21:46:29.000Z
|
healthy_candies/load/__init__.py
|
striantafyllouEPFL/healthy-candies
|
fc7d9e05d54ba207e15d997acea44ff0bf9edb13
|
[
"BSD-2-Clause"
] | null | null | null |
healthy_candies/load/__init__.py
|
striantafyllouEPFL/healthy-candies
|
fc7d9e05d54ba207e15d997acea44ff0bf9edb13
|
[
"BSD-2-Clause"
] | null | null | null |
from .load import load_data, NUTRI_COLS, load_clean_rel_to_nutri
| 32.5
| 64
| 0.861538
| 12
| 65
| 4.166667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 65
| 1
| 65
| 65
| 0.847458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bc8c6ccfc24c9f2c6b892349f506c390ec4d676f
| 8,400
|
py
|
Python
|
isiscb/curation/authority_views/relation_views.py
|
crispzips/IsisCB
|
72f5ad47bbc2c615f995df148f5b86550835efdb
|
[
"MIT"
] | 4
|
2016-01-25T20:35:33.000Z
|
2020-04-07T15:39:52.000Z
|
isiscb/curation/authority_views/relation_views.py
|
crispzips/IsisCB
|
72f5ad47bbc2c615f995df148f5b86550835efdb
|
[
"MIT"
] | 41
|
2015-08-19T17:34:41.000Z
|
2022-03-11T23:19:01.000Z
|
isiscb/curation/authority_views/relation_views.py
|
crispzips/IsisCB
|
72f5ad47bbc2c615f995df148f5b86550835efdb
|
[
"MIT"
] | 2
|
2020-11-25T20:18:18.000Z
|
2021-06-24T15:15:41.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse, QueryDict #, HttpResponseForbidden, Http404, , JsonResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.urls import reverse
from django.contrib.admin.views.decorators import staff_member_required, user_passes_test
from rules.contrib.views import permission_required, objectgetter
from isisdata.models import *
from isisdata.utils import strip_punctuation, normalize
from isisdata import operations
from isisdata.filters import *
from isisdata import tasks as data_tasks
from curation import p3_port_utils
from curation.forms import *
from curation.contrib.views import check_rules
@user_passes_test(lambda u: u.is_superuser or u.is_staff)
@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))
def create_acrelation_for_authority(request, authority_id):
authority = get_object_or_404(Authority, pk=authority_id)
search_key = request.GET.get('search', request.POST.get('search'))
current_index = request.GET.get('current', request.POST.get('current'))
context = {
'curation_section': 'datasets',
'curation_subsection': 'authorities',
'instance': authority,
'search_key': search_key,
'current_index': current_index
}
if request.method == 'GET':
initial = {
'authority': authority.id,
'name_for_display_in_citation': authority.name
}
type_controlled = request.GET.get('type_controlled', None)
if type_controlled:
initial.update({'type_controlled': type_controlled.upper()})
form = ACRelationForm(prefix='acrelation', initial=initial)
elif request.method == 'POST':
form = ACRelationForm(request.POST, prefix='acrelation')
if form.is_valid():
form.save()
target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations'
if search_key and current_index:
target += '&search=%s¤t=%s' % (search_key, current_index)
return HttpResponseRedirect(target)
context.update({
'form': form,
})
template = 'curation/authority_acrelation_changeview.html'
return render(request, template, context)
@user_passes_test(lambda u: u.is_superuser or u.is_staff)
@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))
def create_aarelation_for_authority(request, authority_id):
authority = get_object_or_404(Authority, pk=authority_id)
search_key = request.GET.get('search', request.POST.get('search'))
current_index = request.GET.get('current', request.POST.get('current'))
context = {
'curation_section': 'datasets',
'curation_subsection': 'authorities',
'instance': authority,
'search_key': search_key,
'current_index': current_index
}
if request.method == 'GET':
initial = {
'subject': authority.id
}
aarelation=AARelation()
aarelation.subject = authority
type_controlled = request.GET.get('type_controlled', None)
if type_controlled:
aarelation = dict(AARelation.TYPE_CHOICES)[type_controlled]
form = AARelationForm(prefix='aarelation', instance=aarelation)
elif request.method == 'POST':
form = AARelationForm(request.POST, prefix='aarelation')
if form.is_valid():
form.save()
target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations'
if search_key and current_index:
target += '&search=%s¤t=%s' % (search_key, current_index)
return HttpResponseRedirect(target)
context.update({
'form': form,
})
template = 'curation/authority_aarelation_changeview.html'
return render(request, template, context)
@user_passes_test(lambda u: u.is_superuser or u.is_staff)
@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))
def acrelation_for_authority(request, authority_id, acrelation_id):
authority = get_object_or_404(Authority, pk=authority_id)
acrelation = get_object_or_404(ACRelation, pk=acrelation_id)
search_key = request.GET.get('search', request.POST.get('search'))
current_index = request.GET.get('current', request.POST.get('current'))
context = {
'curation_section': 'datasets',
'curation_subsection': 'authorities',
'instance': authority,
'acrelation': acrelation,
'search_key': search_key,
'current_index': current_index
}
if request.method == 'GET':
form = ACRelationForm(instance=acrelation, prefix='acrelation')
elif request.method == 'POST':
form = ACRelationForm(request.POST, instance=acrelation, prefix='acrelation')
if form.is_valid():
form.save()
target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=acrelations'
if search_key and current_index:
target += '&search=%s¤t=%s' % (search_key, current_index)
return HttpResponseRedirect(target)
context.update({
'form': form,
})
template = 'curation/authority_acrelation_changeview.html'
return render(request, template, context)
@user_passes_test(lambda u: u.is_superuser or u.is_staff)
@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))
def aarelation_for_authority(request, authority_id, aarelation_id):
authority = get_object_or_404(Authority, pk=authority_id)
aarelation = get_object_or_404(AARelation, pk=aarelation_id)
search_key = request.GET.get('search', request.POST.get('search'))
current_index = request.GET.get('current', request.POST.get('current'))
context = {
'curation_section': 'datasets',
'curation_subsection': 'authorities',
'instance': authority,
'aarelation': aarelation,
'search_key': search_key,
'current_index': current_index
}
if request.method == 'GET':
form = AARelationForm(instance=aarelation, prefix='aarelation')
elif request.method == 'POST':
form = AARelationForm(request.POST, instance=aarelation, prefix='aarelation')
if form.is_valid():
form.save()
target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations'
if search_key and current_index:
target += '&search=%s¤t=%s' % (search_key, current_index)
return HttpResponseRedirect(target)
context.update({
'form': form,
})
template = 'curation/authority_aarelation_changeview.html'
return render(request, template, context)
@user_passes_test(lambda u: u.is_superuser or u.is_staff)
@check_rules('can_access_view_edit', fn=objectgetter(Authority, 'authority_id'))
def delete_aarelation_for_authority(request, authority_id, aarelation_id, format=None):
authority = get_object_or_404(Authority, pk=authority_id)
aarelation = get_object_or_404(AARelation, pk=aarelation_id)
search_key = request.GET.get('search', request.POST.get('search'))
current_index = request.GET.get('current', request.POST.get('current'))
context = {
'curation_section': 'datasets',
'curation_subsection': 'authorities',
'instance': authority,
'aarelation': aarelation,
'search_key': search_key,
'current_index': current_index
}
if request.POST.get('confirm', False) == 'true':
if not aarelation.modified_on:
aarelation.modified_on = datetime.datetime.now()
aarelation.delete()
if format == 'json':
return JsonResponse({'result': True})
target = reverse('curation:curate_authority', args=(authority.id,)) + '?tab=aarelations'
if search_key and current_index:
target += '&search=%s¤t=%s' % (search_key, current_index)
return HttpResponseRedirect(target)
if format == 'json':
return JsonResponse({'result': False})
template = 'curation/authority_aarelation_delete.html'
return render(request, template, context)
| 39.810427
| 133
| 0.68619
| 942
| 8,400
| 5.880042
| 0.138004
| 0.040621
| 0.028164
| 0.037913
| 0.753746
| 0.753746
| 0.723055
| 0.723055
| 0.686586
| 0.664921
| 0
| 0.004604
| 0.198452
| 8,400
| 210
| 134
| 40
| 0.81806
| 0.005714
| 0
| 0.700565
| 0
| 0
| 0.182014
| 0.057358
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028249
| false
| 0.033898
| 0.096045
| 0
| 0.19209
| 0.00565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
bc92d9002e07294919b14cfdd4a1703514d8c845
| 53
|
py
|
Python
|
server/api/src/db/migrate/versions/v_2.py
|
mminamina/311-data
|
9a3e4dc6e14c7500fc3f75f583c7fc4b01108b29
|
[
"MIT"
] | null | null | null |
server/api/src/db/migrate/versions/v_2.py
|
mminamina/311-data
|
9a3e4dc6e14c7500fc3f75f583c7fc4b01108b29
|
[
"MIT"
] | null | null | null |
server/api/src/db/migrate/versions/v_2.py
|
mminamina/311-data
|
9a3e4dc6e14c7500fc3f75f583c7fc4b01108b29
|
[
"MIT"
] | null | null | null |
def migrate():
print('migrating to version 2')
| 10.6
| 35
| 0.641509
| 7
| 53
| 4.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.226415
| 53
| 4
| 36
| 13.25
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
bcb5f8a3494a7c1dd73bdaa2595e97b680531db5
| 256
|
py
|
Python
|
Notebooks/SentinelUtilities/SentinelAnomalyLookup/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 266
|
2019-10-18T00:41:39.000Z
|
2022-03-18T05:44:01.000Z
|
Notebooks/SentinelUtilities/SentinelAnomalyLookup/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 113
|
2020-03-10T16:56:10.000Z
|
2022-03-28T21:54:26.000Z
|
Notebooks/SentinelUtilities/SentinelAnomalyLookup/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 93
|
2020-01-07T20:28:43.000Z
|
2022-03-23T04:09:39.000Z
|
# pylint: disable-msg=C0103
"""
SentinelAnomalyLookup: This package is developed for Azure Sentinel Anomaly lookup
"""
# __init__.py
from .anomaly_lookup_view_helper import AnomalyLookupViewHelper
from .anomaly_finder import AnomalyQueries, AnomalyFinder
| 28.444444
| 82
| 0.832031
| 29
| 256
| 7.068966
| 0.827586
| 0.126829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017467
| 0.105469
| 256
| 8
| 83
| 32
| 0.877729
| 0.472656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bcfa0b019701139c1bd20ee0f0d8361e7deda90e
| 90
|
py
|
Python
|
Ad-Hoc/2454.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Ad-Hoc/2454.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Ad-Hoc/2454.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
P, R = input().split()
if P == '0': print('C')
elif R == '0': print('B')
else: print('A')
| 18
| 25
| 0.488889
| 17
| 90
| 2.588235
| 0.705882
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.177778
| 90
| 5
| 26
| 18
| 0.567568
| 0
| 0
| 0
| 0
| 0
| 0.054945
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4c16fb50407c0d81665fb35d2265d078805475a6
| 6,185
|
py
|
Python
|
tensorflow/contrib/metrics/__init__.py
|
DEVESHTARASIA/tensorflow
|
d3edb8c60ed4fd831d62833ed22f5c23486c561c
|
[
"Apache-2.0"
] | 384
|
2017-02-21T18:38:04.000Z
|
2022-02-22T07:30:25.000Z
|
tensorflow/contrib/metrics/__init__.py
|
ChenAugustus/tensorflow
|
5828e285209ff8c3d1bef2e4bd7c55ca611080d5
|
[
"Apache-2.0"
] | 15
|
2017-03-01T20:18:43.000Z
|
2020-05-07T10:33:51.000Z
|
udacity-car/lib/python2.7/site-packages/tensorflow/contrib/metrics/__init__.py
|
808brick/CarND-Capstone
|
f9e536b4a9d96322d7e971073602c8969dbd9369
|
[
"MIT"
] | 81
|
2017-02-21T19:31:19.000Z
|
2022-02-22T07:30:24.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for evaluation metrics and summary statistics.
See the @{$python/contrib.metrics} guide.
@@streaming_accuracy
@@streaming_mean
@@streaming_recall
@@streaming_recall_at_thresholds
@@streaming_precision
@@streaming_precision_at_thresholds
@@streaming_auc
@@streaming_curve_points
@@streaming_recall_at_k
@@streaming_mean_absolute_error
@@streaming_mean_iou
@@streaming_mean_relative_error
@@streaming_mean_squared_error
@@streaming_mean_tensor
@@streaming_root_mean_squared_error
@@streaming_covariance
@@streaming_pearson_correlation
@@streaming_mean_cosine_distance
@@streaming_percentage_less
@@streaming_sensitivity_at_specificity
@@streaming_sparse_average_precision_at_k
@@streaming_sparse_average_precision_at_top_k
@@streaming_sparse_precision_at_k
@@streaming_sparse_precision_at_top_k
@@streaming_sparse_recall_at_k
@@streaming_specificity_at_sensitivity
@@streaming_concat
@@streaming_false_negatives
@@streaming_false_negatives_at_thresholds
@@streaming_false_positives
@@streaming_false_positives_at_thresholds
@@streaming_true_negatives
@@streaming_true_negatives_at_thresholds
@@streaming_true_positives
@@streaming_true_positives_at_thresholds
@@auc_using_histogram
@@accuracy
@@aggregate_metrics
@@aggregate_metric_map
@@confusion_matrix
@@set_difference
@@set_intersection
@@set_size
@@set_union
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,g-importing-member,wildcard-import
from tensorflow.contrib.metrics.python.metrics import *
# pylint: enable=wildcard-import
from tensorflow.contrib.metrics.python.ops.confusion_matrix_ops import confusion_matrix
from tensorflow.contrib.metrics.python.ops.histogram_ops import auc_using_histogram
from tensorflow.contrib.metrics.python.ops.metric_ops import aggregate_metric_map
from tensorflow.contrib.metrics.python.ops.metric_ops import aggregate_metrics
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_accuracy
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_auc
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_concat
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_covariance
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_curve_points
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_false_negatives
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_false_negatives_at_thresholds
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_false_positives
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_false_positives_at_thresholds
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_absolute_error
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_cosine_distance
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_iou
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_relative_error
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_squared_error
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_mean_tensor
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_pearson_correlation
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_percentage_less
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_precision
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_precision_at_thresholds
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_recall
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_recall_at_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_recall_at_thresholds
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_root_mean_squared_error
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sensitivity_at_specificity
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sparse_average_precision_at_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sparse_average_precision_at_top_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sparse_precision_at_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sparse_precision_at_top_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_sparse_recall_at_k
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_specificity_at_sensitivity
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_true_negatives
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_true_negatives_at_thresholds
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_true_positives
from tensorflow.contrib.metrics.python.ops.metric_ops import streaming_true_positives_at_thresholds
from tensorflow.contrib.metrics.python.ops.set_ops import set_difference
from tensorflow.contrib.metrics.python.ops.set_ops import set_intersection
from tensorflow.contrib.metrics.python.ops.set_ops import set_size
from tensorflow.contrib.metrics.python.ops.set_ops import set_union
# pylint: enable=unused-import,line-too-long
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
| 51.97479
| 104
| 0.864834
| 850
| 6,185
| 5.967059
| 0.165882
| 0.124211
| 0.182177
| 0.242902
| 0.662855
| 0.615931
| 0.589511
| 0.563289
| 0.563289
| 0.560923
| 0
| 0.001375
| 0.059499
| 6,185
| 118
| 105
| 52.415254
| 0.870552
| 0.344705
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.979592
| 0
| 0.979592
| 0.020408
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4c30fdedde14a46b90015527caf9d689634cdfab
| 6,504
|
py
|
Python
|
apps/proportions.py
|
harmkenn/PST_Deploy_Test
|
2484acf13f1f998c98fa94fad98c1f75c27d292b
|
[
"MIT"
] | null | null | null |
apps/proportions.py
|
harmkenn/PST_Deploy_Test
|
2484acf13f1f998c98fa94fad98c1f75c27d292b
|
[
"MIT"
] | null | null | null |
apps/proportions.py
|
harmkenn/PST_Deploy_Test
|
2484acf13f1f998c98fa94fad98c1f75c27d292b
|
[
"MIT"
] | null | null | null |
import streamlit as st
import math
from scipy.stats import *
import pandas as pd
import numpy as np
from plotnine import *
def app():
    """Render the "Proportions" page of the Streamlit app.

    Offers a one-proportion z-test and a two-proportion z-test.  For the
    selected test it reads the widget inputs, computes the test statistic,
    p-value and critical value(s), shades the matching regions on a
    standard-normal curve, and reports a confidence interval for the
    proportion (or difference of proportions).
    """
    # title of the app
    st.subheader("Proportions")
    st.sidebar.subheader("Proportion Settings")
    prop_choice = st.sidebar.radio("",["One Proportion","Two Proportions"])
    if prop_choice == "One Proportion":
        c1,c2,c3 = st.columns(3)
        with c1:
            x = int(st.text_input("Hits",20))
            n = int(st.text_input("Tries",25))
        with c2:
            nullp = float(st.text_input("Null:",.7))
            alpha = float(st.text_input("Alpha",.05))
        with c3:
            st.markdown("Pick a test:")
            tail_choice = st.radio("",["Left Tail","Two Tails","Right Tail"])
        one = st.columns(1)
        with one[0]:
            p_hat = x/n
            # Test SD uses the null proportion; the CI standard error below
            # uses the sample proportion.
            tsd = math.sqrt(nullp*(1-nullp)/n)
            cise = math.sqrt(p_hat*(1-p_hat)/n)
            z = (p_hat - nullp)/tsd
            # NOTE(review): x is rebound here to the plotting grid, clobbering
            # the "Hits" input read above; safe only because p_hat is already
            # computed, but easy to misread.
            x = np.arange(-4,4,.1)
            y = norm.pdf(x)
            ndf = pd.DataFrame({"x":x,"y":y})
            normp = ggplot(ndf) + coord_fixed(ratio = 4)
            if tail_choice == "Left Tail":
                pv = norm.cdf(z)
                cz = norm.ppf(alpha)
                rcz = cz
                # A one-sided test at alpha is paired with a (1 - 2*alpha) CI.
                cl = 1 - 2*alpha
                # steelblue = p-value region, orange = rejection region.
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (-4,z))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (-4,cz))
            if tail_choice == "Two Tails":
                pv = 2*(1-norm.cdf(abs(z)))
                cz = abs(norm.ppf(alpha/2))
                rcz = "±" + str(abs(norm.ppf(alpha/2)))
                cl = 1 - alpha
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (-4,-1*abs(z)))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (abs(z),4))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (-4,-1*abs(cz)))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (abs(cz),4))
            if tail_choice == "Right Tail":
                pv = 1 - norm.cdf(z)
                cz = -1 * norm.ppf(alpha)
                rcz = cz
                cl = 1 - 2*alpha
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (z,4))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (cz,4))
            # Margin of error for the confidence interval.
            me = cz * cise
            rme = "±" + str(abs(me))
            data = pd.DataFrame({"p-Hat":p_hat,"z-Score":z,"p-Value":pv,"CV":rcz,"Test SD":tsd,"C-Level":cl,"CI SE":cise,"ME":rme},index = [0])
            st.write(data)
            # Red segment marks the observed z on the curve.
            normp = normp + geom_segment(aes(x = z, y = 0, xend = z, yend = norm.pdf(z)),color="red")
            normp = normp + geom_line(aes(x=x,y=y))
            st.pyplot(ggplot.draw(normp))
            lower = p_hat - abs(me)
            upper = p_hat + abs(me)
            # NOTE(review): "'%'" renders literal quotes around the percent
            # sign in the output -- probably intended to be just "%".
            st.write(str(100*cl) + "'%' confidence interval is (" + str(lower) +", "+str(upper)+")")
    if prop_choice == "Two Proportions":
        c1,c2,c3 = st.columns(3)
        with c1:
            x1 = int(st.text_input("Hits 1",20))
            n1 = int(st.text_input("Tries 1",25))
        with c2:
            x2 = int(st.text_input("Hits 2",30))
            n2 = int(st.text_input("Tries 2",50))
        with c3:
            alpha = float(st.text_input("Alpha",.05))
            st.markdown("Pick a test:")
            tail_choice = st.radio("",["Left Tail","Two Tails","Right Tail"])
        one = st.columns(1)
        with one[0]:
            p_hat1 = x1/n1
            q_hat1 = 1 -p_hat1
            p_hat2 = x2/n2
            q_hat2 = 1 - p_hat2
            # Pooled proportion: used for the test SD under H0: p1 == p2.
            pp_hat = (x1+x2)/(n1+n2)
            dp_hat = p_hat1 - p_hat2
            pq_hat = 1-pp_hat
            tsd = math.sqrt(pp_hat*pq_hat*(1/n1+1/n2))
            # Unpooled standard error, used for the confidence interval.
            cise = math.sqrt(p_hat1*q_hat1/n1+p_hat2*q_hat2/n2)
            z = (p_hat1 - p_hat2)/tsd
            x = np.arange(-4,4,.1)
            y = norm.pdf(x)
            ndf = pd.DataFrame({"x":x,"y":y})
            normp = ggplot(ndf) + coord_fixed(ratio = 4)
            if tail_choice == "Left Tail":
                pv = norm.cdf(z)
                cz = norm.ppf(alpha)
                rcz = cz
                cl = 1 - 2*alpha
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (-4,z))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (-4,cz))
            if tail_choice == "Two Tails":
                pv = 2*(1-norm.cdf(abs(z)))
                cz = abs(norm.ppf(alpha/2))
                rcz = "±" + str(abs(norm.ppf(alpha/2)))
                cl = 1 - alpha
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (-4,-1*abs(z)))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (abs(z),4))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (-4,-1*abs(cz)))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (abs(cz),4))
            if tail_choice == "Right Tail":
                pv = 1 - norm.cdf(z)
                cz = -1 * norm.ppf(alpha)
                rcz = cz
                cl = 1 - 2*alpha
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "steelblue", xlim = (z,4))
                normp = normp + stat_function(fun = norm.pdf, geom = "area",fill = "orange", xlim = (cz,4))
            me = cz * cise
            rme = "±" + str(abs(me))
            data = pd.DataFrame({"p-Hat 1":p_hat1,"p-Hat 2":p_hat2,"Pooled p-Hat":pp_hat,"Diff p-Hat":dp_hat,"z-Score":z,"p-Value":pv,"CV":rcz,"Test SD":tsd,"C-Level":cl,"CI SE":cise,"ME":rme},index = [0])
            st.write(data)
            normp = normp + geom_segment(aes(x = z, y = 0, xend = z, yend = norm.pdf(z)),color="red")
            normp = normp + geom_line(aes(x=x,y=y))
            st.pyplot(ggplot.draw(normp))
            lower = dp_hat - abs(me)
            upper = dp_hat + abs(me)
            st.write(str(100*cl) + "'%' confidence interval is (" + str(lower) +", "+str(upper)+")")
| 48.177778
| 207
| 0.482934
| 901
| 6,504
| 3.407325
| 0.154273
| 0.045603
| 0.072964
| 0.114658
| 0.791857
| 0.7557
| 0.7557
| 0.737459
| 0.723127
| 0.723127
| 0
| 0.033034
| 0.348401
| 6,504
| 134
| 208
| 48.537313
| 0.69042
| 0.00246
| 0
| 0.688
| 0
| 0
| 0.100062
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008
| false
| 0
| 0.048
| 0
| 0.056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c3c325909dda45d25ada2b46ed9a46e19b99dfc
| 4,154
|
py
|
Python
|
temporal_transforms.py
|
LijiangLong/3D-ResNets-PyTorch
|
89d2cba0b52d55aaa834635a81c172bc38771cd3
|
[
"MIT"
] | null | null | null |
temporal_transforms.py
|
LijiangLong/3D-ResNets-PyTorch
|
89d2cba0b52d55aaa834635a81c172bc38771cd3
|
[
"MIT"
] | null | null | null |
temporal_transforms.py
|
LijiangLong/3D-ResNets-PyTorch
|
89d2cba0b52d55aaa834635a81c172bc38771cd3
|
[
"MIT"
] | null | null | null |
import random
import math
class LoopPadding(object):
    """Pad a frame-index list up to ``size`` by cycling its own entries.

    Indices are repeated in their original order until the list holds
    ``size`` elements.  Lists that are already ``size`` or longer are
    returned unchanged (this transform never crops).  An empty input is
    returned empty.

    Args:
        size (int): Desired minimum output length.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, frame_indices):
        # BUG FIX: the original did ``out = frame_indices`` and appended to
        # it, mutating the caller's list in place.  Work on a copy instead.
        out = list(frame_indices)
        while 0 < len(out) < self.size:
            # Repeat the cyclic prefix until the target length is reached.
            out.extend(out[:self.size - len(out)])
        return out
class TemporalBeginCrop(object):
    """Temporally crop the given frame indices at the beginning.

    If the number of frames is less than the size, loop the indices as
    many times as necessary to satisfy the size.

    Args:
        size (int): Desired output size of the crop.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, frame_indices):
        # Take the leading window (slicing copies, so the input is safe),
        # then cycle it until it is long enough.
        out = frame_indices[:self.size]
        while out and len(out) < self.size:
            out.extend(out[:self.size - len(out)])
        return out
class TemporalCenterCrop(object):
    """Temporally crop the given frame indices at the center.

    If the number of frames is less than the size, loop the indices as
    many times as necessary to satisfy the size.

    Args:
        size (int): Desired output size of the crop.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, frame_indices):
        """
        Args:
            frame_indices (list): frame indices to be cropped.
        Returns:
            list: Cropped frame indices.
        """
        center = len(frame_indices) // 2
        start = max(0, center - (self.size // 2))
        stop = min(start + self.size, len(frame_indices))
        out = frame_indices[start:stop]
        # Cycle the window's own entries until the requested length is met.
        while out and len(out) < self.size:
            out.extend(out[:self.size - len(out)])
        return out
class TemporalRandomCrop(object):
    """Temporally crop the given frame indices at a random location.

    If the number of frames is less than the size, loop the indices as
    many times as necessary to satisfy the size.

    Args:
        size (int): Desired output size of the crop.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, frame_indices):
        """
        Args:
            frame_indices (list): frame indices to be cropped.
        Returns:
            list: Cropped frame indices.
        """
        # Pick a random window start; near the end of the clip the window
        # may come up short and is padded by cycling its own entries.
        last_start = max(0, len(frame_indices) - self.size - 1)
        start = random.randint(0, last_start)
        stop = min(start + self.size, len(frame_indices))
        out = frame_indices[start:stop]
        while out and len(out) < self.size:
            out.extend(out[:self.size - len(out)])
        return out
class TemporalCenterCropFlexible(object):
    """Select evenly spaced frame indices from ``begin`` to ``end`` inclusive.

    With the defaults (begin=15, step=3, end=108) exactly 32 indices are
    taken; the constructor asserts that invariant for whatever arguments
    are supplied.
    """

    def __init__(self, begin=15, step=3, end=108):
        self.begin = begin
        self.step = step
        self.end = end
        # The chosen stride must yield exactly 32 samples.
        assert (end - begin) / step + 1 == 32

    def __call__(self, frame_indices):
        # end is inclusive, hence the +1 on the stop bound.
        return frame_indices[self.begin:self.end + 1:self.step]
class TemporalCenterRandomCrop(object):
    """Temporally crop the given frame indices near the clip center,
    with a small random shift of the window.

    If the number of frames is less than the size, loop the indices as
    many times as necessary to satisfy the size.

    Args:
        size (int): Desired output size of the crop.
    """

    def __init__(self, size):
        self.size = size

    def __call__(self, frame_indices):
        """
        Args:
            frame_indices (list): frame indices to be cropped.
        Returns:
            list: Cropped frame indices.
        """
        # Slack on each side of a perfectly centered window
        # (e.g. 120 frames cropped to 90 leaves spacing == 15).
        spacing = int((len(frame_indices) - self.size) / 2)
        # Random shift strictly inside half the slack.
        offset = random.randint(-1 * int(spacing / 2) + 1, int(spacing / 2) - 1)
        begin_index = int(len(frame_indices) / 2) - int(self.size / 2) + offset
        end_index = begin_index + self.size
        out = frame_indices[begin_index:end_index]
        while out and len(out) < self.size:
            out.extend(out[:self.size - len(out)])
        return out
| 26.974026
| 115
| 0.590515
| 547
| 4,154
| 4.323583
| 0.155393
| 0.157294
| 0.027907
| 0.040592
| 0.771247
| 0.741226
| 0.731078
| 0.731078
| 0.715011
| 0.678647
| 0
| 0.017687
| 0.319451
| 4,154
| 154
| 116
| 26.974026
| 0.818889
| 0.314877
| 0
| 0.661972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 1
| 0.169014
| false
| 0
| 0.028169
| 0
| 0.366197
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5b2899060598acf5361fb2c9db968e61435c9da
| 2,181
|
py
|
Python
|
env/lib/python3.6/site-packages/odf/meta.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
env/lib/python3.6/site-packages/odf/meta.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
env/lib/python3.6/site-packages/odf/meta.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from odf.namespaces import METANS
from odf.element import Element
# Autogenerated
def AutoReload(**args):
    """Create a <meta:auto-reload> element."""
    return Element(qname = (METANS,'auto-reload'), **args)
def CreationDate(**args):
    """Create a <meta:creation-date> element."""
    return Element(qname = (METANS,'creation-date'), **args)
def DateString(**args):
    """Create a <meta:date-string> element."""
    return Element(qname = (METANS,'date-string'), **args)
def DocumentStatistic(**args):
    """Create a <meta:document-statistic> element."""
    return Element(qname = (METANS,'document-statistic'), **args)
def EditingCycles(**args):
    """Create a <meta:editing-cycles> element."""
    return Element(qname = (METANS,'editing-cycles'), **args)
def EditingDuration(**args):
    """Create a <meta:editing-duration> element."""
    return Element(qname = (METANS,'editing-duration'), **args)
def Generator(**args):
    """Create a <meta:generator> element."""
    return Element(qname = (METANS,'generator'), **args)
def HyperlinkBehaviour(**args):
    """Create a <meta:hyperlink-behaviour> element."""
    return Element(qname = (METANS,'hyperlink-behaviour'), **args)
def InitialCreator(**args):
    """Create a <meta:initial-creator> element."""
    return Element(qname = (METANS,'initial-creator'), **args)
def Keyword(**args):
    """Create a <meta:keyword> element."""
    return Element(qname = (METANS,'keyword'), **args)
def PrintDate(**args):
    """Create a <meta:print-date> element."""
    return Element(qname = (METANS,'print-date'), **args)
def PrintedBy(**args):
    """Create a <meta:printed-by> element."""
    return Element(qname = (METANS,'printed-by'), **args)
def Template(**args):
    """Create a <meta:template> element; the 'type' attribute defaults to 'simple'."""
    args.setdefault('type', 'simple')
    return Element(qname = (METANS,'template'), **args)
def UserDefined(**args):
    """Create a <meta:user-defined> element."""
    return Element(qname = (METANS,'user-defined'), **args)
| 32.073529
| 80
| 0.707474
| 285
| 2,181
| 5.414035
| 0.477193
| 0.117952
| 0.163318
| 0.217758
| 0.309786
| 0.110175
| 0.04407
| 0
| 0
| 0
| 0
| 0.011931
| 0.154516
| 2,181
| 67
| 81
| 32.552239
| 0.824837
| 0.374599
| 0
| 0
| 0
| 0
| 0.136364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.451613
| true
| 0
| 0.064516
| 0.419355
| 0.967742
| 0.064516
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d5b8367e1c83c38e170646eb1abb34d55d607542
| 240
|
py
|
Python
|
invert-binary-tree/invert-binary-tree.py
|
Atri10/Leet-code---Atri_Patel
|
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
|
[
"MIT"
] | 1
|
2021-10-10T20:21:18.000Z
|
2021-10-10T20:21:18.000Z
|
invert-binary-tree/invert-binary-tree.py
|
Atri10/Leet-code---Atri_Patel
|
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
|
[
"MIT"
] | null | null | null |
invert-binary-tree/invert-binary-tree.py
|
Atri10/Leet-code---Atri_Patel
|
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
|
[
"MIT"
] | null | null | null |
class Solution:
    def invertTree(self, root: Optional[TreeNode]) -> Optional[TreeNode]:
        """Mirror a binary tree in place.

        Recursively swaps the left and right children of every node and
        returns the (mutated) root; an empty tree yields None.
        """
        if root:
            root.left,root.right = self.invertTree(root.right),self.invertTree(root.left)
            return root
        return None
| 40
| 89
| 0.641667
| 28
| 240
| 5.5
| 0.464286
| 0.207792
| 0.168831
| 0.298701
| 0.324675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 240
| 6
| 90
| 40
| 0.855556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
d5c051b72ce68a91896ab21b2fd4b6e93e7e9a10
| 174
|
py
|
Python
|
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | 7
|
2015-04-01T12:41:55.000Z
|
2019-08-01T18:13:56.000Z
|
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | null | null | null |
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | null | null | null |
from SG_GetFeatureMatrix import *
from SG_VectorY import *
# Computed once at module import (both helpers presumably come from the star
# imports above -- verify against SG_GetFeatureMatrix / SG_VectorY), so every
# call to getDataForClassifier() returns the same objects.
featureMatrix = featureMatrixFromReviews()
Y = getYVector()
def getDataForClassifier():
    """Return the precomputed (featureMatrix, Y) pair for classifier training."""
    return featureMatrix, Y
| 21.75
| 42
| 0.804598
| 17
| 174
| 8.117647
| 0.705882
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 174
| 8
| 43
| 21.75
| 0.907895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
d5d747b80a8ea5e6c6c092c35a44d7f1c0635eb8
| 117
|
py
|
Python
|
music_api/apps/music_app/admin.py
|
fejiroofficial/Simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | null | null | null |
music_api/apps/music_app/admin.py
|
fejiroofficial/Simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | 8
|
2019-12-04T23:40:12.000Z
|
2022-02-10T07:58:28.000Z
|
music_api/apps/music_app/admin.py
|
fejiroofficial/simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Songs
admin.site.register(Songs)  # expose the Songs model in the Django admin site
# Register your models here.
| 16.714286
| 32
| 0.794872
| 17
| 117
| 5.470588
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 117
| 6
| 33
| 19.5
| 0.920792
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d5ecb68fc8ba51b00e1a946759c8f1a77d41211f
| 1,635
|
py
|
Python
|
RunIt/airt/poker_cards.py
|
antx-code/funcode
|
a8a9b99274e169562771b488a3a9551277ef4b99
|
[
"MIT"
] | 3
|
2021-09-27T08:07:07.000Z
|
2022-03-11T04:46:30.000Z
|
RunIt/airt/poker_cards.py
|
antx-code/funcode
|
a8a9b99274e169562771b488a3a9551277ef4b99
|
[
"MIT"
] | null | null | null |
RunIt/airt/poker_cards.py
|
antx-code/funcode
|
a8a9b99274e169562771b488a3a9551277ef4b99
|
[
"MIT"
] | null | null | null |
# Suit color coding:
#   Square (方片) => sq => RGB Blue
#   Plum   (梅花) => pl => RGB Green
#   Spade  (黑桃) => sp => RGB Black
#   Heart  (红桃) => he => RGB Red


def _empty_board():
    """Build one player's starting layout with every slot unfilled (-1)."""
    return {
        'head': [-1] * 3,
        'mid': [-1] * 5,
        'tail': [-1] * 5,
        'drop': [-1] * 4,
        'hand': [-1] * 3,
    }


# One independent board per seat (each call yields fresh lists, so boards
# never share state — the original hand-written literals behaved the same).
init_poker = {seat: _empty_board() for seat in ('local', 'player1', 'player2')}

# Card ranks in ascending order; also the key order of every suit table.
POKER_SCOPE = [
    '2',
    '3',
    '4',
    '5',
    '6',
    '7',
    '8',
    '9',
    '10',
    'J',
    'Q',
    'K',
    'A'
]


def _suit_table(offset):
    """Map each rank to its card id inside a contiguous 13-card suit block."""
    return {rank: offset + i for i, rank in enumerate(POKER_SCOPE)}


# Card ids 0..51, thirteen per suit.  Generated from POKER_SCOPE instead of
# four hand-written tables, which were duplicated and easy to get wrong.
Blue = _suit_table(0)     # Square
Green = _suit_table(13)   # Plum
Red = _suit_table(26)     # Heart
Black = _suit_table(39)   # Spade
| 14.469027
| 40
| 0.263609
| 227
| 1,635
| 1.889868
| 0.38326
| 0.20979
| 0.20979
| 0.13986
| 0.272727
| 0.272727
| 0.272727
| 0.272727
| 0.272727
| 0.272727
| 0
| 0.22103
| 0.429969
| 1,635
| 112
| 41
| 14.598214
| 0.23927
| 0.107645
| 0
| 0.153061
| 0
| 0
| 0.10069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5fc2fcc2b0439d566be57074eaeae0f3e82e072
| 129
|
py
|
Python
|
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
"""Preprocessing DeepA2 datasets for LM training"""
# flake8: noqa
from deepa2.preptrain.t2tpreprocessor import T2TPreprocessor
| 25.8
| 60
| 0.813953
| 14
| 129
| 7.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.108527
| 129
| 4
| 61
| 32.25
| 0.869565
| 0.457364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
910239e4d64bcd7a23fd58a2e98cbfc09b91c703
| 65
|
py
|
Python
|
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | 2
|
2021-06-20T23:47:58.000Z
|
2021-06-28T19:15:41.000Z
|
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | null | null | null |
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | null | null | null |
from .IsraeliQueue import IsraeliQueue, Item, IsraeliQueueByType
| 32.5
| 64
| 0.861538
| 6
| 65
| 9.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 65
| 1
| 65
| 65
| 0.949153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
91095212fe94005bb0badaf0b1144da0c2a0e7f0
| 300
|
py
|
Python
|
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 FreeHackQuest Team <freehackquest@gmail.com>
"""This file was automatically generated by fhq-server
Version: v0.2.47
Date: 2022-01-01 07:15:35
"""
from freehackquest_libclient_py.freehackquest_client import FreeHackQuestClient
| 33.333333
| 79
| 0.77
| 43
| 300
| 5.302326
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104089
| 0.103333
| 300
| 8
| 80
| 37.5
| 0.743494
| 0.69
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
91439c7735cd8dec720dbcbb904a5ff89db7c69f
| 17,382
|
py
|
Python
|
PySS/fem.py
|
manpan-1/PySS
|
1e4b13de3b2aed13ecf9818f9084a2fedb295cf1
|
[
"MIT"
] | 2
|
2018-12-03T13:53:00.000Z
|
2019-10-20T14:30:57.000Z
|
PySS/fem.py
|
manpan-1/PySS
|
1e4b13de3b2aed13ecf9818f9084a2fedb295cf1
|
[
"MIT"
] | null | null | null |
PySS/fem.py
|
manpan-1/PySS
|
1e4b13de3b2aed13ecf9818f9084a2fedb295cf1
|
[
"MIT"
] | 1
|
2018-03-23T19:58:21.000Z
|
2018-03-23T19:58:21.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import pickle
# import csv
# from collections import namedtuple
# from mpl_toolkits.mplot3d import Axes3D
# import matplotlib.animation as animation
# import matplotlib.colors as mc
class FEModel:
    """Container for finite-element model results.

    Attributes:
        name: Identifier of the model (e.g. the originating filename).
        hist_outs: History output data, a mapping of output name -> sequence.
    """

    def __init__(self, name=None, hist_data=None):
        self.name = name
        self.hist_outs = hist_data

    def tuple2dict(self, data):
        """Convert exported load-displacement data to a list of dicts.

        Args:
            data: Iterable of specimens; each specimen is a pair of
                sequences of (step, value) tuples — loads first,
                displacements second.

        Returns:
            list of dict: One dict per specimen with keys "Load" and
            "Disp" (displacements sign-flipped), both numpy arrays.
        """
        ld_data = []
        for specimen in data:
            load = [action[1] for action in specimen[0]]
            disp = [action[1] for action in specimen[1]]
            ld_data.append({
                "Load": np.array(load),
                "Disp": -1 * np.array(disp),
            })
        # BUG FIX: the original built ld_data but never returned it,
        # so callers always received None.
        return ld_data

    def plot_history(self, x_axis, y_axis):
        """Plot one history output against another on a new figure.

        Args:
            x_axis (str): Key of the history output used for x values.
            y_axis (str): Key of the history output used for y values.
        """
        plt.figure()
        plt.plot(self.hist_outs[x_axis], self.hist_outs[y_axis])

    @classmethod
    def from_hist_pkl(cls, filename):
        """Create an instance and import history output data from a pickle.

        Args:
            filename (str): Path to the pickled history data.

        Returns:
            FEModel: New instance named after the file.
        """
        # NOTE: pickle.load executes arbitrary code during deserialization;
        # only load files from trusted sources.
        with open(filename, "rb") as fh:
            history_data = pickle.load(fh)
        return cls(name=filename, hist_data=history_data)
#
# class ParametricDB:
# def __init__(self, dimensions, responses):
# self.responses = responses
# self.dimensions = dimensions
#
# @classmethod
# def from_file(cls, filename):
# """
# Create from file.
#
# The file should be comma separated, first row titles, subsequent rows only numbers.
#
# Parameters
# ----------
# filename : str
# Relative path/filename.
#
# Return
# ------
# ParametricDB
#
# """
# # with open(filename, 'rU') as infile:
# # reader = csv.reader(infile)
# # n_dim = int(next(reader)[0].split()[0])
# # db = {c[0]: c[1:] for c in zip(*reader)}
#
# with open(filename, 'rU') as infile:
# reader = csv.reader(infile, delimiter=";")
# n_dim = int(next(reader)[0].split()[0])
# db = [c for c in zip(*reader)]
#
# all_responses = {i[0]: i[1:] for i in db[n_dim:]}
#
# dim_ticks = np.array([i[1:] for i in db[:n_dim]]).T
# dim_lengths = [len(set(dim_ticks[:, i])) for i in range(n_dim)]
# dim_names = [db[i][0] for i in range(n_dim)]
#
# # with open(filename, 'r') as infile:
# # all_lines = [[c.split(sep=":")[0]] + c.split(sep=":")[1].split(sep=",") for c in infile]
# # db = {c[0]: c[1:] for c in zip(*all_lines)}
#
# # for key in db.keys():
# # if len(key.split(",")) > 1:
# # n_dim = len(key.split(","))
# # dim_str = key
# # dim_ticks = np.array([c.split(sep=",") for c in db[dim_str]])
# # dim_lengths = [len(set(dim_ticks[:, i])) for i in range(n_dim)]
# # dim_names = dim_str.split(sep=",")
# full_list = {i[0]: i[1:][0] for i in zip(dim_names, dim_ticks.T)}
#
# # del db[dim_str]
#
# #df = pd.DataFrame(full_dict)
#
# Address = namedtuple("map", " ".join(dim_names))
# args = [tuple(sorted(set(dim_ticks[:, i]))) for i, j in enumerate(dim_names)]
# addressbook = Address(*args)
#
# mtx = {i: np.empty(dim_lengths) for i in all_responses.keys()}
# for response in all_responses.keys():
# for i, response_value in enumerate(all_responses[response]):
# current_idx = tuple(addressbook[idx].index(full_list[name][i]) for idx, name in enumerate(dim_names))
# mtx[response][current_idx] = response_value
# mtx[response].flags.writeable = False
#
# return cls(addressbook, mtx)
#
# def get_slice(self, slice_at, response):
# """
# Get a slice of the database.
#
# Parameters
# ----------
# slice_at : dict of int
# A dictionary of the keys to be sliced at the assigned values.
# response : str
# The name of the requested response to be sliced.
#
# """
#
# idx_arr = [0]*len(self.dimensions)
#
# for key in self.dimensions._fields:
# if key not in slice_at.keys():
# idx_arr[self.get_idx(key)] = slice(None, None)
# for name, value in zip(slice_at.keys(), slice_at.values()):
# idx_arr[self.get_idx(name)] = value
#
# return self.responses[response][idx_arr]
#
# def get_idx(self, attrname):
# """
# Get the index number of a parameter (dimension) in the database.
#
# Parameters
# ----------
# attrname : str
#
# """
# return(self.dimensions.index(self.dimensions.__getattribute__(attrname)))
#
# def contour_2d(self, slice_at, response, transpose=False, fig=None, sbplt=None):
# """
# Contour plot.
# :param slice_at:
# :return:
# """
# plt.rc('text', usetex=True)
# if fig is None:
# fig = plt.figure()
# if sbplt is None:
# ax = fig.add_subplot(111)
# else:
# ax = fig.add_subplot(sbplt)
# else:
# if sbplt is None:
# ax = fig.add_subplot(111)
# else:
# ax = fig.add_subplot(sbplt)
#
# axes = [key for key in self.dimensions._fields if key not in slice_at.keys()]
#
# if transpose:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[1])], self.dimensions[self.get_idx(axes[0])])
# Z = self.get_slice(slice_at, response).T
# x_label, y_label = axes[1], axes[0]
# else:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[0])], self.dimensions[self.get_idx(axes[1])])
# Z = self.get_slice(slice_at, response)
# x_label, y_label = axes[0], axes[1]
#
# ttl_values = [self.dimensions[self.get_idx(i)][slice_at[i]] for i in slice_at.keys()]
#
# # levels = np.arange(0, 2., 0.025)
# # sbplt = ax.contour(X.astype(np.float), Y.astype(np.float), Z.T, vmin=0.4, vmax=1., levels=levels, cmap=plt.cm.inferno)
# sbplt = ax.contour(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.gray_r)
# sbplt2 = ax.contourf(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.inferno)
# plt.clabel(sbplt, inline=1, fontsize=10)
# ttl = [i for i in zip(slice_at.keys(), ttl_values)]
# ttl = ", ".join(["=".join(i) for i in ttl])
# ax.set_title("$" + response + "$" + " for : " + "$" + ttl + "$")
# ax.set_xlabel("$"+x_label+"$")
# ax.set_ylabel("$"+y_label+"$")
#
# return fig
#
# def surf_3d(self, slice_at, response, transpose=False, fig=None, sbplt=None):
# """
# Surface plot.
# :param slice_at:
# :return:
# """
# #Convenient window dimensions
# # one subplot:
# # 2 side by side: Bbox(x0=0.0, y0=0.0, x1=6.79, y1=2.57)
# # azim elev = -160 30
# # 3 subplots side by side
# # 4 subplots: Bbox(x0=0.0, y0=0.0, x1=6.43, y1=5.14)
# #azim elev -160 30
# plt.rc('text', usetex=True)
# if fig is None:
# fig = plt.figure()
# if sbplt is None:
# ax = fig.add_subplot(111, projection='3d')
# else:
# ax = fig.add_subplot(sbplt, projection='3d')
# else:
# if sbplt is None:
# ax = fig.add_subplot(111, projection='3d')
# else:
# ax = fig.add_subplot(sbplt, projection='3d')
#
#
# axes = [key for key in self.dimensions._fields if key not in slice_at.keys()]
#
# if transpose:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[1])], self.dimensions[self.get_idx(axes[0])])
# Z = self.get_slice(slice_at, response).T
# x_label, y_label = axes[1], axes[0]
# else:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[0])], self.dimensions[self.get_idx(axes[1])])
# Z = self.get_slice(slice_at, response)
# x_label, y_label = axes[0], axes[1]
#
# ttl_values = [self.dimensions[self.get_idx(i)][slice_at[i]] for i in slice_at.keys()]
#
# sbplt = ax.plot_surface(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.inferno)
# # plt.clabel(sbplt, inline=1, fontsize=10)
# ttl = [i for i in zip(slice_at.keys(), ttl_values)]
# ttl = ", ".join(["=".join(i) for i in ttl])
# ax.set_title("$" + response + "$" + " for : " + "$" + ttl + "$")
# ax.set_xlabel("$"+x_label+"$")
# ax.set_ylabel("$"+y_label+"$")
#
# return fig
#
# def match_viewports(fig=None):
# if fig is None:
# fig = plt.gcf()
# fig.axes[1].view_init(azim=fig.axes[0].azim, elev=fig.axes[0].elev)
def main():
    """Plot LPF contour grids for the lambda_flex 0.1 and 0.2 FEM databases.

    For each database and each (f_yield, fab_class) combination a 2x3 figure
    is created, one subplot per plate imperfection level (plate_imp 0-5).
    Figures are created in the same order as the original hand-unrolled
    calls; nothing is returned (figures live in pyplot's figure registry).
    """
    # Index -> label maps used only for the figure titles.
    fab_names = ("fcA", "fcB", "fcC")
    f_yield_names = ("355 MPa", "700 MPa")

    def _plot_contour_grids(db, lambda_flex):
        # One figure per (f_yield, fab_class) pair: all fabrication classes
        # for 355 MPa first, then all for 700 MPa, matching the original order.
        for f_yield in range(len(f_yield_names)):
            for fab_class in range(len(fab_names)):
                fig, ax = plt.subplots(nrows=2, ncols=3)
                fig.suptitle(
                    "fab_class: %s, f_yield: %s, lambda_flex: %s"
                    % (fab_names[fab_class], f_yield_names[f_yield], lambda_flex)
                )
                for plate_imp in range(6):
                    # Subplots fill row by row: imps 0-2 on top, 3-5 below.
                    row, col = divmod(plate_imp, 3)
                    db.contour_2d(
                        {
                            "plate_imp": plate_imp,
                            "fab_class": fab_class,
                            "f_yield": f_yield,
                        },
                        "lpf",
                        ax=ax[row, col],
                    )

    # NOTE(review): the two filenames are inconsistent (underscore vs hyphen
    # before "lambda"); kept verbatim because they must match files on disk.
    lambda01 = ParametricDB.from_file("data/fem/fem-results_lambda01.dat")
    _plot_contour_grids(lambda01, "0.1")
    lambda02 = ParametricDB.from_file("data/fem/fem-results-lambda02.dat")
    _plot_contour_grids(lambda02, "0.2")
| 47.884298
| 130
| 0.56921
| 2,713
| 17,382
| 3.468485
| 0.095466
| 0.071413
| 0.10712
| 0.130074
| 0.738363
| 0.718916
| 0.713815
| 0.705951
| 0.702976
| 0.696918
| 0
| 0.059203
| 0.233287
| 17,382
| 362
| 131
| 48.016575
| 0.646882
| 0.441721
| 0
| 0.09375
| 0
| 0
| 0.286353
| 0.007021
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039063
| false
| 0
| 0.023438
| 0
| 0.085938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e6ee19c46029883010bf024e3e8dd551854a83e8
| 80
|
py
|
Python
|
LINETOKEN/__init__.py
|
pratannaimjoi/tokenIpad
|
f03969c05427bc1804d05c42823a28725c7e38a0
|
[
"Apache-2.0"
] | null | null | null |
LINETOKEN/__init__.py
|
pratannaimjoi/tokenIpad
|
f03969c05427bc1804d05c42823a28725c7e38a0
|
[
"Apache-2.0"
] | null | null | null |
LINETOKEN/__init__.py
|
pratannaimjoi/tokenIpad
|
f03969c05427bc1804d05c42823a28725c7e38a0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from .LineApi import LINE
from .lib.Gen.ttypes import *
| 20
| 29
| 0.6625
| 12
| 80
| 4.416667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.1625
| 80
| 3
| 30
| 26.666667
| 0.776119
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e6f6e592f45ce51ed72972736b1981a35d6ad662
| 81
|
py
|
Python
|
pynn/__init__.py
|
jkae/knn-exercise
|
ae569e3f6a0e23669369d99e032270e72f8fbb66
|
[
"MIT"
] | null | null | null |
pynn/__init__.py
|
jkae/knn-exercise
|
ae569e3f6a0e23669369d99e032270e72f8fbb66
|
[
"MIT"
] | null | null | null |
pynn/__init__.py
|
jkae/knn-exercise
|
ae569e3f6a0e23669369d99e032270e72f8fbb66
|
[
"MIT"
] | null | null | null |
from .nearest_neighbor_index import NearestNeighborIndex
from .kd_tree import *
| 20.25
| 56
| 0.851852
| 10
| 81
| 6.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 81
| 3
| 57
| 27
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fc0c40028b9c4945addfec469dd5871c8f82e05b
| 52
|
py
|
Python
|
gemucator/__init__.py
|
philipwfowler/genucator
|
d43a79afe1aa81ca24d7ab4370ed230e08aa89bf
|
[
"MIT"
] | null | null | null |
gemucator/__init__.py
|
philipwfowler/genucator
|
d43a79afe1aa81ca24d7ab4370ed230e08aa89bf
|
[
"MIT"
] | null | null | null |
gemucator/__init__.py
|
philipwfowler/genucator
|
d43a79afe1aa81ca24d7ab4370ed230e08aa89bf
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
from .core import gemucator
| 13
| 27
| 0.730769
| 8
| 52
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 3
| 28
| 17.333333
| 0.863636
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fc1d95b3a3f568e9cf0561a8f283914e5b1db140
| 1,815
|
py
|
Python
|
skopt/tests/test_transformers.py
|
sqbl/scikit-optimize
|
c1866d5a9ad67efe93ac99736bfc2dc659b561d4
|
[
"BSD-3-Clause"
] | null | null | null |
skopt/tests/test_transformers.py
|
sqbl/scikit-optimize
|
c1866d5a9ad67efe93ac99736bfc2dc659b561d4
|
[
"BSD-3-Clause"
] | null | null | null |
skopt/tests/test_transformers.py
|
sqbl/scikit-optimize
|
c1866d5a9ad67efe93ac99736bfc2dc659b561d4
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
import numbers
import numpy as np
from numpy.testing import assert_raises
from numpy.testing import assert_array_equal
from numpy.testing import assert_equal
from numpy.testing import assert_raises_regex
from skopt.space import LogN, Normalize
@pytest.mark.fast_test
def test_logn2_integer():
    """Round-tripping integers 2..30 through a base-2 log transform recovers them."""
    tf = LogN(2)
    for value in range(2, 31):
        recovered = tf.inverse_transform(tf.transform(value))
        assert_array_equal(int(np.round(recovered)), value)
@pytest.mark.fast_test
def test_logn10_integer():
    """Round-tripping integers 2..30 through a base-10 log transform recovers them."""
    # Bug fix: this test previously constructed LogN(2), making it an exact
    # duplicate of test_logn2_integer instead of exercising base 10.
    transformer = LogN(10)
    for X in range(2, 31):
        X_orig = transformer.inverse_transform(transformer.transform(X))
        assert_array_equal(int(np.round(X_orig)), X)
@pytest.mark.fast_test
def test_normalize_integer():
    """Normalize(1, 20, is_int=True): near-boundary values snap, far ones raise."""
    transformer = Normalize(1, 20, is_int=True)
    # Values within rounding distance of the bounds clip to the unit interval.
    assert transformer.transform(19.8) == 1.0
    assert transformer.transform(20.2) == 1.0
    assert transformer.transform(1.2) == 0.0
    assert transformer.transform(0.9) == 0.0
    # Values too far outside [1, 20] must be rejected.
    assert_raises(ValueError, transformer.transform, 20.6)
    assert_raises(ValueError, transformer.transform, 0.4)
    # Inverse transform rounds unit-interval values back to integer bounds.
    assert transformer.inverse_transform(0.99) == 20
    assert transformer.inverse_transform(0.01) == 1
    assert_raises(ValueError, transformer.inverse_transform, 1. + 1e-8)
    # NOTE(review): the line below likely intended inverse_transform(0. - 1e-8)
    # to mirror the previous check; transform(0. - 1e-8) also raises, so the
    # test passes as written -- confirm intent before changing.
    assert_raises(ValueError, transformer.transform, 0. - 1e-8)
@pytest.mark.fast_test
def test_normalize():
    """Normalize(1, 20, is_int=False): maps [1, 20] to [0, 1] with strict bounds."""
    transformer = Normalize(1, 20, is_int=False)
    assert transformer.transform(20.) == 1.0
    assert transformer.transform(1.) == 0.0
    # Continuous mode tolerates no slack beyond ~1e-7 outside the bounds.
    assert_raises(ValueError, transformer.transform, 20. + 1e-7)
    assert_raises(ValueError, transformer.transform, 1.0 - 1e-7)
    assert_raises(ValueError, transformer.inverse_transform, 1. + 1e-8)
    # NOTE(review): likely intended inverse_transform(0. - 1e-8); transform
    # also raises here, so behavior is unaffected -- confirm intent.
    assert_raises(ValueError, transformer.transform, 0. - 1e-8)
| 34.245283
| 72
| 0.738292
| 256
| 1,815
| 5.074219
| 0.203125
| 0.21555
| 0.135489
| 0.203233
| 0.836798
| 0.734411
| 0.482679
| 0.449577
| 0.378753
| 0.378753
| 0
| 0.049351
| 0.151515
| 1,815
| 52
| 73
| 34.903846
| 0.794156
| 0
| 0
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.52381
| 1
| 0.095238
| false
| 0
| 0.190476
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc3539d71d659a16209a54fcd5f9758f5e36c76b
| 3,993
|
py
|
Python
|
tests/test_server.py
|
m-bo-one/ethereumd-proxy
|
1d1eb3905dac4b28a8e23c283214859a13f6e020
|
[
"MIT"
] | 21
|
2017-07-24T15:45:03.000Z
|
2019-09-21T16:18:48.000Z
|
tests/test_server.py
|
m-bo-one/ethereumd-proxy
|
1d1eb3905dac4b28a8e23c283214859a13f6e020
|
[
"MIT"
] | 11
|
2017-07-24T20:14:16.000Z
|
2019-02-10T22:52:32.000Z
|
tests/test_server.py
|
DeV1doR/ethereumd-proxy
|
1d1eb3905dac4b28a8e23c283214859a13f6e020
|
[
"MIT"
] | 8
|
2018-02-17T13:33:15.000Z
|
2020-08-16T05:21:34.000Z
|
from collections import namedtuple
import json
from asynctest.mock import patch
import pytest
from ethereumd.server import RPCServer
from ethereumd.proxy import EthereumProxy
from aioethereum.errors import BadResponseError
from .base import BaseTestRunner
# Minimal stand-in for a web request object: handler_index only reads the
# ``json`` attribute.
Request = namedtuple('Request', ['json'])


class TestServer(BaseTestRunner):
    """Tests for RPCServer.handler_index JSON-RPC 2.0 success and error paths."""

    # Flag consumed by BaseTestRunner: run these tests against a live node.
    run_with_node = True

    async def init_server(self, loop):
        """Build an RPCServer with its poller patched out and run its startup hook."""
        server = RPCServer()
        # Patch the poller so startup does not begin polling the node.
        with patch('ethereumd.poller.Poller.poll'):
            await server.before_server_start()(None, loop)
        return server

    @pytest.mark.asyncio
    async def test_server_handler_index_success_call(self, event_loop):
        """A well-formed JSON-RPC 2.0 call yields an integer result and no error."""
        server = await self.init_server(event_loop)
        data = {
            'jsonrpc': '2.0',
            'method': 'getblockcount',
            'params': [],
            'id': 'test',
        }
        request = Request(json=data)
        response = await server.handler_index(request)
        parsed = json.loads(response.body)
        assert parsed['error'] is None
        assert isinstance(parsed['result'], int)

    @pytest.mark.asyncio
    async def test_server_handler_index_invalid_rpc_data(self, event_loop):
        """A request missing 'params' is rejected with code -32602."""
        server = await self.init_server(event_loop)
        # 'params' is deliberately omitted to violate the rpc 2.0 structure.
        data = {
            'jsonrpc': '2.0',
            'method': 'getblockcount',
            'id': 'test',
        }
        request = Request(json=data)
        response = await server.handler_index(request)
        parsed = json.loads(response.body)
        assert parsed['error']['code'] == -32602
        assert parsed['error']['message'] == 'Invalid rpc 2.0 structure'
        assert parsed['result'] is None

    @pytest.mark.asyncio
    async def test_server_handler_index_attr_error_call(self, event_loop):
        """An AttributeError from the proxy maps to -32601 'Method not found'."""
        server = await self.init_server(event_loop)
        data = {
            'jsonrpc': '2.0',
            'method': 'getblockcount',
            'params': [],
            'id': 'test',
        }
        request = Request(json=data)
        def _raise_error():
            raise AttributeError('bla bla method not found')
        with patch.object(EthereumProxy, 'getblockcount',
                          side_effect=_raise_error):
            response = await server.handler_index(request)
        parsed = json.loads(response.body)
        assert parsed['error']['code'] == -32601
        assert parsed['error']['message'] == 'Method not found'
        assert parsed['result'] is None

    @pytest.mark.asyncio
    async def test_server_handler_index_type_error_call(self, event_loop):
        """A TypeError from the proxy maps to the generic error code -1."""
        server = await self.init_server(event_loop)
        data = {
            'jsonrpc': '2.0',
            'method': 'getblockcount',
            'params': [],
            'id': 'test',
        }
        request = Request(json=data)
        def _raise_error():
            raise TypeError('test')
        with patch.object(EthereumProxy, 'getblockcount',
                          side_effect=_raise_error):
            response = await server.handler_index(request)
        parsed = json.loads(response.body)
        assert parsed['error']['code'] == -1
        assert parsed['error']['message'] == 'test'
        assert parsed['result'] is None

    @pytest.mark.asyncio
    async def test_server_handler_index_bad_response_call(self, event_loop):
        """A BadResponseError propagates its own code and message unchanged."""
        server = await self.init_server(event_loop)
        data = {
            'jsonrpc': '2.0',
            'method': 'getblockcount',
            'params': [],
            'id': 'test',
        }
        request = Request(json=data)
        def _raise_error():
            raise BadResponseError('test', code=-99999999)
        with patch.object(EthereumProxy, 'getblockcount',
                          side_effect=_raise_error):
            response = await server.handler_index(request)
        parsed = json.loads(response.body)
        assert parsed['error']['code'] == -99999999
        assert parsed['error']['message'] == 'test'
        assert parsed['result'] is None
| 33.554622
| 76
| 0.596043
| 421
| 3,993
| 5.489311
| 0.2019
| 0.067503
| 0.077888
| 0.047598
| 0.707486
| 0.707486
| 0.707486
| 0.707486
| 0.707486
| 0.666811
| 0
| 0.013708
| 0.287503
| 3,993
| 118
| 77
| 33.838983
| 0.798594
| 0
| 0
| 0.633663
| 0
| 0
| 0.119459
| 0.007012
| 0
| 0
| 0
| 0
| 0.138614
| 1
| 0.029703
| false
| 0
| 0.079208
| 0
| 0.138614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc4539e7bc135f9ebeba5ee7c487446b450f5f15
| 35
|
py
|
Python
|
Python/Tests/TestData/ProjectHomeProjects/Subfolder/ProgramB.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 404
|
2019-05-07T02:21:57.000Z
|
2022-03-31T17:03:04.000Z
|
Python/Tests/TestData/ProjectHomeProjects/Subfolder/ProgramB.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 1,672
|
2019-05-06T21:09:38.000Z
|
2022-03-31T23:16:04.000Z
|
Python/Tests/TestData/ProjectHomeProjects/Subfolder/ProgramB.py
|
techkey/PTVS
|
8355e67eedd8e915ca49bd38a2f36172696fd903
|
[
"Apache-2.0"
] | 186
|
2019-05-13T03:17:37.000Z
|
2022-03-31T16:24:05.000Z
|
# ProgramB.py
# Test-data fixture: minimal script whose only behavior is printing a greeting.
print('Hello World')
| 11.666667
| 20
| 0.714286
| 5
| 35
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 2
| 21
| 17.5
| 0.806452
| 0.314286
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
fc69e4e9cacf2317d6b062809fbe0cb9a22ea2b1
| 72
|
py
|
Python
|
hydrobox/discharge/__init__.py
|
VForWaTer/hydrobox
|
ae7d10bf5aa48bf7daf3d1094e6bb66f0a7ce96b
|
[
"MIT"
] | 4
|
2020-10-08T15:31:36.000Z
|
2021-06-25T00:46:40.000Z
|
hydrobox/discharge/__init__.py
|
joergmeyer-kit/hydrobox
|
af75a5ba87147e00656435c170535c69fc3298a8
|
[
"MIT"
] | 5
|
2020-05-12T08:45:18.000Z
|
2021-05-20T07:18:47.000Z
|
hydrobox/discharge/__init__.py
|
joergmeyer-kit/hydrobox
|
af75a5ba87147e00656435c170535c69fc3298a8
|
[
"MIT"
] | 3
|
2020-07-27T07:16:14.000Z
|
2021-04-28T21:57:48.000Z
|
from .catchment import regime, flow_duration_curve
from . import indices
| 36
| 50
| 0.847222
| 10
| 72
| 5.9
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 72
| 2
| 51
| 36
| 0.921875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fc7738cdaacc95969a1834885a266a49c73d4c6b
| 12,361
|
py
|
Python
|
coffeine/pipelines.py
|
dengemann/meegpowreg
|
e9cc8f2372f8b8ef4b372bfea113ed0b9646cb39
|
[
"MIT"
] | 6
|
2021-07-19T12:17:59.000Z
|
2021-08-09T15:50:18.000Z
|
coffeine/pipelines.py
|
dengemann/meegpowreg
|
e9cc8f2372f8b8ef4b372bfea113ed0b9646cb39
|
[
"MIT"
] | 23
|
2021-04-16T21:41:36.000Z
|
2021-07-13T10:08:47.000Z
|
coffeine/pipelines.py
|
dengemann/meegpowreg
|
e9cc8f2372f8b8ef4b372bfea113ed0b9646cb39
|
[
"MIT"
] | 5
|
2021-04-15T15:28:51.000Z
|
2021-06-28T21:17:11.000Z
|
import numpy as np
from coffeine.covariance_transformers import (
Diag,
LogDiag,
ExpandFeatures,
Riemann,
RiemannSnp,
NaiveVec)
from coffeine.spatial_filters import (
ProjIdentitySpace,
ProjCommonSpace,
ProjLWSpace,
ProjRandomSpace,
ProjSPoCSpace)
from sklearn.compose import make_column_transformer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import RidgeCV, LogisticRegression
def make_filter_bank_transformer(names, method='riemann',
                                 projection_params=None,
                                 vectorization_params=None,
                                 categorical_interaction=None):
    """Generate pipeline for filterbank models.
    Prepare filter bank models as used in [1]_. These models take as input
    sensor-space covariance matrices computed from M/EEG signals in different
    frequency bands. Then transformations are applied to improve the
    applicability of linear regression techniques by reducing the impact of
    field spread.
    In terms of implementation, this involves 1) projection
    (e.g. spatial filters) and 2) vectorization (e.g. taking the log on the
    diagonal).
    .. note::
        The resulting model expects as inputs data frames in which different
        covarances (e.g. for different frequencies) are stored inside columns
        indexed by ``names``.
        Other columns will be passed through by the underlying column
        transformers.
        The pipeline also supports fitting categorical interaction effects
        after projection and vectorization steps are performed.
    .. note::
        All essential methods from [1]_ are implemented here. In practice,
        we recommend comparing `riemann', `spoc' and `diag' as a baseline.
    Parameters
    ----------
    names : list of str
        The column names of the data frame corresponding to different
        covariances.
    method : str
        The method used for extracting features from covariances. Defaults
        to ``'riemann'``. Can be ``'riemann'``, ``'lw_riemann'``, ``'diag'``,
        ``'log_diag'``, ``'random'``, ``'naive'``, ``'spoc'``,
        ``'riemann_wasserstein'``.
    projection_params : dict | None
        The parameters for the projection step.
    vectorization_params : dict | None
        The parameters for the vectorization step.
    categorical_interaction : str
        The column in the input data frame containing a binary descriptor
        used to fit 2-way interaction effects.
    References
    ----------
    [1] D. Sabbagh, P. Ablin, G. Varoquaux, A. Gramfort, and D.A. Engemann.
    Predictive regression modeling with MEG/EEG: from source power
    to signals and cognitive states.
    *NeuroImage*, page 116893,2020. ISSN 1053-8119.
    https://doi.org/10.1016/j.neuroimage.2020.116893
    """
    # put defaults here for projection and vectorization step
    projection_defaults = {
        'riemann': dict(scale=1, n_compo='full', reg=1.e-05),
        'lw_riemann': dict(shrink=1),
        'diag': dict(),
        'log_diag': dict(),
        'random': dict(n_compo='full'),
        'naive': dict(),
        'spoc': dict(n_compo='full', scale='auto', reg=1.e-05, shrink=1),
        'riemann_wasserstein': dict()
    }
    vectorization_defaults = {
        'riemann': dict(metric='riemann'),
        'lw_riemann': dict(metric='riemann'),
        'diag': dict(),
        'log_diag': dict(),
        'random': dict(),
        'naive': dict(method='upper'),
        'spoc': dict(),
        'riemann_wasserstein': dict(rank='full')
    }
    # Sanity check: both default tables must cover exactly the same methods.
    assert set(projection_defaults) == set(vectorization_defaults)
    if method not in projection_defaults:
        raise ValueError(
            f"The `method` ('{method}') you specified is unknown.")
    # update defaults
    projection_params_ = projection_defaults[method]
    if projection_params is not None:
        projection_params_.update(**projection_params)
    vectorization_params_ = vectorization_defaults[method]
    if vectorization_params is not None:
        vectorization_params_.update(**vectorization_params)
    def _get_projector_vectorizer(projection, vectorization):
        # One (projection -> vectorization) sub-pipeline per covariance column.
        return [(make_pipeline(*
                [projection(**projection_params_),
                 vectorization(**vectorization_params_)]),
                name) for name in names]
    # setup pipelines (projection + vectorization step)
    steps = tuple()
    if method == 'riemann':
        steps = (ProjCommonSpace, Riemann)
    elif method == 'lw_riemann':
        steps = (ProjLWSpace, Riemann)
    elif method == 'diag':
        steps = (ProjIdentitySpace, Diag)
    elif method == 'log_diag':
        steps = (ProjIdentitySpace, LogDiag)
    elif method == 'random':
        steps = (ProjRandomSpace, LogDiag)
    elif method == 'naive':
        steps = (ProjIdentitySpace, NaiveVec)
    elif method == 'spoc':
        steps = (ProjSPoCSpace, LogDiag)
    elif method == 'riemann_wasserstein':
        steps = (ProjIdentitySpace, RiemannSnp)
    filter_bank_transformer = make_column_transformer(
        *_get_projector_vectorizer(*steps), remainder='passthrough')
    if categorical_interaction is not None:
        # Wrap the transformer so 2-way interactions with the binary
        # categorical column are appended to the feature matrix.
        filter_bank_transformer = ExpandFeatures(
            filter_bank_transformer, expander_column=categorical_interaction)
    return filter_bank_transformer
def make_filter_bank_regressor(names, method='riemann',
                               projection_params=None,
                               vectorization_params=None,
                               categorical_interaction=None, scaling=None,
                               estimator=None):
    """Generate pipeline for regression with filter bank model.
    Prepare filter bank models as used in [1]_. These models take as input
    sensor-space covariance matrices computed from M/EEG signals in different
    frequency bands. Then transformations are applied to improve the
    applicability of linear regression techniques by reducing the impact of
    field spread.
    In terms of implementation, this involves 1) projection
    (e.g. spatial filters) and 2) vectorization (e.g. taking the log on the
    diagonal).
    .. note::
        The resulting model expects as inputs data frames in which different
        covarances (e.g. for different frequencies) are stored inside columns
        indexed by ``names``.
        Other columns will be passed through by the underlying column
        transformers.
        The pipeline also supports fitting categorical interaction effects
        after projection and vectorization steps are performed.
    .. note::
        All essential methods from [1]_ are implemented here. In practice,
        we recommend comparing `riemann', `spoc' and `diag' as a baseline.
    Parameters
    ----------
    names : list of str
        The column names of the data frame corresponding to different
        covariances.
    method : str
        The method used for extracting features from covariances. Defaults
        to ``'riemann'``. Can be ``'riemann'``, ``'lw_riemann'``, ``'diag'``,
        ``'log_diag'``, ``'random'``, ``'naive'``, ``'spoc'``,
        ``'riemann_wasserstein'``.
    projection_params : dict | None
        The parameters for the projection step.
    vectorization_params : dict | None
        The parameters for the vectorization step.
    categorical_interaction : str
        The column in the input data frame containing a binary descriptor
        used to fit 2-way interaction effects.
    scaling : scikit-learn Transformer object | None
        Method for re-rescaling the features. Defaults to None. If None,
        StandardScaler is used.
    estimator : scikit-learn Estimator object.
        The estimator object. Defaults to None. If None, RidgeCV
        is performed with default values.
    References
    ----------
    [1] D. Sabbagh, P. Ablin, G. Varoquaux, A. Gramfort, and D.A. Engemann.
    Predictive regression modeling with MEG/EEG: from source power
    to signals and cognitive states.
    *NeuroImage*, page 116893,2020. ISSN 1053-8119.
    https://doi.org/10.1016/j.neuroimage.2020.116893
    """
    # Reuse the shared filter-bank feature extraction front end.
    filter_bank_transformer = make_filter_bank_transformer(
        names=names, method=method, projection_params=projection_params,
        vectorization_params=vectorization_params,
        categorical_interaction=categorical_interaction
    )
    # Fall back to a standard scaler when the caller supplies none.
    scaling_ = scaling
    if scaling_ is None:
        scaling_ = StandardScaler()
    # Default regressor: ridge with a broad log-spaced alpha grid.
    estimator_ = estimator
    if estimator_ is None:
        estimator_ = RidgeCV(alphas=np.logspace(-3, 5, 100))
    filter_bank_regressor = make_pipeline(
        filter_bank_transformer,
        scaling_,
        estimator_
    )
    return filter_bank_regressor
def make_filter_bank_classifier(names, method='riemann',
                                projection_params=None,
                                vectorization_params=None,
                                categorical_interaction=None, scaling=None,
                                estimator=None):
    """Generate pipeline for classification with filter bank model.
    Prepare filter bank models as used in [1]_. These models take as input
    sensor-space covariance matrices computed from M/EEG signals in different
    frequency bands. Then transformations are applied to improve the
    applicability of linear regression techniques by reducing the impact of
    field spread.
    In terms of implementation, this involves 1) projection
    (e.g. spatial filters) and 2) vectorization (e.g. taking the log on the
    diagonal).
    .. note::
        The resulting model expects as inputs data frames in which different
        covarances (e.g. for different frequencies) are stored inside columns
        indexed by ``names``.
        Other columns will be passed through by the underlying column
        transformers.
        The pipeline also supports fitting categorical interaction effects
        after projection and vectorization steps are performed.
    .. note::
        All essential methods from [1]_ are implemented here. In practice,
        we recommend comparing `riemann', `spoc' and `diag' as a baseline.
    Parameters
    ----------
    names : list of str
        The column names of the data frame corresponding to different
        covariances.
    method : str
        The method used for extracting features from covariances. Defaults
        to ``'riemann'``. Can be ``'riemann'``, ``'lw_riemann'``, ``'diag'``,
        ``'log_diag'``, ``'random'``, ``'naive'``, ``'spoc'``,
        ``'riemann_wasserstein'``.
    projection_params : dict | None
        The parameters for the projection step.
    vectorization_params : dict | None
        The parameters for the vectorization step.
    categorical_interaction : str
        The column in the input data frame containing a binary descriptor
        used to fit 2-way interaction effects.
    scaling : scikit-learn Transformer object | None
        Method for re-rescaling the features. Defaults to None. If None,
        StandardScaler is used.
    estimator : scikit-learn Estimator object.
        The estimator object. Defaults to None. If None, LogisticRegression
        is performed with default values.
    References
    ----------
    [1] D. Sabbagh, P. Ablin, G. Varoquaux, A. Gramfort, and D.A. Engemann.
    Predictive regression modeling with MEG/EEG: from source power
    to signals and cognitive states.
    *NeuroImage*, page 116893,2020. ISSN 1053-8119.
    https://doi.org/10.1016/j.neuroimage.2020.116893
    """
    # Reuse the shared filter-bank feature extraction front end.
    filter_bank_transformer = make_filter_bank_transformer(
        names=names, method=method, projection_params=projection_params,
        vectorization_params=vectorization_params,
        categorical_interaction=categorical_interaction
    )
    # Fall back to a standard scaler when the caller supplies none.
    scaling_ = scaling
    if scaling_ is None:
        scaling_ = StandardScaler()
    # Default classifier: liblinear logistic regression.
    estimator_ = estimator
    if estimator_ is None:
        estimator_ = LogisticRegression(solver='liblinear')
    # NOTE(review): local name 'filter_bank_regressor' is a copy-paste
    # leftover from make_filter_bank_regressor; purely cosmetic.
    filter_bank_regressor = make_pipeline(
        filter_bank_transformer,
        scaling_,
        estimator_
    )
    return filter_bank_regressor
| 37.685976
| 77
| 0.659574
| 1,374
| 12,361
| 5.820961
| 0.171761
| 0.027507
| 0.028882
| 0.012753
| 0.745561
| 0.736434
| 0.736434
| 0.729182
| 0.729182
| 0.729182
| 0
| 0.014614
| 0.258232
| 12,361
| 327
| 78
| 37.801223
| 0.857673
| 0.548338
| 0
| 0.33871
| 0
| 0
| 0.064218
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 1
| 0.032258
| false
| 0.008065
| 0.056452
| 0.008065
| 0.120968
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc91ef07b59bde91306bd73bcec484e360b1298a
| 108
|
py
|
Python
|
wouso/core/security/admin.py
|
AlexandruGhergut/wouso
|
f26244ff58ae626808ae8c58ccc93d21f9f2666f
|
[
"Apache-2.0"
] | 117
|
2015-01-02T18:07:33.000Z
|
2021-01-06T22:36:25.000Z
|
wouso/core/security/admin.py
|
AlexandruGhergut/wouso
|
f26244ff58ae626808ae8c58ccc93d21f9f2666f
|
[
"Apache-2.0"
] | 229
|
2015-01-12T07:07:58.000Z
|
2019-10-12T08:27:01.000Z
|
wouso/core/security/admin.py
|
AlexandruGhergut/wouso
|
f26244ff58ae626808ae8c58ccc93d21f9f2666f
|
[
"Apache-2.0"
] | 96
|
2015-01-07T05:26:09.000Z
|
2020-06-25T07:28:51.000Z
|
# Register the Report model with the default Django admin site so Report
# rows can be viewed and edited through the admin UI.
from django.contrib import admin
from wouso.core.security.models import Report
admin.site.register(Report)
| 21.6
| 45
| 0.833333
| 16
| 108
| 5.625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092593
| 108
| 4
| 46
| 27
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5d75f5d30780e5997d5df3ca87b964d9add7b705
| 47
|
py
|
Python
|
blog/migrations/__init__.py
|
Amohammadi2/django-SPA-blog
|
5dc10894ba360569b4849cfda0c3340ea5a15fb8
|
[
"MIT"
] | 2
|
2020-12-14T08:46:35.000Z
|
2021-06-03T17:26:45.000Z
|
blog/migrations/__init__.py
|
Amohammadi2/django-SPA-blog
|
5dc10894ba360569b4849cfda0c3340ea5a15fb8
|
[
"MIT"
] | null | null | null |
blog/migrations/__init__.py
|
Amohammadi2/django-SPA-blog
|
5dc10894ba360569b4849cfda0c3340ea5a15fb8
|
[
"MIT"
] | null | null | null |
# You just need to add some information here.
| 23.5
| 46
| 0.765957
| 8
| 47
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 47
| 1
| 47
| 47
| 0.972973
| 0.914894
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5d8e72c2a2b92c4afc6d55b1c762592baf4c02a2
| 147
|
py
|
Python
|
talleres_inov_docente/figures/plot_helpers.py
|
jfcaballero/Tutorial-sobre-scikit-learn-abreviado
|
1e2aa1f9132c277162135a5463068801edab8d15
|
[
"CC0-1.0"
] | 576
|
2016-03-20T10:05:58.000Z
|
2022-03-20T05:58:32.000Z
|
talleres_inov_docente/figures/plot_helpers.py
|
jfcaballero/Tutorial-sobre-scikit-learn-abreviado
|
1e2aa1f9132c277162135a5463068801edab8d15
|
[
"CC0-1.0"
] | 64
|
2016-03-20T08:56:49.000Z
|
2019-03-13T15:37:55.000Z
|
talleres_inov_docente/figures/plot_helpers.py
|
jfcaballero/Tutorial-sobre-scikit-learn-abreviado
|
1e2aa1f9132c277162135a5463068801edab8d15
|
[
"CC0-1.0"
] | 570
|
2016-03-20T19:23:07.000Z
|
2021-12-12T12:22:14.000Z
|
from matplotlib.colors import ListedColormap
# Discrete colormaps for plotting helpers:
# cm3 — three classes: blue, red, green.
cm3 = ListedColormap(['#0000aa', '#ff2020', '#50ff50'])
# cm2 — two classes: blue, red.
cm2 = ListedColormap(['#0000aa', '#ff2020'])
| 29.4
| 55
| 0.707483
| 14
| 147
| 7.428571
| 0.714286
| 0.384615
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165414
| 0.095238
| 147
| 4
| 56
| 36.75
| 0.616541
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5da47e4e4410b3e8309f308ed349c9a9599c9032
| 2,225
|
py
|
Python
|
dymos/utils/test/test_hermite.py
|
kaushikponnapalli/dymos
|
3fba91d0fc2c0e8460717b1bec80774676287739
|
[
"Apache-2.0"
] | 104
|
2018-09-08T16:52:27.000Z
|
2022-03-10T23:35:30.000Z
|
dymos/utils/test/test_hermite.py
|
kaushikponnapalli/dymos
|
3fba91d0fc2c0e8460717b1bec80774676287739
|
[
"Apache-2.0"
] | 628
|
2018-06-27T20:32:59.000Z
|
2022-03-31T19:24:32.000Z
|
dymos/utils/test/test_hermite.py
|
kaushikponnapalli/dymos
|
3fba91d0fc2c0e8460717b1bec80774676287739
|
[
"Apache-2.0"
] | 46
|
2018-06-27T20:54:07.000Z
|
2021-12-19T07:23:32.000Z
|
import unittest
import numpy as np
from numpy.testing import assert_almost_equal
from dymos.utils.hermite import hermite_matrices
class TestHermiteMatrices(unittest.TestCase):
    """Checks hermite_matrices against analytic polynomials on t in [-2, 2].

    Both tests follow the same recipe; the shared logic lives in
    _check_polynomial so each case only supplies its data.
    """

    def _check_polynomial(self, tau_given, num_eval, y_given, ydot_given, power):
        """Interpolate y = t**power from values/rates at tau_given and compare
        against the exact polynomial and its derivative at num_eval points."""
        tau_eval = np.linspace(-1, 1, num_eval)
        # Map tau in [-1, 1] onto t in [-2, 2].
        dt_dtau = 4.0 / 2.0
        # Get the hermite matrices.
        Ai, Bi, Ad, Bd = hermite_matrices(tau_given, tau_eval)
        # Interpolate y and ydot at tau_eval points in tau space.
        y_i = np.dot(Ai, y_given) + dt_dtau * np.dot(Bi, ydot_given)
        ydot_i = (1.0 / dt_dtau) * np.dot(Ad, y_given) + np.dot(Bd, ydot_given)
        # Exact function and derivative as the point of comparison.
        y_computed = (tau_eval * dt_dtau) ** power
        ydot_computed = power * (tau_eval * dt_dtau) ** (power - 1)
        assert_almost_equal(y_i, y_computed)
        assert_almost_equal(ydot_i, ydot_computed)

    def test_quadratic(self):
        # y = t**2 with values/rates at tau = [-1, 1].
        self._check_polynomial(tau_given=[-1.0, 1.0], num_eval=100,
                               y_given=[4.0, 4.0], ydot_given=[-4.0, 4.0],
                               power=2)

    def test_cubic(self):
        # y = t**3 with values/rates at tau = [-1, 0, 1].
        self._check_polynomial(tau_given=[-1.0, 0.0, 1.0], num_eval=101,
                               y_given=[-8.0, 0.0, 8.0],
                               ydot_given=[12.0, 0.0, 12.0], power=3)
# Allow running this test module directly as a script.
if __name__ == '__main__':  # pragma: no cover
    unittest.main()
| 31.785714
| 79
| 0.613483
| 357
| 2,225
| 3.613445
| 0.226891
| 0.054264
| 0.065891
| 0.034109
| 0.788372
| 0.755814
| 0.755814
| 0.755814
| 0.755814
| 0.722481
| 0
| 0.044264
| 0.279101
| 2,225
| 69
| 80
| 32.246377
| 0.759975
| 0.279551
| 0
| 0.363636
| 0
| 0
| 0.005041
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 1
| 0.060606
| false
| 0
| 0.121212
| 0
| 0.212121
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5db48c51cdf7033a6dcea32b1d26408dd6d2dbc0
| 1,891
|
py
|
Python
|
avod/datasets/kitti/kitti_aug_test.py
|
Ascend-Huawei/AVOD
|
ea62372517bbfa9d4020bc5ab2739ee182c63c56
|
[
"BSD-2-Clause"
] | null | null | null |
avod/datasets/kitti/kitti_aug_test.py
|
Ascend-Huawei/AVOD
|
ea62372517bbfa9d4020bc5ab2739ee182c63c56
|
[
"BSD-2-Clause"
] | null | null | null |
avod/datasets/kitti/kitti_aug_test.py
|
Ascend-Huawei/AVOD
|
ea62372517bbfa9d4020bc5ab2739ee182c63c56
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from npu_bridge.npu_init import *
import unittest
import numpy as np
from avod.datasets.kitti import kitti_aug
class KittiAugTest(unittest.TestCase):
    """Tests for the kitti_aug augmentation helpers."""

    def test_flip_boxes_3d(self):
        """flip_boxes_3d negates the first box coordinate and reflects the
        heading angle (pi/4 -> 3*pi/4, -pi/4 -> -3*pi/4); all other entries
        are unchanged."""
        boxes_3d = np.array([
            [1, 2, 3, 4, 5, 6, np.pi / 4],
            [1, 2, 3, 4, 5, 6, -np.pi / 4]
        ])
        # Expected: first column mirrored about 0, last column (heading)
        # reflected; middle columns untouched.
        exp_flipped_boxes_3d = np.array([
            [-1, 2, 3, 4, 5, 6, 3 * np.pi / 4],
            [-1, 2, 3, 4, 5, 6, -3 * np.pi / 4]
        ])
        flipped_boxes_3d = kitti_aug.flip_boxes_3d(boxes_3d)
        np.testing.assert_almost_equal(flipped_boxes_3d, exp_flipped_boxes_3d)
| 33.767857
| 78
| 0.670016
| 285
| 1,891
| 4.364912
| 0.368421
| 0.096463
| 0.041801
| 0.051447
| 0.728296
| 0.728296
| 0.728296
| 0.728296
| 0.728296
| 0.715434
| 0
| 0.035952
| 0.205711
| 1,891
| 55
| 79
| 34.381818
| 0.792277
| 0.647277
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.25
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5de81bead5f0058007dc4a5e3ad313c7ed6b6535
| 191
|
py
|
Python
|
01-basic-programs/04-lines.py
|
ncodeitgithub1/python-get-hands-dirty-programs
|
c9edb9e0bc9b2580737ca185935427343c550f01
|
[
"Apache-2.0"
] | null | null | null |
01-basic-programs/04-lines.py
|
ncodeitgithub1/python-get-hands-dirty-programs
|
c9edb9e0bc9b2580737ca185935427343c550f01
|
[
"Apache-2.0"
] | null | null | null |
01-basic-programs/04-lines.py
|
ncodeitgithub1/python-get-hands-dirty-programs
|
c9edb9e0bc9b2580737ca185935427343c550f01
|
[
"Apache-2.0"
] | 1
|
2021-07-19T13:20:34.000Z
|
2021-07-19T13:20:34.000Z
|
# 4 lines: Fibonacci via tuple assignment — print generation sizes below 100.
parents, babies = 1, 1
while babies < 100:
    print('This generation has {0} babies'.format(babies))
    parents, babies = babies, parents + babies
| 38.2
| 59
| 0.691099
| 25
| 191
| 5.28
| 0.64
| 0.295455
| 0.287879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044586
| 0.17801
| 191
| 5
| 60
| 38.2
| 0.796178
| 0.188482
| 0
| 0
| 0
| 0
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5df24f88464dca8942f1f032db545a5522ed1674
| 8,796
|
py
|
Python
|
pyabsa/utils/preprocess.py
|
jackie930/PyABSA
|
3cf733f8b95610a69c985b4650309c24f42b44b5
|
[
"MIT"
] | null | null | null |
pyabsa/utils/preprocess.py
|
jackie930/PyABSA
|
3cf733f8b95610a69c985b4650309c24f42b44b5
|
[
"MIT"
] | null | null | null |
pyabsa/utils/preprocess.py
|
jackie930/PyABSA
|
3cf733f8b95610a69c985b4650309c24f42b44b5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# file: preprocess.py
# author: jackie
# Copyright (C) 2021. All Rights Reserved.
import os
import pandas as pd
import argparse
import emoji
import re
from sklearn.model_selection import train_test_split
# Command-line interface: --inpath is the labelled CSV to convert,
# --folder_name the output directory, --task the output format
# ('aptepc', 'apc', or 'aptepc-tag').
parser = argparse.ArgumentParser()
parser.add_argument("--inpath", type=str, required=True, default='./raw_data/data1.csv')
parser.add_argument("--folder_name", type=str, required=False, default='./custom')
parser.add_argument("--task", type=str, required=False, default='aptepc')
args = parser.parse_args()
def convert(text, labels):
    """Convert span-level aspect labels into per-character BIO tags.

    Parameters
    ----------
    text : str
        The review text; one tag/sentiment slot is produced per character.
    labels : str
        A string holding a Python literal list of label tuples; each label's
        item [3] is the polarity marker ('正' positive / '负' negative) and
        item [4] is the (start, end) character span of the aspect.

    Returns
    -------
    tuple or None
        (text, tags, sentiment) on success, where ``tags`` uses
        'B-ASP'/'I-ASP'/'O' and ``sentiment`` holds the per-character
        polarity ('-999' = none). Returns None when the row can't be parsed.
    """
    try:
        # NOTE(security): eval() on the label column — only run on trusted CSVs.
        labels = eval(labels)
        tags = ['O'] * len(text)
        sentiment = ['-999'] * len(text)
        for j in range(len(labels)):
            label = labels[j]
            sentiment_key = labels[j][3]
            if sentiment_key == '正':
                sentiment_value = 'Positive'
            elif sentiment_key == '负':
                sentiment_value = 'Negative'
            else:
                sentiment_value = 'Others'
            # First character of the span gets B-ASP, the rest I-ASP.
            tags[label[4][0]] = 'B-ASP'
            sentiment[label[4][0]] = sentiment_value
            k = label[4][0] + 1
            while k < label[4][1]:
                tags[k] = 'I-ASP'
                sentiment[k] = sentiment_value
                k += 1
        return text, tags, sentiment
    except Exception:  # was a bare except: — don't swallow SystemExit/KeyboardInterrupt
        # Malformed row: report it and fall through (implicitly returns None).
        print("labels", labels)
        print("text", text)
def convert_tag(text, labels):
    """Convert span-level labels into per-character BIO tags, using each
    label's own category (item [1]) in the tag instead of the generic 'ASP'.

    Parameters and return value mirror convert(): *labels* is a string
    holding a Python literal list of label tuples; item [1] is the aspect
    category, item [3] the polarity marker ('正'/'负'), item [4] the
    (start, end) span. Returns (text, tags, sentiment) on success, or None
    when the row can't be parsed.
    """
    try:
        # NOTE(security): eval() on the label column — only run on trusted CSVs.
        labels = eval(labels)
        tags = ['O'] * len(text)
        sentiment = ['-999'] * len(text)
        for j in range(len(labels)):
            label = labels[j]
            sentiment_key = labels[j][3]
            if sentiment_key == '正':
                sentiment_value = 'Positive'
            elif sentiment_key == '负':
                sentiment_value = 'Negative'
            else:
                sentiment_value = 'Others'
            # First character of the span gets B-<category>, the rest I-<category>.
            tags[label[4][0]] = 'B-' + label[1]
            sentiment[label[4][0]] = sentiment_value
            k = label[4][0] + 1
            while k < label[4][1]:
                tags[k] = 'I-' + label[1]
                sentiment[k] = sentiment_value
                k += 1
        return text, tags, sentiment
    except Exception:  # was a bare except: — don't swallow SystemExit/KeyboardInterrupt
        # Malformed row: report it and fall through (implicitly returns None).
        print("labels", labels)
        print("text", text)
def convert_sentiment(sentiment_key):
    """Map the Chinese polarity marker to its English label.

    '正' maps to 'Positive'; every other value maps to 'Negative'.
    """
    return 'Positive' if sentiment_key == '正' else 'Negative'
def convert_apc(text, label):
    """Build APC training triples from one labelled review.

    *label* is a string holding a Python literal list of label tuples;
    item [0] is the aspect term, item [3] the polarity marker, item [4]
    the (start, end) span. Returns three parallel lists: the text with the
    aspect span replaced by '$T$ ', the aspect terms, and their polarities.
    """
    # Deduplicate on (term, polarity, span) before generating output rows.
    unique_labels = list({(item[0], item[3], item[4]) for item in eval(label)})
    masked_texts, aspects, polarities = [], [], []
    for aspect, sentiment_key, span in unique_labels:
        masked_texts.append(text[:span[0]] + '$T$ ' + text[span[1]:])
        aspects.append(aspect)
        polarities.append(convert_sentiment(sentiment_key))
    return masked_texts, aspects, polarities
def filter_emoji(desstr, restr=''):
    """Strip emoji from *desstr*, replacing each occurrence with *restr*.

    Matches characters outside the Basic Multilingual Plane; on narrow
    builds where that range is rejected, falls back to matching surrogate
    pairs instead.
    """
    try:
        pattern = re.compile(u'[\U00010000-\U0010ffff]')
    except re.error:
        # Narrow build: the astral range above is not a valid character class.
        pattern = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
    return pattern.sub(restr, desstr)
def convert_to_atepc(inpath, dist_fname, flag):
    """Write the ATEPC-format split ('train' or 'test' per *flag*) of the
    labelled CSV at *inpath* to *dist_fname*: one "<char> <tag> <sentiment>"
    line per character, with blank lines at punctuation and between reviews."""
    # Before writing, remove the output file if it already exists.
    if os.path.exists(dist_fname):
        os.remove(dist_fname)
    f1 = open(dist_fname, 'w', encoding='utf8')
    data = pd.read_csv(inpath)
    data.columns = ['text', 'tag_sentiment_list']
    # Replace emoji with a fixed placeholder ('xx') before tagging.
    data['text'] = data['text'].map(lambda x: filter_emoji(x, restr='xx'))
    # Keep only reviews of at most 600 characters.
    data = data[data['text'].str.len() <= 600]
    # Deterministic 80/20 train/test split (fixed random_state).
    x_train, x_test = train_test_split(data, test_size=0.2, random_state=42)
    if flag == 'train':
        data_res = x_train.iloc[:, :].reset_index()
    else:
        data_res = x_test.iloc[:, :].reset_index()
    # print (data_res.head())
    for i in range(len(data_res)):
        text, label = data_res['text'][i], data_res['tag_sentiment_list'][i]
        text, tags, sentiment = convert(text, label)
        # Punctuation/whitespace characters become sentence separators.
        for word, tag, sen in zip(text, tags, sentiment):
            if word not in [',', '。', ' ', '\xa0', '\u2006', '\u3000', '\u2002', '\u2003', '\u2005', '\x0c', '\u2028',
                            '\u2009', '\u200a']:
                f1.write(word + ' ' + tag + ' ' + sen + '\n')
            else:
                f1.write("\n")
        f1.write("\n")
    f1.close()
    print ("process atepc finished!")
def convert_to_atepc_tag(inpath, dist_fname, flag):
    """Write the category-tagged ATEPC split ('train' or 'test' per *flag*)
    of the labelled CSV at *inpath* to *dist_fname*.

    NOTE(review): this calls convert() (generic ASP tags), not
    convert_tag() (per-category tags) — presumably convert_tag was
    intended here; confirm before relying on the category tags."""
    # Before writing, remove the output file if it already exists.
    if os.path.exists(dist_fname):
        os.remove(dist_fname)
    f1 = open(dist_fname, 'w', encoding='utf8')
    data = pd.read_csv(inpath)
    data.columns = ['text', 'tag_sentiment_list']
    # Replace emoji with a fixed placeholder ('xx') before tagging.
    data['text'] = data['text'].map(lambda x: filter_emoji(x, restr='xx'))
    # drop id list not able to process
    # print (data.iloc[8832,:])
    # data = data.drop([8832])
    # Keep only reviews of at most 600 characters.
    data = data[data['text'].str.len() <= 600]
    # Deterministic 80/20 train/test split (fixed random_state).
    x_train, x_test = train_test_split(data, test_size=0.2, random_state=42)
    if flag == 'train':
        data_res = x_train.iloc[:, :].reset_index()
    else:
        data_res = x_test.iloc[:, :].reset_index()
    # print (data_res.head())
    for i in range(len(data_res)):
        text, label = data_res['text'][i], data_res['tag_sentiment_list'][i]
        text, tags, sentiment = convert(text, label)
        # Punctuation/whitespace characters become sentence separators.
        for word, tag, sen in zip(text, tags, sentiment):
            if word not in [',', '。', ' ', '\xa0', '\u2006', '\u3000', '\u2002', '\u2003', '\u2005', '\x0c', '\u2028',
                            '\u2009', '\u200a']:
                f1.write(word + ' ' + tag + ' ' + sen + '\n')
            else:
                f1.write("\n")
        f1.write("\n")
    f1.close()
    print ("process atepc finished!")
def convert_to_apc(inpath, dist_fname, flag):
    """Write the APC-format split ('train' or 'test' per *flag*) of the CSV
    at *inpath* to *dist_fname*: three lines per aspect — the $T$-masked
    text, the aspect term, and its polarity.

    NOTE(review): unlike the atepc writers, no emoji filtering, length cap,
    or column renaming happens here — confirm the input CSV already uses
    'text'/'tag_sentiment_list' headers."""
    # Before writing, remove the output file if it already exists.
    if os.path.exists(dist_fname):
        os.remove(dist_fname)
    f1 = open(dist_fname, 'w', encoding='utf8')
    data = pd.read_csv(inpath)
    # Deterministic 80/20 train/test split (fixed random_state).
    x_train, x_test = train_test_split(data, test_size=0.2, random_state=42)
    if flag == 'train':
        data_res = x_train.iloc[:, :].reset_index()
    else:
        data_res = x_test.iloc[:, :].reset_index()
    # print (data_res.head())
    for i in range(len(data_res)):
        text, label = data_res['text'][i], data_res['tag_sentiment_list'][i]
        str1_list, str2_list, str3_list = convert_apc(text, label)
        for x1, x2, x3 in zip(str1_list, str2_list, str3_list):
            f1.write(x1 + '\n')
            f1.write(x2 + '\n')
            f1.write(x3 + '\n')
    f1.close()
    print ("process apc finished!")
def main(inpath, folder_name, task):
    """Dispatch on *task* ('aptepc', 'apc', 'aptepc-tag') and write train
    and test files derived from the CSV at *inpath*.

    NOTE(review): output files are written under folder_name's basename
    (relative to the current working directory), not under the
    *folder_name* directory created below — confirm this is intended."""
    if not os.path.exists(folder_name):
        os.makedirs(folder_name)
    if task == 'aptepc':
        # Derive output paths from the last path component of folder_name.
        print ("start process for an aptepc task")
        folder_name_prefix = folder_name.split('/')[-1]
        dist_train_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.train.txt.atepc')
        dist_test_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.test.txt.atepc')
        # process train
        convert_to_atepc(inpath, dist_train_fname, 'train')
        print ("<<< finish training data preprocess")
        # process test
        convert_to_atepc(inpath, dist_test_fname, 'test')
        print ("<<< finish test data preprocess")
    elif task == 'apc':
        # Derive output paths from the last path component of folder_name.
        folder_name_prefix = folder_name.split('/')[-1]
        dist_train_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.train.txt')
        dist_test_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.test.txt')
        # process train
        convert_to_apc(inpath, dist_train_fname, 'train')
        print ("<<< finish training data preprocess")
        # process test
        convert_to_apc(inpath, dist_test_fname, 'test')
        print ("<<< finish test data preprocess")
    elif task == 'aptepc-tag':
        # Derive output paths from the last path component of folder_name.
        print ("start process for an aptepc tag task")
        folder_name_prefix = folder_name.split('/')[-1]
        dist_train_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.train.txt.atepc')
        dist_test_fname = os.path.join(folder_name_prefix, folder_name_prefix + '.test.txt.atepc')
        # process train
        convert_to_atepc_tag(inpath, dist_train_fname, 'train')
        print ("<<< finish training data preprocess")
        # process test
        convert_to_atepc_tag(inpath, dist_test_fname, 'test')
        print ("<<< finish test data preprocess")
# Run immediately with the parsed CLI arguments (no __main__ guard).
main(args.inpath, args.folder_name, args.task)
| 32.820896
| 118
| 0.582651
| 1,140
| 8,796
| 4.30614
| 0.161404
| 0.052964
| 0.04889
| 0.040334
| 0.783866
| 0.752495
| 0.730088
| 0.72255
| 0.72255
| 0.705846
| 0
| 0.031726
| 0.268986
| 8,796
| 268
| 119
| 32.820896
| 0.731726
| 0.070714
| 0
| 0.663043
| 0
| 0
| 0.122574
| 0.006509
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048913
| false
| 0
| 0.032609
| 0
| 0.108696
| 0.081522
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f8e61b754a032cf61ead46cd66c6dc6f3690b256
| 121
|
py
|
Python
|
pytest_capture_log_error/test_file.py
|
butla/experiments
|
8c8ade15bb01978763d6618342fa42ad7563e38f
|
[
"MIT"
] | 1
|
2020-06-01T02:41:45.000Z
|
2020-06-01T02:41:45.000Z
|
pytest_capture_log_error/test_file.py
|
butla/experiments
|
8c8ade15bb01978763d6618342fa42ad7563e38f
|
[
"MIT"
] | 48
|
2019-12-26T16:38:19.000Z
|
2021-07-06T13:29:50.000Z
|
pytest_capture_log_error/test_file.py
|
butla/experiments
|
8c8ade15bb01978763d6618342fa42ad7563e38f
|
[
"MIT"
] | null | null | null |
import a_file
def test_a(capsys):
    """a_file.bla() returns 5 and a_file.LOG_MESSAGE appears on captured stderr."""
    assert a_file.bla() == 5
    # capsys is pytest's output-capture fixture; check stderr for the message.
    assert a_file.LOG_MESSAGE in capsys.readouterr().err
| 20.166667
| 56
| 0.719008
| 21
| 121
| 3.904762
| 0.666667
| 0.182927
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0.173554
| 121
| 5
| 57
| 24.2
| 0.81
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5d1e0c02d27663acdb4392c5b988ee86f8972b53
| 147
|
py
|
Python
|
climbproject/climbapp/admin.py
|
javawolfpack/ClimbProject
|
508cf822a1eb0b78f7120a3d469ceb65e3b423f7
|
[
"MIT"
] | null | null | null |
climbproject/climbapp/admin.py
|
javawolfpack/ClimbProject
|
508cf822a1eb0b78f7120a3d469ceb65e3b423f7
|
[
"MIT"
] | 5
|
2018-11-24T16:15:24.000Z
|
2022-02-11T03:40:48.000Z
|
climbproject/climbapp/admin.py
|
javawolfpack/ClimbProject
|
508cf822a1eb0b78f7120a3d469ceb65e3b423f7
|
[
"MIT"
] | 1
|
2018-11-24T16:13:49.000Z
|
2018-11-24T16:13:49.000Z
|
# Expose ClimbModel in the Django admin site.
from django.contrib import admin
#from .models import *
from . import models
# Register your models here.
admin.site.register(models.ClimbModel)
| 18.375
| 38
| 0.782313
| 20
| 147
| 5.75
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136054
| 147
| 7
| 39
| 21
| 0.905512
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5d314e81984e8fdd23c8fa9711722c873d27574a
| 160
|
py
|
Python
|
fieldservice/fieldservice/doctype/fieldservice_settings/test_fieldservice_settings.py
|
itsdaveit/fieldservice
|
90bd813fb01f23a18df3b24fc67ec86c4d8be5a5
|
[
"MIT"
] | null | null | null |
fieldservice/fieldservice/doctype/fieldservice_settings/test_fieldservice_settings.py
|
itsdaveit/fieldservice
|
90bd813fb01f23a18df3b24fc67ec86c4d8be5a5
|
[
"MIT"
] | null | null | null |
fieldservice/fieldservice/doctype/fieldservice_settings/test_fieldservice_settings.py
|
itsdaveit/fieldservice
|
90bd813fb01f23a18df3b24fc67ec86c4d8be5a5
|
[
"MIT"
] | 1
|
2021-11-09T10:26:06.000Z
|
2021-11-09T10:26:06.000Z
|
# Copyright (c) 2022, itsdve GmbH and Contributors
# See license.txt
# import frappe
import unittest
class TestFieldserviceSettings(unittest.TestCase):
    """Placeholder test case for the Fieldservice Settings doctype; no
    behavior is exercised yet."""
    pass
| 17.777778
| 50
| 0.79375
| 19
| 160
| 6.684211
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0.1375
| 160
| 8
| 51
| 20
| 0.891304
| 0.4875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
5d43ba93812ece31b158196b6ad2d32a374bd0f8
| 147
|
py
|
Python
|
annotate/backend/admin.py
|
hopeogbons/image-annotation
|
2d8b1799bc791428fd3ab29d8052195996923130
|
[
"Apache-2.0"
] | null | null | null |
annotate/backend/admin.py
|
hopeogbons/image-annotation
|
2d8b1799bc791428fd3ab29d8052195996923130
|
[
"Apache-2.0"
] | 11
|
2021-03-09T10:15:39.000Z
|
2022-02-26T13:53:51.000Z
|
annotate/backend/admin.py
|
hopeogbons/image-annotation
|
2d8b1799bc791428fd3ab29d8052195996923130
|
[
"Apache-2.0"
] | null | null | null |
# Make the Image and Annotation models manageable via the Django admin UI.
from django.contrib import admin
from annotate.backend.models import Image, Annotation
admin.site.register(Image)
admin.site.register(Annotation)
| 24.5
| 53
| 0.836735
| 20
| 147
| 6.15
| 0.6
| 0.146341
| 0.276423
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 147
| 5
| 54
| 29.4
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
53a80bedba1fa544dba66c5282310b99391dfaba
| 917
|
py
|
Python
|
MathPainting_OOP/shapes.py
|
matbocz/kurs-python-udemy
|
bbc53d0b2073b400aaad5ff908b3e1c09b815121
|
[
"MIT"
] | null | null | null |
MathPainting_OOP/shapes.py
|
matbocz/kurs-python-udemy
|
bbc53d0b2073b400aaad5ff908b3e1c09b815121
|
[
"MIT"
] | null | null | null |
MathPainting_OOP/shapes.py
|
matbocz/kurs-python-udemy
|
bbc53d0b2073b400aaad5ff908b3e1c09b815121
|
[
"MIT"
] | null | null | null |
class Rectangle:
    """An axis-aligned rectangle that can paint itself onto a Canvas object."""

    def __init__(self, x, y, width, height, color):
        # (x, y) is the top-left corner in array coordinates: x indexes
        # rows, y indexes columns of canvas.data.
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.color = color

    def draw(self, canvas):
        """Fill this rectangle's region of ``canvas.data`` with its color."""
        rows = slice(self.x, self.x + self.height)
        cols = slice(self.y, self.y + self.width)
        canvas.data[rows, cols] = self.color
class Square:
    """A square that can paint itself onto a Canvas object."""

    def __init__(self, x, y, side, color):
        # (x, y) is the top-left corner in array coordinates; side is the
        # edge length in cells.
        self.x = x
        self.y = y
        self.side = side
        self.color = color

    def draw(self, canvas):
        """Fill this square's region of ``canvas.data`` with its color."""
        rows = slice(self.x, self.x + self.side)
        cols = slice(self.y, self.y + self.side)
        canvas.data[rows, cols] = self.color
| 30.566667
| 91
| 0.591058
| 138
| 917
| 3.869565
| 0.246377
| 0.074906
| 0.067416
| 0.052434
| 0.7603
| 0.707865
| 0.707865
| 0.707865
| 0.629213
| 0.629213
| 0
| 0
| 0.2988
| 917
| 29
| 92
| 31.62069
| 0.830482
| 0.293348
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0
| 0
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
53b7cf475edf549606a00bf10c8b39ab817c0d94
| 72
|
py
|
Python
|
testjpkg/jsonify/hij.py
|
thisisishara/test_pypi_cli
|
15b22ed8943a18a6d9de9ee4ba6a84249a633e2e
|
[
"MIT"
] | null | null | null |
testjpkg/jsonify/hij.py
|
thisisishara/test_pypi_cli
|
15b22ed8943a18a6d9de9ee4ba6a84249a633e2e
|
[
"MIT"
] | null | null | null |
testjpkg/jsonify/hij.py
|
thisisishara/test_pypi_cli
|
15b22ed8943a18a6d9de9ee4ba6a84249a633e2e
|
[
"MIT"
] | null | null | null |
print("hiiiiiiiiiiiiiiiix")
def sayhi():
print("2nd pkg said hi")
| 12
| 28
| 0.666667
| 9
| 72
| 5.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016949
| 0.180556
| 72
| 5
| 29
| 14.4
| 0.79661
| 0
| 0
| 0
| 0
| 0
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
54db106024a4f46cf548821fe280245ccaf57da7
| 114
|
py
|
Python
|
azbankgateways/views/__init__.py
|
lordmahyar/az-iranian-bank-gateways
|
e9eb7101f2b91318847d63d783c22c4a8d430ba3
|
[
"MIT"
] | 196
|
2020-12-07T11:29:19.000Z
|
2022-03-23T09:32:56.000Z
|
azbankgateways/views/__init__.py
|
lordmahyar/az-iranian-bank-gateways
|
e9eb7101f2b91318847d63d783c22c4a8d430ba3
|
[
"MIT"
] | 25
|
2021-01-13T11:56:35.000Z
|
2022-03-14T19:41:51.000Z
|
azbankgateways/views/__init__.py
|
lordmahyar/az-iranian-bank-gateways
|
e9eb7101f2b91318847d63d783c22c4a8d430ba3
|
[
"MIT"
] | 44
|
2021-01-08T18:27:47.000Z
|
2022-03-22T03:36:04.000Z
|
from .banks import callback_view, go_to_bank_gateway
from .samples import sample_payment_view, sample_result_view
| 38
| 60
| 0.877193
| 18
| 114
| 5.111111
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087719
| 114
| 2
| 61
| 57
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
54f4e0fec59282b2d1c7f1cba1c1b99fa606ce17
| 70
|
py
|
Python
|
nemo/collections/nlp/losses/__init__.py
|
KalifiaBillal/NeMo
|
4fc670ad0c886be2623247921d4311ba30f486f8
|
[
"Apache-2.0"
] | 1
|
2021-01-26T21:54:36.000Z
|
2021-01-26T21:54:36.000Z
|
nemo/collections/nlp/losses/__init__.py
|
aiskumo/NeMo
|
b51a39f9834ad50db77c4246aeb6e2349695add5
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/losses/__init__.py
|
aiskumo/NeMo
|
b51a39f9834ad50db77c4246aeb6e2349695add5
|
[
"Apache-2.0"
] | 2
|
2021-02-04T14:45:50.000Z
|
2021-02-04T14:56:05.000Z
|
from nemo.collections.nlp.losses.sgd_loss import SGDDialogueStateLoss
| 35
| 69
| 0.885714
| 9
| 70
| 6.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 70
| 1
| 70
| 70
| 0.924242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ad35edb76ff8aacbd63002439bbf9d2f5995fd2
| 59
|
py
|
Python
|
pyecsca/sca/re/__init__.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | 24
|
2019-07-01T00:27:24.000Z
|
2022-02-17T00:46:28.000Z
|
pyecsca/sca/re/__init__.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | 18
|
2020-12-10T15:08:56.000Z
|
2022-03-01T11:44:37.000Z
|
pyecsca/sca/re/__init__.py
|
scrambler-crypto/pyecsca
|
491abfb548455669abd470382a48dcd07b2eda87
|
[
"MIT"
] | 7
|
2020-02-20T18:44:29.000Z
|
2021-11-30T21:16:44.000Z
|
"""Package for reverse-engineering."""
from .rpa import *
| 14.75
| 38
| 0.694915
| 7
| 59
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 3
| 39
| 19.666667
| 0.803922
| 0.542373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4afc50cce044689d528dbbb6c10aa634c6f79ad7
| 87
|
py
|
Python
|
src/server_py3/aps/src/wes/api/v1/users/__init__.py
|
kfrime/yonder
|
cd2f491c24f8552aeadd6ee48c601e1194a2e082
|
[
"MIT"
] | null | null | null |
src/server_py3/aps/src/wes/api/v1/users/__init__.py
|
kfrime/yonder
|
cd2f491c24f8552aeadd6ee48c601e1194a2e082
|
[
"MIT"
] | 12
|
2020-01-04T03:30:02.000Z
|
2021-06-02T01:22:45.000Z
|
src/server_py3/aps/src/wes/api/v1/users/__init__.py
|
kfrime/yonder
|
cd2f491c24f8552aeadd6ee48c601e1194a2e082
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from . import signup, signin, signout, update, info, detail
| 21.75
| 60
| 0.701149
| 12
| 87
| 5.083333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.172414
| 87
| 3
| 61
| 29
| 0.833333
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ab30352abcf50690534a3f85202149cd132e631c
| 46
|
py
|
Python
|
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | 1
|
2020-02-17T08:18:29.000Z
|
2020-02-17T08:18:29.000Z
|
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
src/webpy1/src/manage/checkPic.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 2011-6-22
@author: dholer
'''
| 7.666667
| 20
| 0.608696
| 7
| 46
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 0.173913
| 46
| 5
| 21
| 9.2
| 0.552632
| 0.804348
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab30973f8a964fee614a5ec7df1f83c6a91d145f
| 122
|
py
|
Python
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 2
|
2021-09-28T09:53:53.000Z
|
2021-10-01T17:45:29.000Z
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 10
|
2021-09-17T22:14:37.000Z
|
2022-03-21T16:25:39.000Z
|
tests/__init__.py
|
coleb/sendoff
|
fc1b38ba7571254a88ca457f6f618ae4572f30b6
|
[
"MIT"
] | 1
|
2021-09-27T15:55:40.000Z
|
2021-09-27T15:55:40.000Z
|
"""Tests for the `sendoff` library."""
"""
The `sendoff` library tests validate the expected function of the library.
"""
| 24.4
| 74
| 0.704918
| 16
| 122
| 5.375
| 0.5625
| 0.232558
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147541
| 122
| 4
| 75
| 30.5
| 0.826923
| 0.262295
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab450e026b0907e8b838f6f9a3e2ba1d4218dd25
| 5,065
|
py
|
Python
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 84
|
2017-10-22T11:01:39.000Z
|
2022-02-27T03:43:48.000Z
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 22
|
2017-12-11T07:21:56.000Z
|
2021-09-23T02:53:50.000Z
|
cmibs/cisco_vlan_membership_mib.py
|
prorevizor/noc
|
37e44b8afc64318b10699c06a1138eee9e7d6a4e
|
[
"BSD-3-Clause"
] | 23
|
2017-12-06T06:59:52.000Z
|
2022-02-24T00:02:25.000Z
|
# ----------------------------------------------------------------------
# CISCO-VLAN-MEMBERSHIP-MIB
# Compiled MIB
# Do not modify this file directly
# Run ./noc mib make-cmib instead
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# MIB Name
NAME = "CISCO-VLAN-MEMBERSHIP-MIB"
# Metadata
LAST_UPDATED = "2007-12-14"  # LAST-UPDATED clause of the source MIB definition
COMPILED = "2020-01-19"      # date this Python module was generated by make-cmib
# MIB Data: name -> oid
# Maps fully-qualified "<MIB module>::<symbol>" names to dotted OID strings.
# Every entry lies under 1.3.6.1.4.1.9.9.68 (ciscoVlanMembershipMIB).
MIB = {
    "CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIB": "1.3.6.1.4.1.9.9.68",
    "CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIBObjects": "1.3.6.1.4.1.9.9.68.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmps": "1.3.6.1.4.1.9.9.68.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsVQPVersion": "1.3.6.1.4.1.9.9.68.1.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRetries": "1.3.6.1.4.1.9.9.68.1.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmInterval": "1.3.6.1.4.1.9.9.68.1.1.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirm": "1.3.6.1.4.1.9.9.68.1.1.4",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmResult": "1.3.6.1.4.1.9.9.68.1.1.5",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsCurrent": "1.3.6.1.4.1.9.9.68.1.1.6",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsTable": "1.3.6.1.4.1.9.9.68.1.1.7",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsEntry": "1.3.6.1.4.1.9.9.68.1.1.7.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsIpAddress": "1.3.6.1.4.1.9.9.68.1.1.7.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsPrimary": "1.3.6.1.4.1.9.9.68.1.1.7.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRowStatus": "1.3.6.1.4.1.9.9.68.1.1.7.1.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembership": "1.3.6.1.4.1.9.9.68.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryTable": "1.3.6.1.4.1.9.9.68.1.2.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryEntry": "1.3.6.1.4.1.9.9.68.1.2.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryVlanIndex": "1.3.6.1.4.1.9.9.68.1.2.1.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMemberPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMember2kPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipTable": "1.3.6.1.4.1.9.9.68.1.2.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipEntry": "1.3.6.1.4.1.9.9.68.1.2.2.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlanType": "1.3.6.1.4.1.9.9.68.1.2.2.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlan": "1.3.6.1.4.1.9.9.68.1.2.2.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmPortStatus": "1.3.6.1.4.1.9.9.68.1.2.2.1.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlans": "1.3.6.1.4.1.9.9.68.1.2.2.1.4",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlans2k": "1.3.6.1.4.1.9.9.68.1.2.2.1.5",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlans3k": "1.3.6.1.4.1.9.9.68.1.2.2.1.6",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlans4k": "1.3.6.1.4.1.9.9.68.1.2.2.1.7",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtTable": "1.3.6.1.4.1.9.9.68.1.2.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtEntry": "1.3.6.1.4.1.9.9.68.1.2.3.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipPortRangeIndex": "1.3.6.1.4.1.9.9.68.1.2.3.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtPorts": "1.3.6.1.4.1.9.9.68.1.2.3.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVlanCreationMode": "1.3.6.1.4.1.9.9.68.1.2.4",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmStatistics": "1.3.6.1.4.1.9.9.68.1.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPQueries": "1.3.6.1.4.1.9.9.68.1.3.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPResponses": "1.3.6.1.4.1.9.9.68.1.3.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChanges": "1.3.6.1.4.1.9.9.68.1.3.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPShutdown": "1.3.6.1.4.1.9.9.68.1.3.4",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPDenied": "1.3.6.1.4.1.9.9.68.1.3.5",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongDomain": "1.3.6.1.4.1.9.9.68.1.3.6",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongVersion": "1.3.6.1.4.1.9.9.68.1.3.7",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmInsufficientResources": "1.3.6.1.4.1.9.9.68.1.3.8",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmStatus": "1.3.6.1.4.1.9.9.68.1.4",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsEnabled": "1.3.6.1.4.1.9.9.68.1.4.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlan": "1.3.6.1.4.1.9.9.68.1.5",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanTable": "1.3.6.1.4.1.9.9.68.1.5.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanEntry": "1.3.6.1.4.1.9.9.68.1.5.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanId": "1.3.6.1.4.1.9.9.68.1.5.1.1.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanCdpVerifyEnable": "1.3.6.1.4.1.9.9.68.1.5.1.1.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmNotifications": "1.3.6.1.4.1.9.9.68.2",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsPrefix": "1.3.6.1.4.1.9.9.68.2.0",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChange": "1.3.6.1.4.1.9.9.68.2.0.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMIBConformance": "1.3.6.1.4.1.9.9.68.3",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMIBCompliances": "1.3.6.1.4.1.9.9.68.3.1",
    "CISCO-VLAN-MEMBERSHIP-MIB::vmMIBGroups": "1.3.6.1.4.1.9.9.68.3.2",
}
# DISPLAY-HINT overrides: none are defined for this MIB
DISPLAY_HINTS = {}
| 64.113924
| 98
| 0.62231
| 1,027
| 5,065
| 3.067186
| 0.10224
| 0.04381
| 0.349841
| 0.405079
| 0.600635
| 0.486032
| 0.299365
| 0.226667
| 0.226667
| 0.223492
| 0
| 0.166881
| 0.079566
| 5,065
| 78
| 99
| 64.935897
| 0.508795
| 0.082922
| 0
| 0
| 0
| 0.83871
| 0.83614
| 0.814983
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ab5b1e3328548b4d29719e9eabea190e03a2dd78
| 78,836
|
py
|
Python
|
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | 1
|
2020-05-20T06:19:40.000Z
|
2020-05-20T06:19:40.000Z
|
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | null | null | null |
bifacialvf/vf.py
|
shirubana/bifacialvf
|
7cd1c4c658bb7a68f0815b2bd1a6d5c492ca7300
|
[
"BSD-3-Clause"
] | 1
|
2020-12-30T08:05:49.000Z
|
2020-12-30T08:05:49.000Z
|
# -*- coding: utf-8 -*-
"""
ViewFactor module - VF calculation helper files for bifacial-viewfactor
@author Bill Marion
@translated to python by sayala 06/09/17
"""
# ensure python3 compatible division and printing
from __future__ import division, print_function, absolute_import
import math
import numpy as np
from sun import solarPos, sunIncident, perezComp, aOIcorrection
import logging
# TODO: set level or add formatters if more advanced logging required
# Module-level logger, per stdlib convention (getLogger(__name__));
# per the author's note it is only used to raise/report errors.
LOGGER = logging.getLogger(__name__)  # only used to raise errors
# Degrees-to-radians conversion factor (pi / 180), used throughout this module.
DTOR = math.pi / 180.0  # Factor for converting from degrees to radians
def getBackSurfaceIrradiances(rowType, maxShadow, PVbackSurface, beta, sazm,
                              dni, dhi, C, D, albedo, zen, azm, cellRows,
                              pvBackSH, rearGroundGHI, frontGroundGHI,
                              frontReflected, offset=0):
    """
    This method calculates the AOI corrected irradiance on the back of the PV
    module/panel. 11/19/2015

    Added rowType and other changes to distinguish between types of rows.
    4/19/2016

    Added input of offset of reference cell from PV module back (in PV panel
    slope lengths) for modeling Sara's reference cell measurements, should be
    set to zero for PV module cell irradiances.

    Added while loop so projected Xs aren't too negative causing array index
    problems (<0) 12/13/2016::

        while (projectedX1 < -100.0 || projectedX2 < -100.0):
            # Offset so array indexes are >= -100.0 12/13/2016
            projectedX1 += 100.0;
            projectedX2 += 100.0;

    Parameters
    ----------
    rowType : str
        Type of row: "first", "interior", "last", or "single"
    maxShadow
        Maximum shadow length projected to the front(-) or rear (+) from the
        front of the module (unused in this function's body; kept for a
        signature consistent with getFrontSurfaceIrradiances)
    PVbackSurface
        PV module back surface material type, either "glass" or "ARglass"
    beta
        Tilt from horizontal of the PV modules/panels (deg) (for front surface)
    sazm
        Surface azimuth of PV panels (deg) (for front surface)
    dni
        Direct normal irradiance (W/m2)
    dhi
        Diffuse horizontal irradiance (W/m2)
    C
        Ground clearance of PV panel (in PV panel slope lengths)
    D
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    albedo
        Ground albedo
    zen
        Sun zenith (in radians)
    azm
        Sun azimuth (in radians)
    cellRows : int
        Number of cell rows per module/panel (loop bound and position divisor)
    pvBackSH
        Decimal fraction of the back surface of the PV panel that is shaded,
        0.0 to 1.0
    rearGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments (W/m2)
    frontGroundGHI : array of size [100]
        Global horizontal irradiance for each of 100 ground segments (W/m2)
    frontReflected : array of size [cellRows]
        Irradiance reflected from the front of the PV module/panel (W/m2) in
        the row behind the one of interest
    offset
        Offset of reference cell from PV module back (in PV panel slope
        lengths), set to zero for PV module cell irradiances

    Returns
    -------
    backGTI : array of size [cellRows]
        AOI corrected irradiance on back side of PV module/panel, one for each
        cell row (W/m2)
    aveGroundGHI : numeric
        Average GHI on ground under PV array

    Notes
    -----
    1-degree hemispherical segment AOI correction factor for glass (index=0)
    and ARglass (index=1)
    """
    backGTI = []
    # AOI correction factors, one per 1-degree hemispherical segment (180 per
    # surface type): row 0 = glass, row 1 = ARglass.
    SegAOIcor = [
        [0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
         0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
         0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
         0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
         0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
         0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
         0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
         0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
         0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
         0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
         0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
         0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
         0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
         0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
         0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
         0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
         0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
         0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
        [0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
         0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
         0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
         0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
         0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
         0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
         0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
         0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
         0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
         0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
         0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
         0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
         0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
         0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
         0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
         0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
         0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
         0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]
    # Tilt from horizontal of the PV modules/panels, in radians
    beta = beta * DTOR
    sazm = sazm * DTOR  # Surface azimuth of PV module/panels, in radians
    # 1. Calculate and assign various parameters to be used for modeling
    # irradiances
    # For calling PerezComp to break diffuse into components for zero tilt
    # (horizontal)
    iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0
    # Call to get iso_dif for horizontal surface
    ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
        dni, dhi, albedo, zen, 0.0, zen)
    # Isotropic irradiance from sky on horizontal surface, used later for
    # determining isotropic sky component
    iso_sky_dif = iso_dif
    # For calling PerezComp to break diffuse into components for 90 degree tilt
    # (vertical)
    inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm)
    # Call to get horiz_dif for vertical surface
    vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(
        dni, dhi, albedo, inc, tiltr, zen)
    # Horizon diffuse irradiance on a vertical surface, used later for
    # determining horizon brightening irradiance component
    F2DHI = horiz_dif
    index = -99   # sentinel values; overwritten below or an exception is raised
    n2 = -99.9
    if (PVbackSurface == "glass"):
        # Index to use with 1-degree hemispherical segment AOI correction
        # factor array
        index = 0
        n2 = 1.526  # Index of refraction for glass
    elif (PVbackSurface == "ARglass"):
        # Index to use with 1-degree hemispherical segment AOI correction
        # factor array
        index = 1
        n2 = 1.300  # Index of refraction for ARglass
    else:
        raise Exception(
            "Incorrect text input for PVbackSurface."
            " Must be glass or ARglass.")
    # Reflectance at normal incidence, Duffie and Beckman p217
    # NOTE(review): Ro is computed but not read again in this function.
    Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0)
    # Average GHI on ground under PV array for cases when x projection exceed
    # 2*rtr
    aveGroundGHI = 0.0
    for i in range(0,100):
        aveGroundGHI += rearGroundGHI[i] / 100.0
    # Calculate x,y coordinates of bottom and top edges of PV row in back of desired PV row so that portions of sky and ground viewed by the
    # PV cell may be determined. Origin of x-y axis is the ground point below the lower front edge of the desired PV row. The row in back of
    # the desired row is in the positive x direction.
    h = math.sin(beta);          # Vertical height of sloped PV panel (in PV panel slope lengths)
    x1 = math.cos(beta);         # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
    rtr = D + x1;                # Row-to-row distance (in PV panel slope lengths)
    PbotX = rtr;                 # x value for point on bottom edge of PV module/panel of row in back of (in PV panel slope lengths)
    PbotY = C;                   # y value for point on bottom edge of PV module/panel of row in back of (in PV panel slope lengths)
    PtopX = rtr + x1;            # x value for point on top edge of PV module/panel of row in back of (in PV panel slope lengths)
    PtopY = h + C;               # y value for point on top edge of PV module/panel of row in back of (in PV panel slope lengths)
    # 2. Calculate diffuse and direct component irradiances for each cell row
    for i in range (0, cellRows):
        # Calculate diffuse irradiances and reflected amounts for each cell row over it's field of view of 180 degrees,
        # beginning with the angle providing the upper most view of the sky (j=0)
        #PcellX = x1 * (i + 0.5) / ((double)cellRows);        # x value for location of PV cell
        #PcellY = C + h * (i + 0.5) / ((double)cellRows);     # y value for location of PV cell
        PcellX = x1 * (i + 0.5) / (cellRows) + offset * math.sin(beta);      # x value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
        PcellY = C + h * (i + 0.5) / (cellRows) - offset * math.cos(beta);   # y value for location of PV cell with OFFSET FOR SARA REFERENCE CELLS 4/26/2016
        elvUP = math.atan((PtopY - PcellY) / (PtopX - PcellX));      # Elevation angle up from PV cell to top of PV module/panel, radians
        elvDOWN = math.atan((PcellY - PbotY) / (PbotX - PcellX));    # Elevation angle down from PV cell to bottom of PV module/panel, radians
        if (rowType == "last" or rowType == "single"):               # 4/19/16 No array to the rear for these cases
            elvUP = 0.0;
            elvDOWN = 0.0;
        #Console.WriteLine("ElvUp = 0", elvUP / DTOR);
        #if (i == 0)
        #    Console.WriteLine("ElvDown = 0", elvDOWN / DTOR);
        # Original C# used Convert.ToInt32 (round-half-even); int(round(...))
        # reproduces that except Python 2/3 rounding of exact halves may differ.
        #iStopIso = Convert.ToInt32((beta - elvUP) / DTOR);           # Last whole degree in arc range that sees sky, first is 0
        #Console.WriteLine("iStopIso = 0", iStopIso);
        #iHorBright = Convert.ToInt32(max(0.0, 6.0 - elvUP / DTOR));  # Number of whole degrees for which horizon brightening occurs
        #iStartGrd = Convert.ToInt32((beta + elvDOWN) / DTOR);        # First whole degree in arc range that sees ground, last is 180
        iStopIso = int(round((beta - elvUP) / DTOR));                 # Last whole degree in arc range that sees sky, first is 0
        #Console.WriteLine("iStopIso = 0", iStopIso);
        iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR)));        # Number of whole degrees for which horizon brightening occurs
        iStartGrd = int(round((beta + elvDOWN) / DTOR));              # First whole degree in arc range that sees ground, last is 180
        backGTI.append(0.0)                                           # Initialize this cell row's GTI accumulator
        for j in range (0, iStopIso):                                 # Add sky diffuse component and horizon brightening if present
            backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif;     # Sky radiation
            # backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * iso_sky_dif;   # Sky radiation
            if ((iStopIso - j) <= iHorBright):                        # Add horizon brightening term if seen
                backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264;  # 0.052264 = 0.5 * [cos(84) - cos(90)]
                #backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * F2DHI / 0.052264;  # 0.052264 = 0.5 * [cos(84) - cos(90)]
        if (rowType == "interior" or rowType == "first"):             # 4/19/16 Only add reflections from PV modules for these cases
            for j in range (iStopIso, iStartGrd):   #j = iStopIso; j < iStartGrd; j++)   # Add reflections from PV module front surfaces
                L = (PbotX - PcellX) / math.cos(elvDOWN);             # Diagonal distance from cell to bottom of module in row behind
                startAlpha = -(j - iStopIso) * DTOR + elvUP + elvDOWN;
                stopAlpha = -(j + 1 - iStopIso) * DTOR + elvUP + elvDOWN;
                m = L * math.sin(startAlpha);
                theta = math.pi - elvDOWN - (math.pi / 2.0 - startAlpha) - beta;
                projectedX2 = m / math.cos(theta);                    # Projected distance on sloped PV module
                m = L * math.sin(stopAlpha);
                theta = math.pi - elvDOWN - (math.pi / 2.0 - stopAlpha) - beta;
                projectedX1 = m / math.cos(theta);                    # Projected distance on sloped PV module
                projectedX1 = max(0.0, projectedX1);
                #Console.WriteLine("j= 0 projected X1 = 1,6:0.000 projected X2 = 2,6:0.000", j, projectedX1, projectedX2);
                PVreflectedIrr = 0.0;                                 # Irradiance from PV module front cover reflections
                deltaCell = 1.0 / cellRows;                           # Length of cell in sloped direction in module/panel units (dimensionless)
                for k in range (0, cellRows):                         # Determine which cells in behind row are seen, and their reflected irradiance
                    cellBot = k * deltaCell;                          # Position of bottom of cell along PV module/panel
                    cellTop = (k + 1) * deltaCell;                    # Position of top of cell along PV module/panel
                    cellLengthSeen = 0.0;                             # Length of cell seen for this row, start with zero
                    if (cellBot >= projectedX1 and cellTop <= projectedX2):
                        cellLengthSeen = cellTop - cellBot;           # Sees the whole cell
                    elif (cellBot <= projectedX1 and cellTop >= projectedX2):
                        cellLengthSeen = projectedX2 - projectedX1;   # Sees portion in the middle of cell
                    elif (cellBot >= projectedX1 and projectedX2 > cellBot and cellTop >= projectedX2):
                        cellLengthSeen = projectedX2 - cellBot;       # Sees bottom of cell
                    elif (cellBot <= projectedX1 and projectedX1 < cellTop and cellTop <= projectedX2):
                        cellLengthSeen = cellTop - projectedX1;       # Sees top of cell
                    #Console.WriteLine("cell= 0 cellBot = 1,5:0.00 cellTop = 2,5:0.00 Cell length seen = 3,5:0.00", k, cellBot, cellTop, cellLengthSeen);
                    PVreflectedIrr += cellLengthSeen * frontReflected[k];  # Add reflected radiation for this PV cell, if seen, weight by cell length seen
                PVreflectedIrr /= projectedX2 - projectedX1;          # Reflected irradiance from PV modules (W/m2)
                backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * PVreflectedIrr;  # Radiation reflected from PV module surfaces onto back surface of module
            # End of adding reflections from PV module surfaces
        #Console.WriteLine("");
        #if (i == 0)
        #Console.WriteLine("iStartGrd = 0", iStartGrd);
        for j in range (iStartGrd, 180):                              # Add ground reflected component
            startElvDown = (j - iStartGrd) * DTOR + elvDOWN;          # Start and ending down elevations for this j loop
            stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
            projectedX2 = PcellX + np.float64(PcellY) / math.tan(startElvDown);  # Projection of ElvDown to ground in +x direction (X1 and X2 opposite nomenclature for front irradiance method)
            projectedX1 = PcellX + PcellY / math.tan(stopElvDown);
            actualGroundGHI = 0.0;                                    # Actual ground GHI from summing array values
            #if (i == 0)
            #    Console.WriteLine("j= 0 projected X1 = 1,6:0.0", j, 100 * projectedX1 / rtr);
            if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
                if (rowType == "last" or rowType == "single"):        # 4/19/16 No array to rear for these cases
                    actualGroundGHI = ghi;                            # Use total value if projection approximates the rtr
                else:
                    actualGroundGHI = aveGroundGHI;                   # Use average value if projection approximates the rtr
            else:
                projectedX1 = 100.0 * projectedX1 / rtr;              # Normalize projections and multiply by 100
                projectedX2 = 100.0 * projectedX2 / rtr;
                #Console.WriteLine("projectedX1 = 0 projectedX2 = 1", projectedX1, projectedX2);
                if ((rowType == "last" or rowType == "single") and (abs(projectedX1) > 99.0 or abs(projectedX2) > 99.0)):  #4/19/2016
                    actualGroundGHI = ghi;                            # Use total value if projection > rtr for "last" or "single"
                else:
                    while (projectedX1 >= 100.0 or projectedX2 >= 100.0):  # Offset so array indexes are less than 100
                        projectedX1 -= 100.0;
                        projectedX2 -= 100.0;
                    while (projectedX1 < -100.0 or projectedX2 < -100.0):  # Offset so array indexes are >= -100.0 12/13/2016
                        projectedX1 += 100.0;
                        projectedX2 += 100.0;
                    #Console.WriteLine("projectedX1 = 0 projectedX2 = 1", projectedX1, projectedX2);
                    index1 = (int)(projectedX1 + 100.0) - 100;        # Determine indexes for use with rearGroundGHI array and frontGroundGHI array (truncates values)
                    index2 = (int)(projectedX2 + 100.0) - 100;        # (int)(1.9) = 1 and (int)(-1.9) = -1; (int)(1.9+100) - 100 = 1 and (int)(-1.9+100) - 100 = -2
                    #Console.WriteLine("index1=0 index2=1", index1, index2);
                    if (index1 == index2):
                        if (index1 < 0):
                            actualGroundGHI = frontGroundGHI[index1 + 100];  # negative index -> segment in front of the row
                            #actualGroundGHI = 0.0;
                        else:
                            actualGroundGHI = rearGroundGHI[index1];  # x projections in same groundGHI element THIS SEEMS TO ADD HICCUP 4/26/2016 ***************************
                            #actualGroundGHI = 0.0;
                    else:
                        for k in range (index1, index2+1):  #for (k = index1; k <= index2; k++)  # Sum the irradiances on the ground if projections are in different groundGHI elements
                            if (k == index1):
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100] * (k + 1.0 - projectedX1);  # partial first segment
                                else:
                                    actualGroundGHI += rearGroundGHI[k] * (k + 1.0 - projectedX1);
                            elif (k == index2):
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100] * (projectedX2 - k);        # partial last segment
                                else:
                                    actualGroundGHI += rearGroundGHI[k] * (projectedX2 - k);
                            else:
                                if (k < 0):
                                    actualGroundGHI += frontGroundGHI[k + 100];                            # whole interior segment
                                else:
                                    actualGroundGHI += rearGroundGHI[k];
                        actualGroundGHI /= projectedX2 - projectedX1;  # Irradiance on ground in the 1 degree field of view
                    #if (i == 0)
                    #    Console.WriteLine("j=0 index1=1 index2=2 projectX1=3,5:0.0 projectX2=4,5:0.0 actualGrdGHI=5,6:0.0", j, index1, index2, projectedX1, projectedX2, actualGroundGHI);
            # End of if looping to determine actualGroundGHI
            backGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo;  # Add ground reflected component
            #Console.WriteLine("actualGroundGHI = 0,6:0.0 inputGHI = 1,6:0.0 aveArrayGroundGHI = 2,6:0.0", actualGroundGHI, dhi + dni * math.cos(zen), aveGroundGHI);
        # End of j loop for adding ground reflected component
        # Calculate and add direct and circumsolar irradiance components.
        # Note precedence: 180-beta / DTOR is 180 - (beta in degrees), i.e. the
        # tilt of the downward-facing back surface.
        inc, tiltr, sazmr = sunIncident(0, 180-beta / DTOR, sazm / DTOR - 180, 45.0, zen, azm)  # For calling PerezComp to break diffuse into components for downward facing tilt
        gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen)  # Call to get components for the tilt
        cellShade = pvBackSH * cellRows - i;
        if (cellShade > 1.0):        # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
            cellShade = 1.0;
        elif (cellShade < 0.0):
            cellShade = 0.0;
        if (cellShade < 1.0 and inc < math.pi / 2.0):  # Cell not shaded entirely and inc < 90 deg
            cor = aOIcorrection(n2, inc);              # Get AOI correction for beam and circumsolar
            backGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor;  # Add beam and circumsolar radiation
    # End of for i = 0; i < cellRows loop
    return backGTI, aveGroundGHI;
# End of GetBackSurfaceIrradiances
def getFrontSurfaceIrradiances(rowType, maxShadow, PVfrontSurface, beta, sazm,
dni, dhi, C, D, albedo, zen, azm, cellRows,
pvFrontSH, frontGroundGHI):
"""
This method calculates the AOI corrected irradiance on the front of the PV
module/panel and the irradiance reflected from the the front of the PV
module/panel. 11/12/2015
Added row type and MaxShadow and changed code to accommodate 4/19/2015
Parameters
----------
rowType : str
Type of row: "first", "interior", "last", or "single"
maxShadow
Maximum shadow length projected to the front (-) or rear (+) from the
front of the module row (in PV panel slope lengths), only used for
`rowTypes` other than "interior"
PVfrontSurface
PV module front surface material type, either "glass" or "ARglass"
beta
Tilt from horizontal of the PV modules/panels (deg)
sazm
Surface azimuth of PV panels (deg)
dni
Direct normal irradiance (W/m2)
dhi
Diffuse horizontal irradiance (W/m2)
C
Ground clearance of PV panel (in PV panel slope lengths)
D
Horizontal distance between rows of PV panels (in PV panel slope
lengths)
albedo
Ground albedo
zen
Sun zenith (in radians)
azm
Sun azimuth (in radians)
pvFrontSH
Decimal fraction of the front surface of the PV panel that is shaded,
0.0 to 1.0
froutGroundGHI : array of size [100]
Global horizontal irradiance for each of 100 ground segments in front
of the module row
Returns
-------
frontGTI : array of size [cellRows]
AOI corrected irradiance on front side of PV module/panel, one for each
cell row (W/m2)
frontReflected : array of size [cellRows]
Irradiance reflected from the front of the PV module/panel (W/m2)
aveGroundGHI : numeric
Average GHI on the ground (includes effects of shading by array) from
the array frontGroundGHI[100]
Notes
-----
1-degree hemispherical segment AOI correction factor for glass (index=0)
and ARglass (index=1). Creates a list containing 5 lists, each of 8 items,
all set to 0
"""
frontGTI = []
frontReflected = []
#w, h = 2, 180;
#SegAOIcor = [[0 for x in range(w)] for y in range(h)]
SegAOIcor = ([[0.057563, 0.128570, 0.199651, 0.265024, 0.324661, 0.378968, 0.428391, 0.473670, 0.514788, 0.552454,
0.586857, 0.618484, 0.647076, 0.673762, 0.698029, 0.720118, 0.740726, 0.759671, 0.776946, 0.792833,
0.807374, 0.821010, 0.833534, 0.845241, 0.855524, 0.865562, 0.874567, 0.882831, 0.890769, 0.897939,
0.904373, 0.910646, 0.916297, 0.921589, 0.926512, 0.930906, 0.935179, 0.939074, 0.942627, 0.946009,
0.949096, 0.952030, 0.954555, 0.957157, 0.959669, 0.961500, 0.963481, 0.965353, 0.967387, 0.968580,
0.970311, 0.971567, 0.972948, 0.974114, 0.975264, 0.976287, 0.977213, 0.978142, 0.979057, 0.979662,
0.980460, 0.981100, 0.981771, 0.982459, 0.982837, 0.983199, 0.983956, 0.984156, 0.984682, 0.985026,
0.985364, 0.985645, 0.985954, 0.986241, 0.986484, 0.986686, 0.986895, 0.987043, 0.987287, 0.987388,
0.987541, 0.987669, 0.987755, 0.987877, 0.987903, 0.987996, 0.988022, 0.988091, 0.988104, 0.988114,
0.988114, 0.988104, 0.988091, 0.988022, 0.987996, 0.987903, 0.987877, 0.987755, 0.987669, 0.987541,
0.987388, 0.987287, 0.987043, 0.986895, 0.986686, 0.986484, 0.986240, 0.985954, 0.985645, 0.985364,
0.985020, 0.984676, 0.984156, 0.983956, 0.983199, 0.982837, 0.982459, 0.981771, 0.981100, 0.980460,
0.979662, 0.979057, 0.978142, 0.977213, 0.976287, 0.975264, 0.974114, 0.972947, 0.971567, 0.970311,
0.968580, 0.967387, 0.965353, 0.963481, 0.961501, 0.959671, 0.957157, 0.954555, 0.952030, 0.949096,
0.946009, 0.942627, 0.939074, 0.935179, 0.930906, 0.926512, 0.921589, 0.916297, 0.910646, 0.904373,
0.897939, 0.890769, 0.882831, 0.874567, 0.865562, 0.855524, 0.845241, 0.833534, 0.821010, 0.807374,
0.792833, 0.776946, 0.759671, 0.740726, 0.720118, 0.698029, 0.673762, 0.647076, 0.618484, 0.586857,
0.552454, 0.514788, 0.473670, 0.428391, 0.378968, 0.324661, 0.265024, 0.199651, 0.128570, 0.057563],
[0.062742, 0.139913, 0.216842, 0.287226, 0.351055, 0.408796, 0.460966, 0.508397, 0.551116, 0.589915,
0.625035, 0.657029, 0.685667, 0.712150, 0.735991, 0.757467, 0.777313, 0.795374, 0.811669, 0.826496,
0.839932, 0.852416, 0.863766, 0.874277, 0.883399, 0.892242, 0.900084, 0.907216, 0.914023, 0.920103,
0.925504, 0.930744, 0.935424, 0.939752, 0.943788, 0.947313, 0.950768, 0.953860, 0.956675, 0.959339,
0.961755, 0.964039, 0.965984, 0.967994, 0.969968, 0.971283, 0.972800, 0.974223, 0.975784, 0.976647,
0.977953, 0.978887, 0.979922, 0.980773, 0.981637, 0.982386, 0.983068, 0.983759, 0.984436, 0.984855,
0.985453, 0.985916, 0.986417, 0.986934, 0.987182, 0.987435, 0.988022, 0.988146, 0.988537, 0.988792,
0.989043, 0.989235, 0.989470, 0.989681, 0.989857, 0.990006, 0.990159, 0.990263, 0.990455, 0.990515,
0.990636, 0.990731, 0.990787, 0.990884, 0.990900, 0.990971, 0.990986, 0.991042, 0.991048, 0.991057,
0.991057, 0.991048, 0.991042, 0.990986, 0.990971, 0.990900, 0.990884, 0.990787, 0.990731, 0.990636,
0.990515, 0.990455, 0.990263, 0.990159, 0.990006, 0.989857, 0.989681, 0.989470, 0.989235, 0.989043,
0.988787, 0.988532, 0.988146, 0.988022, 0.987435, 0.987182, 0.986934, 0.986417, 0.985916, 0.985453,
0.984855, 0.984436, 0.983759, 0.983068, 0.982386, 0.981637, 0.980773, 0.979920, 0.978887, 0.977953,
0.976647, 0.975784, 0.974223, 0.972800, 0.971284, 0.969970, 0.967994, 0.965984, 0.964039, 0.961755,
0.959339, 0.956675, 0.953860, 0.950768, 0.947313, 0.943788, 0.939752, 0.935424, 0.930744, 0.925504,
0.920103, 0.914023, 0.907216, 0.900084, 0.892242, 0.883399, 0.874277, 0.863766, 0.852416, 0.839932,
0.826496, 0.811669, 0.795374, 0.777313, 0.757467, 0.735991, 0.712150, 0.685667, 0.657029, 0.625035,
0.589915, 0.551116, 0.508397, 0.460966, 0.408796, 0.351055, 0.287226, 0.216842, 0.139913, 0.062742]]);
beta = beta * DTOR # Tilt from horizontal of the PV modules/panels, in radians
sazm = sazm * DTOR # Surface azimuth of PV module/panels, in radians
# 1. Calculate and assign various paramters to be used for modeling irradiances
iso_dif = 0.0; circ_dif = 0.0; horiz_dif = 0.0; grd_dif = 0.0; beam = 0.0; # For calling PerezComp to break diffuse into components for zero tilt (horizontal)
ghi, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, zen, 0.0, zen) # Call to get iso_dif for horizontal surface
# print "PEREZCOMP1 = "
# print "ghi = ", ghi
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
iso_sky_dif = iso_dif; # Isotropic irradiance from sky on horizontal surface, used later for determining isotropic sky component
inc, tiltr, sazmr = sunIncident(0, 90.0, 180.0, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for 90 degree tilt (vertical)
# print "sunIncident 1."
# print "inc = ", inc
# print "tiltr = ", tiltr
# print "sazmr = ", sazmr
vti, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get horiz_dif for vertical surface
# print "PEREZCOMP1 = "
# print "vti = ", vti
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
F2DHI = horiz_dif; # Horizon diffuse irradiance on a vertical surface, used later for determining horizon brightening irradiance component
index = -99;
n2 = -99.9;
if (PVfrontSurface == "glass"):
index = 0; # Index to use with 1-degree hemispherical segment AOI correction factor array
n2 = 1.526; # Index of refraction for glass
elif (PVfrontSurface == "ARglass"):
index = 1; # Index to use with 1-degree hemispherical segment AOI correction factor array
n2 = 1.300; # Index of refraction for ARglass
else:
raise Exception("Incorrect text input for PVfrontSurface. Must be glass or ARglass.")
Ro = math.pow((n2 - 1.0) / (n2 + 1.0), 2.0); # Reflectance at normal incidence, Duffie and Beckman p217
aveGroundGHI = 0.0; # Average GHI on ground under PV array for cases when x projection exceed 2*rtr
for i in range (0,100):
aveGroundGHI += frontGroundGHI[i] / 100.0;
# Calculate x,y coordinates of bottom and top edges of PV row in front of desired PV row so that portions of sky and ground viewed by the
# PV cell may be determined. Origin of x-y axis is the ground pobelow the lower front edge of the desired PV row. The row in front of
# the desired row is in the negative x direction.
h = math.sin(beta); # Vertical height of sloped PV panel (in PV panel slope lengths)
x1 = math.cos(beta); # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
rtr = D + x1; # Row-to-row distance (in PV panel slope lengths)
PbotX = -rtr; # x value for poon bottom egde of PV module/panel of row in front of (in PV panel slope lengths)
PbotY = C; # y value for poon bottom egde of PV module/panel of row in front of (in PV panel slope lengths)
PtopX = -D; # x value for poon top egde of PV module/panel of row in front of (in PV panel slope lengths)
PtopY = h + C; # y value for poon top egde of PV module/panel of row in front of (in PV panel slope lengths)
# 2. Calculate diffuse and direct component irradiances for each cell row
for i in range (0, cellRows):
# Calculate diffuse irradiances and reflected amounts for each cell row over it's field of view of 180 degrees,
# beginning with the angle providing the upper most view of the sky (j=0)
PcellX = x1 * (i + 0.5) / (cellRows); # x value for location of PV cell
PcellY = C + h * (i + 0.5) / (cellRows); # y value for location of PV cell
elvUP = math.atan((PtopY - PcellY) / (PcellX - PtopX)); # Elevation angle up from PV cell to top of PV module/panel, radians
elvDOWN = math.atan((PcellY - PbotY) / (PcellX - PbotX)); # Elevation angle down from PV cell to bottom of PV module/panel, radians
if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
elvUP = 0.0;
elvDOWN = 0.0;
#Console.WriteLine("ElvUp = 0", elvUP / DTOR);
#if (i == 0)
# Console.WriteLine("ElvDown = 0", elvDOWN / DTOR);
if math.isnan(beta):
print( "Beta is Nan")
if math.isnan(elvUP):
print( "elvUP is Nan")
if math.isnan((math.pi - beta - elvUP) / DTOR):
print( "division is Nan")
iStopIso = int(round(np.float64((math.pi - beta - elvUP)) / DTOR)) # Last whole degree in arc range that sees sky, first is 0
#Console.WriteLine("iStopIso = 0", iStopIso);
iHorBright = int(round(max(0.0, 6.0 - elvUP / DTOR))); # Number of whole degrees for which horizon brightening occurs
iStartGrd = int(round((math.pi - beta + elvDOWN) / DTOR)); # First whole degree in arc range that sees ground, last is 180
# print "iStopIso = ", iStopIso
# print "iHorBright = ", iHorBright
# print "iStartGrd = ", iStartGrd
frontGTI.append(0.0) # Initialtize front GTI
frontReflected.append(0.0); # Initialize reflected amount from front
for j in range (0, iStopIso): # Add sky diffuse component and horizon brightening if present
#for (j = 0; j < iStopIso; j++)
frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * iso_sky_dif; # Sky radiation
frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * iso_sky_dif * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected radiation from module
if ((iStopIso - j) <= iHorBright): # Add horizon brightening term if seen
frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * F2DHI / 0.052264; # 0.052246 = 0.5 * [cos(84) - cos(90)]
frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * (F2DHI / 0.052264) * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected radiation from module
#if (i == 0)
# Console.WriteLine("iStartGrd = 0", iStartGrd);
for j in range (iStartGrd, 180): # Add ground reflected component
#(j = iStartGrd; j < 180; j++)
startElvDown = (j - iStartGrd) * DTOR + elvDOWN; # Start and ending down elevations for this j loop
stopElvDown = (j + 1 - iStartGrd) * DTOR + elvDOWN;
projectedX1 = PcellX - np.float64(PcellY) / math.tan(startElvDown); # Projection of ElvDown to ground in -x direction
projectedX2 = PcellX - PcellY / math.tan(stopElvDown);
actualGroundGHI = 0.0; # Actuall ground GHI from summing array values
#if (i == 0)
# Console.WriteLine("j= 0 projected X1 = 1,6:0.0", j, 100 * projectedX1 / rtr);
if (abs(projectedX1 - projectedX2) > 0.99 * rtr):
if (rowType == "first" or rowType == "single"): # 4/19/16 No array in front for these cases
actualGroundGHI = ghi; # Use total value if projection approximates the rtr
else:
actualGroundGHI = aveGroundGHI; # Use average value if projection approximates the rtr
else:
projectedX1 = 100.0 * projectedX1 / rtr; # Normalize projections and multiply by 100
projectedX2 = 100.0 * projectedX2 / rtr;
if ((rowType == "first" or rowType == "single") and (abs(projectedX1) > rtr or abs(projectedX2) > rtr)): #4/19/2016
actualGroundGHI = ghi; # Use total value if projection > rtr for "first" or "single"
else:
while (projectedX1 < 0.0 or projectedX2 < 0.0): # Offset so array indexes are positive
projectedX1 += 100.0;
projectedX2 += 100.0;
index1 = int(projectedX1); # Determine indexes for use with groundGHI array (truncates values)
index2 = int(projectedX2);
if (index1 == index2):
actualGroundGHI = frontGroundGHI[index1]; # x projections in same groundGHI element
else:
for k in range (index1, index2+1): # Sum the irradiances on the ground if projections are in different groundGHI elements
#for (k = index1; k <= index2; k++)
#Console.WriteLine("index1=0 index2=1", index1,index2);
if (k == index1):
actualGroundGHI += frontGroundGHI[k] * (k + 1.0 - projectedX1);
elif (k == index2):
if (k < 100):
actualGroundGHI += frontGroundGHI[k] * (projectedX2 - k);
else:
actualGroundGHI += frontGroundGHI[k - 100] * (projectedX2 - k);
else:
if (k < 100):
actualGroundGHI += frontGroundGHI[k];
else:
actualGroundGHI += frontGroundGHI[k - 100];
actualGroundGHI /= projectedX2 - projectedX1; # Irradiance on ground in the 1 degree field of view
#if (i == 0)
# Console.WriteLine("j=0 index1=1 index2=2 projectX1=3,5:0.0 projectX2=4,5:0.0 actualGrdGHI=5,6:0.0", j, index1, index2, projectedX1, projectedX2, actualGroundGHI);
frontGTI[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * SegAOIcor[index][j] * actualGroundGHI * albedo; # Add ground reflected component
frontReflected[i] += 0.5 * (math.cos(j * DTOR) - math.cos((j + 1) * DTOR)) * actualGroundGHI * albedo * (1.0 - SegAOIcor[index][j] * (1.0 - Ro)); # Reflected ground radiation from module
#Console.WriteLine("actualGroundGHI = 0,6:0.0 inputGHI = 1,6:0.0 aveArrayGroundGHI = 2,6:0.0", actualGroundGHI, dhi + dni * math.cos(zen), aveGroundGHI);
# End of j loop for adding ground reflected componenet
# Calculate and add direct and circumsolar irradiance components
inc, tiltr, sazmr = sunIncident(0, beta / DTOR, sazm / DTOR, 45.0, zen, azm) # For calling PerezComp to break diffuse into components for 90 degree tilt (vertical)
# print "sunIncident 2."
# print "inc = ", inc
# print "tiltr = ", tiltr
# print "sazmr = ", sazmr
# print " INCIDENT REALY NEEDED for AOI ", inc
gtiAllpc, iso_dif, circ_dif, horiz_dif, grd_dif, beam = perezComp(dni, dhi, albedo, inc, tiltr, zen) # Call to get components for the tilt
# print "PEREZCOMP 2 = "
# print "gtiAllpc = ", vti
# print "iso_dif = ", iso_dif
# print "circ_dif = ", circ_dif
# print "horiz_dif = ", horiz_dif
# print "grd_dif = ", grd_dif
# print "beam = ", beam
cellShade = pvFrontSH * cellRows - i;
if (cellShade > 1.0): # Fully shaded if > 1, no shade if < 0, otherwise fractionally shaded
cellShade = 1.0;
elif (cellShade < 0.0):
cellShade = 0.0;
if (cellShade < 1.0 and inc < math.pi / 2.0): # Cell not shaded entirely and inc < 90 deg
cor = aOIcorrection(n2, inc); # Get AOI correction for beam and circumsolar
frontGTI[i] += (1.0 - cellShade) * (beam + circ_dif) * cor; # Add beam and circumsolar radiation
#frontReflected[i] += (1.0 - cellShade) * (beam + circ_dif) * (1.0 - cor * (1.0 - Ro)); # Reflected beam and circumsolar radiation from module
# End of for i = 0; i < cellRows loop
return aveGroundGHI, frontGTI, frontReflected;
# End of GetFrontSurfaceIrradiances
def getGroundShadeFactors(rowType, beta, C, D, elv, azm, sazm):
    """
    This method determines if the ground is shaded from direct beam radiation
    for points on the ground from the leading edge of one row of PV panels to
    the leading edge of the next row of PV panels behind it. This row-to-row
    dimension is divided into 100 ground segments and a ground shade factor is
    returned for each ground segment, with values of 1 for shaded segments and
    values of 0 for non shaded segments. The fractional amounts of shading of
    the front and back surfaces of the PV panel are also returned. 8/20/2015

    4/18/2016 - Modified to account for different row types. Because the ground
    factors may now be different depending on row, they are calculated for the
    row-to-row dimension to the rear of the leading module edge and to the
    front of the leading edge. Also returned is the maximum shadow length
    projected to the front or rear from the front of the module row.

    Parameters
    ----------
    rowType : str
        "first", "interior", "last", or "single"
    beta
        Tilt from horizontal of the PV modules/panels (deg)
    C
        Ground clearance of PV panel (in PV panel slope lengths)
    D
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    elv
        Sun elevation (in radians)
    azm
        Sun azimuth (in radians)
    sazm
        Surface azimuth of PV panels (deg)

    Returns
    -------
    pvFrontSH : numeric
        Decimal fraction of the front surface of the PV panel that is shaded,
        0.0 to 1.0
    pvBackSH : numeric
        Decimal fraction of the back surface of the PV panel that is shaded,
        0.0 to 1.0
    maxShadow : numeric
        Maximum shadow length projected to the front(-) or rear (+) from the
        front of the module row (in PV panel slope lengths), only used later
        for rowTypes other than "interior"
    rearGroundSH : array of size [100]
        Ground shade factors for ground segments to the rear, 0 = not shaded,
        1 = shaded
    frontGroundSH : array of size [100]
        Ground shade factors for ground segments to the front, 0 = not shaded,
        1 = shaded
    """
    beta = beta * DTOR  # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR  # Surface azimuth of PV modules/panels, in radians
    h = math.sin(beta)   # Vertical height of sloped PV panel (in PV panel slope lengths)
    x1 = math.cos(beta)  # Horizontal distance from front of panel to rear of panel (in PV panel slope lengths)
    rtr = D + x1         # Row-to-row distance (in PV panel slope lengths)
    # Divide the row-to-row spacing into 100 intervals for the ground shade factors
    delta = rtr / 100.0

    # Horizontal shadow lengths perpendicular to the row.
    # NOTE(review): raises ZeroDivisionError when elv == 0 (sun on horizon),
    # exactly as the original implementation did — callers are assumed to
    # filter those times; confirm before relying on this at sunrise/sunset.
    Lh = (h / math.tan(elv)) * math.cos(sazm - azm)         # from top of module to bottom of module
    Lhc = ((h + C) / math.tan(elv)) * math.cos(sazm - azm)  # from top of module to ground level
    Lc = (C / math.tan(elv)) * math.cos(sazm - azm)         # from bottom of module to ground level

    # Shading start (ss) and end (se) positions for two potential shade segments
    ss1 = 0.0; se1 = 0.0; ss2 = 0.0; se2 = 0.0
    pvFrontSH = 0.0
    pvBackSH = 0.0
    rearGroundSH = []
    frontGroundSH = []

    def _shadeFlags(xStart):
        # One shade flag (1 = shaded, 0 = sunny) per ground segment, evaluated
        # at segment midpoints xStart + delta/2, xStart + 3*delta/2, ...,
        # tested against the (up to two) shade segments [ss1, se1) and [ss2, se2).
        flags = []
        x = xStart
        for _ in range(100):
            x += delta
            flags.append(1 if (ss1 <= x < se1) or (ss2 <= x < se2) else 0)
        return flags

    if (rowType == "interior"):
        if (Lh > D):
            # Front side of PV module partially shaded, back completely shaded,
            # ground completely shaded
            pvFrontSH = (Lh - D) / (Lh + x1)
            pvBackSH = 1.0
            ss1 = 0.0   # Ground shaded from 0.0 to rtr
            se1 = rtr
        elif (Lh < -(rtr + x1)):
            # Back side of PV module partially shaded, front completely shaded,
            # ground completely shaded
            pvFrontSH = 1.0
            pvBackSH = (Lh + rtr + x1) / (Lh + x1)
            ss1 = 0.0   # Ground shaded from 0.0 to rtr
            se1 = rtr
        else:
            # Ground is partially shaded
            if (Lhc >= 0.0):
                # Shadow to rear of row, module front unshaded, back shaded
                pvFrontSH = 0.0
                pvBackSH = 1.0
                Ss = Lc         # Shadow starts at Lc
                Se = Lhc + x1   # Shadow ends here
                while (Ss > rtr):
                    Ss -= rtr   # Put shadow in correct rtr space if needed
                    Se -= rtr
            else:
                # Shadow to front of row; either front or back might be shaded,
                # depending on tilt and other factors
                if (Lc < Lhc + x1):
                    pvFrontSH = 0.0
                    pvBackSH = 1.0
                    Ss = Lc         # Shadow starts at Lc
                    Se = Lhc + x1   # Shadow ends here
                else:
                    pvFrontSH = 1.0
                    pvBackSH = 0.0
                    Ss = Lhc + x1   # Shadow starts at Lhc + x1
                    Se = Lc         # Shadow ends here
                while (Ss < 0.0):
                    Ss += rtr   # Put shadow in correct rtr space if needed
                    Se += rtr
            ss1 = Ss
            se1 = Se
            if (se1 > rtr):  # Shadow spills past rtr: need two shade segments
                se1 = rtr
                ss2 = 0.0
                se2 = Se - rtr
                if (se2 > ss1):
                    # Segments overlap: ground completely shaded
                    ss1 = 0.0   # Ground shaded from 0.0 to rtr
                    se1 = rtr
        # For interior rows the same factors apply to both rear and front
        rearGroundSH = _shadeFlags(-delta / 2.0)
        frontGroundSH = list(rearGroundSH)

    elif (rowType == "first"):
        if (Lh > 0.0):
            # Sun is on front side of PV module, shadow cast to the rear
            pvFrontSH = 0.0
            pvBackSH = 1.0
            ss1 = Lc        # Ground shaded from shadow of lower edge
            se1 = x1 + Lhc  # to shadow of upper edge
        elif (Lh < -(rtr + x1)):
            # Back side of PV module partially shaded from row to rear, front
            # completely shaded, ground completely shaded
            pvFrontSH = 1.0
            pvBackSH = (Lh + rtr + x1) / (Lh + x1)
            ss1 = -rtr  # Ground shaded from -rtr to rtr
            se1 = rtr
        else:
            # Shadow to front side of row; either front or back might be
            # shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0
                pvBackSH = 1.0
                ss1 = Lc        # Shadow starts at Lc
                se1 = Lhc + x1  # Shadow ends here
            else:
                pvFrontSH = 1.0
                pvBackSH = 0.0
                ss1 = Lhc + x1  # Shadow starts at Lhc + x1
                se1 = Lc        # Shadow ends here
        rearGroundSH = _shadeFlags(-delta / 2.0)
        frontGroundSH = _shadeFlags(-rtr - delta / 2.0)

    elif (rowType == "last"):
        if (Lh > D):
            # Front side of PV module partially shaded, back completely shaded,
            # ground completely shaded
            pvFrontSH = (Lh - D) / (Lh + x1)
            pvBackSH = 1.0
            ss1 = -rtr  # Ground shaded from -rtr to rtr
            se1 = rtr
        else:
            # Shadow to front side of row; either front or back might be
            # shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0
                pvBackSH = 1.0
                ss1 = Lc        # Shadow starts at Lc
                se1 = Lhc + x1  # Shadow ends here
            else:
                pvFrontSH = 1.0
                pvBackSH = 0.0
                ss1 = Lhc + x1  # Shadow starts at Lhc + x1
                se1 = Lc        # Shadow ends here
        rearGroundSH = _shadeFlags(-delta / 2.0)
        frontGroundSH = _shadeFlags(-rtr - delta / 2.0)

    elif (rowType == "single"):
        if (Lh > 0.0):
            # Shadow to the rear
            pvFrontSH = 0.0
            pvBackSH = 1.0
            ss1 = Lc        # Ground shaded from shadow of lower edge
            se1 = x1 + Lhc  # to shadow of upper edge
        else:
            # Shadow to front side of row; either front or back might be
            # shaded, depending on tilt and other factors
            if (Lc < Lhc + x1):
                pvFrontSH = 0.0
                pvBackSH = 1.0
                ss1 = Lc        # Shadow starts at Lc
                se1 = Lhc + x1  # Shadow ends here
            else:
                pvFrontSH = 1.0
                pvBackSH = 0.0
                ss1 = Lhc + x1  # Shadow starts at Lhc + x1
                se1 = Lc        # Shadow ends here
        rearGroundSH = _shadeFlags(-delta / 2.0)
        frontGroundSH = _shadeFlags(-rtr - delta / 2.0)

    else:
        print("ERROR: Incorrect row type not passed to function GetGroundShadedFactors ")

    # Maximum shadow length projected from the front of the PV module row
    if (abs(ss1) > abs(se1)):
        maxShadow = ss1
    else:
        maxShadow = se1

    return pvFrontSH, pvBackSH, maxShadow, rearGroundSH, frontGroundSH
def getSkyConfigurationFactors(rowType, beta, C, D):
    """
    This method determines the sky configuration factors for points on the
    ground from the leading edge of one row of PV panels to the leading edge of
    the next row of PV panels behind it. This row-to-row dimension is divided
    into 100 ground segments and a sky configuration factor is returned for
    each ground segment. The sky configuration factor represents the fraction
    of the isotropic diffuse sky radiation (unobstructed) that is present on
    the ground when partially obstructed by the rows of PV panels. The
    equations follow that on pages in the notebook dated 8/12/2015. 8/20/2015

    4/15/2016 Modified for calculations other than just the interior rows. Row
    type is identified with the string `rowType`, with the possible values:

    * first = first row of the array
    * interior = interior row of array
    * last = last row of the array
    * single = a single row array

    Because the sky configuration factors may now be different depending on
    row, they are calculated for the row-to-row dimension to the rear of the
    leading module edge and to the front of the leading edge.

    Parameters
    ----------
    rowType : str
        "first", "interior", "last", or "single"
    beta : float
        Tilt from horizontal of the PV modules/panels (deg)
    C : float
        Ground clearance of PV panel (in PV module/panel slope lengths)
    D : float
        Horizontal distance between rows of PV panels (in PV module/panel slope
        lengths)

    Returns
    -------
    rearSkyConfigFactors : array of size [100]
        Sky configuration factors to rear of leading PVmodule edge (decimal
        fraction)
    frontSkyConfigFactors : array of size [100]
        Sky configuration factors to front of leading PVmodule edge (decimal
        fraction)

    Notes
    -----
    The horizontal distance between rows, `D`, is from the back edge of one row
    to the front edge of the next, and it is not the row-to-row spacing.
    """

    def _arctanWrap(y, x):
        # atan(y / x) shifted into [0, pi) when the quotient is negative —
        # reproduces the original hand-rolled quadrant correction exactly
        # (including a ZeroDivisionError when x == 0, unlike atan2).
        ang = math.atan(y / x)
        if ang < 0.0:
            ang += math.pi
        return ang

    def _skyBand(betaLo, betaHi):
        # View-factor contribution of the sky band between elevation angles
        # betaLo and betaHi (zero when the band is empty/inverted).
        if betaHi > betaLo:
            return 0.5 * (math.cos(betaLo) - math.cos(betaHi))
        return 0.0

    rearSkyConfigFactors = []
    frontSkyConfigFactors = []
    # Tilt from horizontal of the PV modules/panels, in radians
    beta = beta * DTOR
    # Vertical height of sloped PV panel (in PV panel slope lengths)
    h = math.sin(beta)
    # Horizontal distance from front of panel to rear of panel (in PV panel
    # slope lengths)
    x1 = math.cos(beta)
    rtr = D + x1  # Row-to-row distance (in PV panel slope lengths)

    # Forced fix for case of C = 0
    # FIXME: for some reason the Config Factors go from 1 to 2 and not 0 to 1.
    # TODO: investigate why this is happening in the code.
    if C == 0:
        C = 0.0000000001

    if C < 0:
        LOGGER.error(
            "Height is below ground level. Function GetSkyConfigurationFactors"
            " will continue but results might be unreliable")

    # Divide the row-to-row spacing into 100 intervals and calculate
    # configuration factors at the midpoint of each interval
    delta = rtr / 100.0

    if (rowType == "interior"):
        # Initialize horizontal dimension x to provide midpoint of intervals
        x = -delta / 2.0
        for i in range(0, 100):
            x += delta
            # <--rtr=x1+D--><--rtr=x1+D--><--rtr=x1+D-->
            # |\            |\            |\           |\
            # | \           | \           | \          | \
            # h  \          h  \          h  \         h  \
            # |   \         |   \         |   \        |   \
            # |_x1_\____D___|_x1_\____D___|_x1_\__D____|_x1_\_
            #               <--------x-------->
            # Sky bands are bounded by angles to the module edges of the rows
            # 2, 1 and 0 rows away; use ATAN2 (4-quadrant) here.
            # check 2 rows away
            angA = math.atan2(h + C, (2.0 * rtr + x1 - x))
            angB = math.atan2(C, (2.0 * rtr - x))
            beta1 = max(angA, angB)
            # check 1 row away
            angA = math.atan2(h + C, (rtr + x1 - x))
            angB = math.atan2(C, (rtr - x))
            beta2 = min(angA, angB)
            # check 0 rows away
            beta3 = max(angA, angB)
            beta4 = math.atan2(h + C, (x1 - x))
            beta5 = math.atan2(C, (-x))
            beta6 = math.atan2(h + C, (-D - x))
            skyAll = (_skyBand(beta1, beta2) + _skyBand(beta3, beta4)
                      + _skyBand(beta5, beta6))
            # Save as arrays of values, same for both to the rear and front
            rearSkyConfigFactors.append(skyAll)
            frontSkyConfigFactors.append(skyAll)

    elif (rowType == "first"):
        # rearSkyConfigFactors don't have a row in front; calculation of sky3
        # changed, beta6 = 180 degrees
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            angA = _arctanWrap(h + C, (2.0 * rtr + x1 - x))
            angB = _arctanWrap(C, (2.0 * rtr - x))
            beta1 = max(angA, angB)
            angA = _arctanWrap(h + C, (rtr + x1 - x))
            angB = _arctanWrap(C, (rtr - x))
            beta2 = min(angA, angB)
            beta3 = max(angA, angB)
            beta4 = _arctanWrap(h + C, (x1 - x))
            beta5 = _arctanWrap(C, (-x))
            beta6 = math.pi
            skyAll = (_skyBand(beta1, beta2) + _skyBand(beta3, beta4)
                      + _skyBand(beta5, beta6))
            rearSkyConfigFactors.append(skyAll)
        # frontSkyConfigFactors don't have a row in front; calculation of sky3
        # included as part of revised sky2, beta4 set to 180 degrees
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            angA = _arctanWrap(h + C, (2.0 * rtr + x1 - x))
            angB = _arctanWrap(C, (2.0 * rtr - x))
            beta1 = max(angA, angB)
            angA = _arctanWrap(h + C, (rtr + x1 - x))
            angB = _arctanWrap(C, (rtr - x))
            beta2 = min(angA, angB)
            beta3 = max(angA, angB)
            beta4 = math.pi
            skyAll = _skyBand(beta1, beta2) + _skyBand(beta3, beta4)
            frontSkyConfigFactors.append(skyAll)

    elif (rowType == "last"):
        # rearSkyConfigFactors don't have a row to the rear; combine sky1 into
        # sky2, set beta3 = 0.0
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            beta3 = 0.0
            beta4 = _arctanWrap(h + C, (x1 - x))
            beta5 = _arctanWrap(C, (-x))
            beta6 = _arctanWrap(h + C, (-D - x))
            skyAll = _skyBand(beta3, beta4) + _skyBand(beta5, beta6)
            rearSkyConfigFactors.append(skyAll)
        # frontSkyConfigFactors have beta1 = 0.0 (the original also computed
        # beta1 from the row two rows away and immediately overwrote it —
        # that dead computation is removed here)
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            beta1 = 0.0
            angA = _arctanWrap(h + C, (rtr + x1 - x))
            angB = _arctanWrap(C, (rtr - x))
            beta2 = min(angA, angB)
            beta3 = max(angA, angB)
            beta4 = _arctanWrap(h + C, (x1 - x))
            beta5 = _arctanWrap(C, (-x))
            beta6 = _arctanWrap(h + C, (-D - x))
            skyAll = (_skyBand(beta1, beta2) + _skyBand(beta3, beta4)
                      + _skyBand(beta5, beta6))
            frontSkyConfigFactors.append(skyAll)

    elif (rowType == "single"):
        # rearSkyConfigFactors don't have a row to the rear or front; combine
        # sky1 into sky2, set beta3 = 0.0; for sky3, beta6 = 180.0
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            beta3 = 0.0
            beta4 = _arctanWrap(h + C, (x1 - x))
            beta5 = _arctanWrap(C, (-x))
            beta6 = math.pi
            skyAll = _skyBand(beta3, beta4) + _skyBand(beta5, beta6)
            rearSkyConfigFactors.append(skyAll)
        # frontSkyConfigFactors have only a row to the rear; combine sky3 into
        # sky2, set beta1 = 0, beta4 = 180 (dead beta1 computation removed, as
        # in the "last" case)
        x = -delta / 2.0  # Initialize x to provide midpoint of intervals
        for i in range(0, 100):
            x += delta
            beta1 = 0.0
            angA = _arctanWrap(h + C, (rtr + x1 - x))
            angB = _arctanWrap(C, (rtr - x))
            beta2 = min(angA, angB)
            beta3 = max(angA, angB)
            beta4 = math.pi
            skyAll = _skyBand(beta1, beta2) + _skyBand(beta3, beta4)
            frontSkyConfigFactors.append(skyAll)

    else:
        print("ERROR: Incorrect row type not passed to function GetSkyConfigurationFactors ")

    return rearSkyConfigFactors, frontSkyConfigFactors
def rowSpacing(beta, sazm, lat, lng, tz, hour, minute):
    """
    This method determines the horizontal distance D between rows of PV panels
    (in PV module/panel slope lengths) for no shading on December 21 (north
    hemisphere) June 21 (south hemisphere) for a module tilt angle beta and
    surface azimuth sazm, and a given latitude, longitude, and time zone and
    for the time passed to the method (typically 9 am).

    (Ref: the row-to-row spacing is then ``D + cos(beta)``)
    8/21/2015

    Parameters
    ----------
    beta : double
        Tilt from horizontal of the PV modules/panels (deg)
    sazm : double
        Surface azimuth of the PV modules/panels (deg)
    lat : double
        Site latitude (deg)
    lng : double
        Site longitude (deg)
    tz : double
        Time zone (hrs)
    hour : int
        hour for no shading criteria
    minute : double
        minute for no shading

    Returns
    -------
    D : numeric
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    """
    beta = beta * DTOR  # Tilt from horizontal of the PV modules/panels, in radians
    sazm = sazm * DTOR  # Surface azimuth of PV modules/panels, in radians
    # First solar-position call: obtain the true solar time (tst) for the
    # clock time passed in, so the clock time can be re-expressed below.
    if lat >= 0:
        # Northern hemisphere design day: winter solstice, Dec 21
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 12, 21, hour, minute, lat, lng, tz)
    else:
        # Southern hemisphere design day: June 21
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos (2014, 6, 21, hour, minute, lat, lng, tz)
    # HACK: tst is overridden with a hard-coded value, per the comment below
    # carried over from the original DLL implementation. NOTE(review): this
    # discards the tst just computed for this site — presumably a debug/
    # compatibility leftover; confirm it should apply at all locations.
    tst = 8.877 ##DLL Forced value
    minute -= 60.0 * (tst - hour);  # Adjust minute so sun position is calculated for a tst equal to the
                                    # time passed to the function
    # Second solar-position call at the adjusted minute gives the sun azimuth
    # and elevation actually used for the spacing calculation.
    if lat >= 0:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 12, 21, hour, minute, lat, lng, tz)
    else:
        [azm, zen, elv, dec, sunrise, sunset, Eo, tst] = solarPos(2014, 6, 21, hour, minute, lat, lng, tz)
    # Console.WriteLine("tst = {0} azm = {1} elv = {2}", tst, azm * 180.0 / Math.PI, elv * 180.0 / Math.PI);
    # Shadow length perpendicular to the row at this sun position
    D = math.cos(sazm - azm) * math.sin(beta) / math.tan(elv)
    return D
# End of RowSpacing
def trackingBFvaluescalculator(beta, hub_height, r2r):
    '''
    1-axis tracking helper: compute the ground clearance C and the
    row-to-row gap D for a tracker rotated to angle `beta`.

    Parameters
    ----------
    beta : float
        Tilt from horizontal of the PV modules/panels (deg); converted to
        radians internally. (The previous docstring said radians, but the
        code multiplies by DTOR, so degrees is correct.)
    hub_height : float
        Tracker hub height (in PV panel slope lengths)
    r2r : float
        Row-to-row distance (in PV panel slope lengths)

    Returns
    -------
    C : float
        Ground clearance of the PV panel's lower edge (in PV panel slope
        lengths)
    D : float
        Horizontal distance between rows of PV panels (in PV panel slope
        lengths)
    '''
    # Created on Tue Jun 13 08:01:56 2017
    # @author: sayala
    beta = beta * DTOR   # Tilt from horizontal of the PV modules/panels, in radians
    x1 = math.cos(beta)  # Horizontal footprint of the panel (in PV panel slope lengths)
    D = r2r - x1         # Gap between rows = row-to-row distance minus the panel footprint
    # The panel rotates about its middle axis, so the bottom edge sits half a
    # projected panel height below the hub.
    hm = 0.5 * math.sin(beta)  # Vertical distance from panel middle to bottom edge
    C = hub_height - hm        # Ground clearance of the lower panel edge
    return C, D
| 50.374441
| 203
| 0.534692
| 10,227
| 78,836
| 4.110003
| 0.083896
| 0.006424
| 0.007137
| 0.012324
| 0.797112
| 0.775081
| 0.752195
| 0.72819
| 0.711489
| 0.691933
| 0
| 0.149293
| 0.362258
| 78,836
| 1,564
| 204
| 50.40665
| 0.686735
| 0.430895
| 0
| 0.692
| 0
| 0
| 0.013246
| 0.001708
| 0
| 0
| 0
| 0.001279
| 0
| 1
| 0.008
| false
| 0.002667
| 0.006667
| 0
| 0.022667
| 0.008
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db3fb84bca4d1b9ce63dca5f602d76eb7650bd3f
| 106
|
py
|
Python
|
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | 10
|
2021-12-22T11:31:53.000Z
|
2022-01-18T11:52:17.000Z
|
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | null | null | null |
lib/loss/__init__.py
|
kennethwdk/PINet
|
3a0abbd653146c56e39612384891c94c3fb49b35
|
[
"MIT"
] | null | null | null |
from .heatmaploss import HeatmapLoss
from .offsetloss import OffsetLoss
from .refineloss import RefineLoss
| 35.333333
| 36
| 0.867925
| 12
| 106
| 7.666667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103774
| 106
| 3
| 37
| 35.333333
| 0.968421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
db555bcdcf43aa3bbda4391fd627c19482dc0997
| 68,250
|
py
|
Python
|
dalme_app/migrations/0001_initial.py
|
DALME/dalme
|
46f9a0011fdb75c5098b552104fc73b1062e16e9
|
[
"BSD-3-Clause"
] | 6
|
2019-05-07T01:06:04.000Z
|
2021-02-19T20:45:09.000Z
|
dalme_app/migrations/0001_initial.py
|
DALME/dalme
|
46f9a0011fdb75c5098b552104fc73b1062e16e9
|
[
"BSD-3-Clause"
] | 23
|
2018-09-14T18:01:42.000Z
|
2021-12-29T17:25:18.000Z
|
dalme_app/migrations/0001_initial.py
|
DALME/dalme
|
46f9a0011fdb75c5098b552104fc73b1062e16e9
|
[
"BSD-3-Clause"
] | 1
|
2020-02-10T16:20:57.000Z
|
2020-02-10T16:20:57.000Z
|
# Generated by Django 3.1.2 on 2020-11-29 13:25
import dalme_app.models._templates
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_currentuser.middleware
import uuid
import wagtail.search.index
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0012_alter_user_first_name_max_length'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='rs_collection',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, null=True)),
('user', models.IntegerField(null=True)),
('created', models.DateTimeField(blank=True, null=True)),
('public', models.IntegerField(default='0')),
('theme', models.CharField(max_length=100, null=True)),
('theme2', models.CharField(max_length=100, null=True)),
('theme3', models.CharField(max_length=100, null=True)),
('allow_changes', models.IntegerField(default='0')),
('cant_delete', models.IntegerField(default='0')),
('keywords', models.TextField()),
('savedsearch', models.IntegerField(null=True)),
('home_page_publish', models.IntegerField(null=True)),
('home_page_text', models.TextField()),
('home_page_image', models.IntegerField(null=True)),
('session_id', models.IntegerField(null=True)),
('theme4', models.CharField(max_length=100, null=True)),
('theme5', models.CharField(max_length=100, null=True)),
('theme6', models.CharField(max_length=100, null=True)),
('theme7', models.CharField(max_length=100, null=True)),
('theme8', models.CharField(max_length=100, null=True)),
('theme9', models.CharField(max_length=100, null=True)),
('theme10', models.CharField(max_length=100, null=True)),
('theme11', models.CharField(max_length=100, null=True)),
('theme12', models.CharField(max_length=100, null=True)),
('theme13', models.CharField(max_length=100, null=True)),
('theme14', models.CharField(max_length=100, null=True)),
('theme15', models.CharField(max_length=100, null=True)),
('theme16', models.CharField(max_length=100, null=True)),
('theme17', models.CharField(max_length=100, null=True)),
('theme18', models.CharField(max_length=100, null=True)),
('theme19', models.CharField(max_length=100, null=True)),
('theme20', models.CharField(max_length=100, null=True)),
],
options={
'db_table': 'collection',
'managed': False,
},
),
migrations.CreateModel(
name='rs_collection_resource',
fields=[
('date_added', models.DateTimeField(auto_now_add=True, primary_key=True, serialize=False)),
('comment', models.TextField()),
('rating', models.IntegerField(null=True)),
('use_as_theme_thumbnail', models.IntegerField(null=True)),
('purchase_size', models.CharField(max_length=10, null=True)),
('purchase_complete', models.IntegerField(default='0')),
('purchase_price', models.FloatField(default='0.00', max_length=10)),
('sortorder', models.IntegerField(null=True)),
],
options={
'db_table': 'collection_resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=200, null=True)),
('resource_type', models.IntegerField(null=True)),
('has_image', models.IntegerField(default='0')),
('is_transcoding', models.IntegerField(default='0')),
('hit_count', models.IntegerField(default='0')),
('new_hit_count', models.IntegerField(default='0')),
('creation_date', models.DateTimeField(blank=True, null=True)),
('rating', models.IntegerField(null=True)),
('user_rating', models.IntegerField(null=True)),
('user_rating_count', models.IntegerField(null=True)),
('user_rating_total', models.IntegerField(null=True)),
('country', models.CharField(default=None, max_length=200, null=True)),
('file_extension', models.CharField(max_length=10, null=True)),
('preview_extension', models.CharField(max_length=10, null=True)),
('image_red', models.IntegerField(null=True)),
('image_green', models.IntegerField(null=True)),
('image_blue', models.IntegerField(null=True)),
('thumb_width', models.IntegerField(null=True)),
('thumb_height', models.IntegerField(null=True)),
('archive', models.IntegerField(default='0')),
('access', models.IntegerField(default='0')),
('colour_key', models.CharField(max_length=5, null=True)),
('created_by', models.IntegerField(null=True)),
('file_path', models.CharField(max_length=500, null=True)),
('file_modified', models.DateTimeField(blank=True, null=True)),
('file_checksum', models.CharField(max_length=32, null=True)),
('request_count', models.IntegerField(default='0')),
('expiry_notification_sent', models.IntegerField(default='0')),
('preview_tweaks', models.CharField(max_length=50, null=True)),
('geo_lat', models.FloatField(default=None, null=True)),
('geo_long', models.FloatField(default=None, null=True)),
('mapzoom', models.IntegerField(null=True)),
('disk_usage', models.IntegerField(null=True)),
('disk_usage_last_updated', models.DateTimeField(blank=True, null=True)),
('file_size', models.IntegerField(default=None, null=True)),
('preview_attempts', models.IntegerField(default=None, null=True)),
('field12', models.CharField(default=None, max_length=200, null=True)),
('field8', models.CharField(default=None, max_length=200, null=True)),
('field3', models.CharField(default=None, max_length=200, null=True)),
('annotation_count', models.IntegerField(null=True)),
('field51', models.CharField(default=None, max_length=200, null=True)),
('field79', models.CharField(blank=True, default=None, max_length=200, null=True)),
('modified', models.DateTimeField(auto_now_add=True, null=True)),
],
options={
'db_table': 'resource',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_data',
fields=[
('django_id', models.IntegerField(db_column='django_id', primary_key=True, serialize=False)),
('value', models.TextField()),
],
options={
'db_table': 'resource_data',
'managed': False,
},
),
migrations.CreateModel(
name='rs_resource_type_field',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=50, null=True)),
('title', models.CharField(max_length=400, null=True)),
('type', models.IntegerField(null=True)),
('order_by', models.IntegerField(default='0')),
('keywords_index', models.IntegerField(default='0')),
('partial_index', models.IntegerField(default='0')),
('resource_type', models.IntegerField(default='0')),
('resource_column', models.CharField(max_length=50, null=True)),
('display_field', models.IntegerField(default='1')),
('use_for_similar', models.IntegerField(default='1')),
('iptc_equiv', models.CharField(max_length=20, null=True)),
('display_template', models.TextField()),
('tab_name', models.CharField(max_length=50, null=True)),
('required', models.IntegerField(default='0')),
('smart_theme_name', models.CharField(max_length=200, null=True)),
('exiftool_field', models.CharField(max_length=200, null=True)),
('advanced_search', models.IntegerField(default='1')),
('simple_search', models.IntegerField(default='0')),
('help_text', models.TextField()),
('display_as_dropdown', models.IntegerField(default='0')),
('external_user_access', models.IntegerField(default='1')),
('autocomplete_macro', models.TextField()),
('hide_when_uploading', models.IntegerField(default='0')),
('hide_when_restricted', models.IntegerField(default='0')),
('value_filter', models.TextField()),
('exiftool_filter', models.TextField()),
('omit_when_copying', models.IntegerField(default='0')),
('tooltip_text', models.TextField()),
('regexp_filter', models.CharField(max_length=400, null=True)),
('sync_field', models.IntegerField(null=True)),
('display_condition', models.CharField(max_length=400, null=True)),
('onchange_macro', models.TextField()),
('field_constraint', models.IntegerField(null=True)),
('linked_data_field', models.TextField()),
('automatic_nodes_ordering', models.IntegerField(default='0')),
('fits_field', models.CharField(max_length=255, null=True)),
('personal_data', models.IntegerField(default='0')),
],
options={
'db_table': 'resource_type_field',
'managed': False,
},
),
migrations.CreateModel(
name='rs_user',
fields=[
('ref', models.IntegerField(primary_key=True, serialize=False)),
('username', models.CharField(max_length=50, unique=True)),
('password', models.CharField(max_length=64, null=True)),
('fullname', models.CharField(max_length=100, null=True)),
('email', models.CharField(max_length=100, null=True)),
('usergroup', models.IntegerField(choices=[(2, 'General User'), (4, 'Archivist'), (1, 'Administrator'), (3, 'Super Admin')], null=True)),
('last_active', models.DateTimeField(blank=True, null=True)),
('logged_in', models.IntegerField(null=True)),
('last_browser', models.TextField()),
('last_ip', models.CharField(max_length=100, null=True)),
('current_collection', models.IntegerField(null=True)),
('accepted_terms', models.IntegerField(default='0')),
('account_expires', models.DateTimeField(blank=True, null=True)),
('comments', models.TextField()),
('session', models.CharField(max_length=50, null=True)),
('ip_restrict', models.TextField()),
('search_filter_override', models.TextField()),
('password_last_change', models.DateTimeField(null=True)),
('login_tries', models.IntegerField(default='0')),
('login_last_try', models.DateTimeField(blank=True, null=True)),
('approved', models.IntegerField(default='1')),
('lang', models.CharField(max_length=11, null=True)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('hidden_collections', models.TextField()),
('password_reset_hash', models.CharField(max_length=100, null=True)),
('origin', models.CharField(max_length=50, null=True)),
('unique_hash', models.CharField(max_length=50, null=True)),
('wp_authrequest', models.CharField(max_length=50, null=True)),
('csrf_token', models.CharField(max_length=255, null=True)),
],
options={
'db_table': 'user',
'managed': False,
},
),
migrations.CreateModel(
name='Agent',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('standard_name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Person'), (2, 'Organization')])),
('notes', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_agent_modification', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='agent', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('file', models.FileField(upload_to='attachments/%Y/%m/')),
('type', models.CharField(max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attachment_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Attribute_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('data_type', models.CharField(choices=[('DATE', 'DATE (date)'), ('INT', 'INT (integer)'), ('STR', 'STR (string)'), ('TXT', 'TXT (text)'), ('FK-UUID', 'FK-UUID (DALME record)'), ('FK-INT', 'FK-INT (DALME record)')], max_length=15)),
('source', models.CharField(blank=True, default=None, max_length=255, null=True)),
('options_list', models.CharField(blank=True, default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_type_modification', to=settings.AUTH_USER_MODEL)),
('same_as', models.ForeignKey(db_column='same_as', null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attribute_type')),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Concept',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('getty_id', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_concept_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_attributes',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('order', models.IntegerField(db_index=True, null=True)),
('required', models.BooleanField(default=False)),
('unique', models.BooleanField(default=True)),
('attribute_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content_types', to='dalme_app.attribute_type')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Content_class',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55, unique=True)),
('description', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_class_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='Content_type',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('has_pages', models.BooleanField(db_index=True, default=False)),
('has_inventory', models.BooleanField(default=False)),
('parents', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r1_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('r2_inheritance', models.CharField(blank=True, default=None, max_length=255, null=True)),
('attribute_types', models.ManyToManyField(through='dalme_app.Content_attributes', to='dalme_app.Attribute_type')),
('content_class', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_class')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_type_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['id'],
},
),
migrations.CreateModel(
name='CountryReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255, unique=True)),
('alpha_3_code', models.CharField(max_length=3)),
('alpha_2_code', models.CharField(max_length=2)),
('num_code', models.IntegerField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_countryreference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Entity_phrase',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('phrase', models.TextField(blank=True)),
('type', models.IntegerField(choices=[(1, 'Agent'), (2, 'Object'), (3, 'Place')])),
('object_id', models.UUIDField(db_index=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_entity_phrase_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Headword',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('word', models.CharField(max_length=55)),
('full_lemma', models.CharField(max_length=255)),
('concept_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_headword_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Object',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=55)),
('dam_id', models.IntegerField(db_index=True, null=True)),
('order', models.IntegerField(db_index=True)),
('canvas', models.TextField(null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_page_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='Set',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('set_type', models.IntegerField(choices=[(1, 'Corpus'), (2, 'Collection'), (3, 'Dataset'), (4, 'Workset')])),
('is_public', models.BooleanField(default=False)),
('has_landing', models.BooleanField(default=False)),
('endpoint', models.CharField(max_length=55)),
('permissions', models.IntegerField(choices=[(1, 'Private'), (2, 'Others: view'), (3, 'Others: view|add'), (4, 'Others: view|add|delete')], default=2)),
('description', models.TextField()),
('stat_title', models.CharField(blank=True, max_length=25, null=True)),
('stat_text', models.CharField(blank=True, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_creation', to=settings.AUTH_USER_MODEL)),
('dataset_usergroup', models.ForeignKey(limit_choices_to={'properties__type': 3}, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='dataset', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('has_inventory', models.BooleanField(db_index=True, default=False)),
('is_private', models.BooleanField(db_index=True, default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_related', to=settings.AUTH_USER_MODEL)),
],
bases=(wagtail.search.index.Indexed, models.Model),
),
migrations.CreateModel(
name='Wordform',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('normalized_form', models.CharField(max_length=55)),
('pos', models.CharField(max_length=255)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_creation', to=settings.AUTH_USER_MODEL)),
('headword_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.headword')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_wordform_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Transcription',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('transcription', models.TextField(blank=True, default=None)),
('author', models.CharField(default=dalme_app.models._templates.get_current_username, max_length=255)),
('version', models.IntegerField(default=1)),
('count_ignore', models.BooleanField(default=False)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_transcription_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Token',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('raw_token', models.CharField(max_length=255)),
('clean_token', models.CharField(max_length=55)),
('order', models.IntegerField(db_index=True)),
('flags', models.CharField(max_length=10)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_token_modification', to=settings.AUTH_USER_MODEL)),
('object_phrase_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.entity_phrase')),
('wordform_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dalme_app.wordform')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Ticket',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('subject', models.CharField(max_length=140)),
('description', models.TextField(blank=True, null=True)),
('status', models.IntegerField(choices=[(0, 'Open'), (1, 'Closed')], default=0)),
('url', models.CharField(default=None, max_length=255, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_ticket_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['status', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='TaskList',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=60)),
('slug', models.SlugField(default='')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_creation', to=settings.AUTH_USER_MODEL)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_list_group', to='auth.group')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_modification', to=settings.AUTH_USER_MODEL)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tasklist_related', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Task Lists',
'ordering': ['name'],
'unique_together': {('group', 'slug')},
},
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('title', models.CharField(max_length=140)),
('due_date', models.DateField(blank=True, null=True)),
('completed', models.BooleanField(default=False)),
('completed_date', models.DateField(blank=True, null=True)),
('description', models.TextField(blank=True, null=True)),
('priority', models.PositiveIntegerField(blank=True, null=True)),
('position', models.CharField(blank=True, default=None, max_length=255)),
('url', models.CharField(default=None, max_length=255, null=True)),
('assigned_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_assigned_to', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='task_created_by', to=settings.AUTH_USER_MODEL)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_creation', to=settings.AUTH_USER_MODEL)),
('file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_task_modification', to=settings.AUTH_USER_MODEL)),
('task_list', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.tasklist')),
('workset', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='dalme_app.set')),
],
options={
'ordering': ['priority', 'creation_timestamp'],
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('tag_type', models.CharField(choices=[('WF', 'Workflow'), ('C', 'Control'), ('T', 'Ticket')], max_length=2)),
('tag', models.CharField(default=None, max_length=55, null=True)),
('tag_group', models.CharField(default=None, max_length=255, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_tag_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Source_pages',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_pages_modification', to=settings.AUTH_USER_MODEL)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sources', to='dalme_app.page')),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='source_pages', to='dalme_app.source')),
('transcription', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='source_pages', to='dalme_app.transcription')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='source',
name='pages',
field=models.ManyToManyField(db_index=True, through='dalme_app.Source_pages', to='dalme_app.Page'),
),
migrations.AddField(
model_name='source',
name='parent',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='dalme_app.source'),
),
migrations.AddField(
model_name='source',
name='primary_dataset',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_query_name='set_members', to='dalme_app.set'),
),
migrations.AddField(
model_name='source',
name='type',
field=models.ForeignKey(db_column='type', on_delete=django.db.models.deletion.PROTECT, to='dalme_app.content_type'),
),
migrations.CreateModel(
name='Scope',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Temporal'), (2, 'Spatial'), (3, 'Linguistic'), (4, 'Context')])),
('range', models.TextField()),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_scope_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RightsPolicy',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=100)),
('rights_status', models.IntegerField(choices=[(1, 'Copyrighted'), (2, 'Orphaned'), (3, 'Owned'), (4, 'Public Domain'), (5, 'Unknown')], default=5)),
('rights', models.TextField(blank=True, default=None)),
('rights_notice', models.JSONField(null=True)),
('licence', models.TextField(blank=True, default=None, null=True)),
('rights_holder', models.CharField(default=None, max_length=255, null=True)),
('notice_display', models.BooleanField(default=False)),
('public_display', models.BooleanField(default=True)),
('attachments', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.attachment')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_rightspolicy_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Relationship',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('source_object_id', models.UUIDField(db_index=True, null=True)),
('target_object_id', models.UUIDField(db_index=True, null=True)),
('notes', models.TextField(blank=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_relationship_modification', to=settings.AUTH_USER_MODEL)),
('scope', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='dalme_app.scope')),
('source_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_sources', to='contenttypes.contenttype')),
('target_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_targets', to='contenttypes.contenttype')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='PublicRegister',
fields=[
('object_id', models.UUIDField(db_index=True, primary_key=True, serialize=False)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creator', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_publicregister_creation', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(blank=True, max_length=50)),
('primary_group', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='auth.group')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Place',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('std_name', models.CharField(max_length=255)),
('type', models.IntegerField(db_index=True)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_place_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Object_attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('attribute_concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.concept')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_object_attribute_modification', to=settings.AUTH_USER_MODEL)),
('object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dalme_app.object')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='LanguageReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('glottocode', models.CharField(max_length=25, unique=True)),
('iso6393', models.CharField(blank=True, default=None, max_length=25, null=True, unique=True)),
('name', models.CharField(max_length=255)),
('type', models.IntegerField(choices=[(1, 'Language'), (2, 'Dialect')])),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_languagereference_modification', to=settings.AUTH_USER_MODEL)),
('parent', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.languagereference')),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='GroupProperties',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.IntegerField(choices=[(1, 'Admin'), (2, 'DAM'), (3, 'Dataset'), (4, 'Knowledge Base'), (5, 'Website')])),
('description', models.CharField(max_length=255)),
('group', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='auth.group')),
],
),
migrations.AddField(
model_name='entity_phrase',
name='transcription_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entity_phrases', to='dalme_app.transcription'),
),
migrations.AddField(
model_name='content_attributes',
name='content_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attribute_type_list', to='dalme_app.content_type'),
),
migrations.AddField(
model_name='content_attributes',
name='creation_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_creation', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='content_attributes',
name='modification_user',
field=models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_content_attributes_modification', to=settings.AUTH_USER_MODEL),
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.CharField(db_index=True, max_length=55, null=True)),
('body', models.TextField(blank=True, default=None, null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_comment_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['creation_timestamp'],
},
),
migrations.CreateModel(
name='AttributeReference',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('short_name', models.CharField(max_length=55)),
('description', models.TextField()),
('data_type', models.CharField(max_length=15)),
('source', models.CharField(max_length=255)),
('term_type', models.CharField(blank=True, default=None, max_length=55)),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attributereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Workflow',
fields=[
('source', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='workflow', serialize=False, to='dalme_app.source')),
('wf_status', models.IntegerField(choices=[(1, 'assessing'), (2, 'processing'), (3, 'processed')], default=2)),
('stage', models.IntegerField(choices=[(1, 'ingestion'), (2, 'transcription'), (3, 'markup'), (4, 'review'), (5, 'parsing')], default=1)),
('last_modified', models.DateTimeField(blank=True, null=True)),
('help_flag', models.BooleanField(default=False)),
('ingestion_done', models.BooleanField(default=False)),
('transcription_done', models.BooleanField(default=False)),
('markup_done', models.BooleanField(default=False)),
('parsing_done', models.BooleanField(default=False)),
('review_done', models.BooleanField(default=False)),
('is_public', models.BooleanField(default=False)),
('last_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Work_log',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('event', models.CharField(max_length=255)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_log', to='dalme_app.workflow')),
],
),
migrations.CreateModel(
name='Source_credit',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('type', models.IntegerField(choices=[(1, 'Editor'), (2, 'Corrections'), (3, 'Contributor')])),
('note', models.CharField(blank=True, max_length=255, null=True)),
('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.agent')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_source_credit_modification', to=settings.AUTH_USER_MODEL)),
('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='credits', to='dalme_app.source')),
],
options={
'unique_together': {('source', 'agent', 'type')},
},
),
migrations.AlterUniqueTogether(
name='source',
unique_together={('type', 'name')},
),
migrations.CreateModel(
name='Set_x_content',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, default=uuid.uuid4)),
('workset_done', models.BooleanField(default=False)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_set_x_content_modification', to=settings.AUTH_USER_MODEL)),
('set_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='dalme_app.set')),
],
options={
'ordering': ['set_id', 'id'],
'unique_together': {('content_type', 'object_id', 'set_id')},
},
),
migrations.CreateModel(
name='LocaleReference',
fields=[
('id', models.AutoField(db_index=True, primary_key=True, serialize=False, unique=True)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('name', models.CharField(max_length=255)),
('administrative_region', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('longitude', models.DecimalField(decimal_places=6, max_digits=9, null=True)),
('country', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='dalme_app.countryreference')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_localereference_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['country', 'name'],
'unique_together': {('name', 'administrative_region')},
},
),
migrations.CreateModel(
name='Attribute',
fields=[
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('creation_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
('modification_timestamp', models.DateTimeField(auto_now=True, null=True)),
('object_id', models.UUIDField(db_index=True, null=True)),
('value_STR', models.CharField(blank=True, default=None, max_length=255, null=True)),
('value_DATE_d', models.IntegerField(blank=True, null=True)),
('value_DATE_m', models.IntegerField(blank=True, null=True)),
('value_DATE_y', models.IntegerField(blank=True, null=True)),
('value_DATE', models.DateField(blank=True, null=True)),
('value_INT', models.IntegerField(blank=True, null=True)),
('value_TXT', models.TextField(blank=True, default=None, null=True)),
('value_JSON', models.JSONField(null=True)),
('attribute_type', models.ForeignKey(db_column='attribute_type', on_delete=django.db.models.deletion.CASCADE, to='dalme_app.attribute_type')),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
('creation_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_creation', to=settings.AUTH_USER_MODEL)),
('modification_user', models.ForeignKey(default=django_currentuser.middleware.get_current_user, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='dalme_app_attribute_modification', to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('object_id', 'attribute_type', 'value_STR')},
},
),
]
| 72.761194
| 257
| 0.636674
| 7,314
| 68,250
| 5.699344
| 0.063713
| 0.059302
| 0.041646
| 0.065443
| 0.828451
| 0.788245
| 0.737867
| 0.673983
| 0.646923
| 0.624565
| 0
| 0.00929
| 0.223985
| 68,250
| 937
| 258
| 72.838847
| 0.777769
| 0.000659
| 0
| 0.458065
| 1
| 0
| 0.16555
| 0.055335
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.003226
| 0.007527
| 0
| 0.011828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
db6d31174807080316cb8c996b05fcc9ce69a5b7
| 40
|
py
|
Python
|
my_classes/.history/ModulesPackages_PackageNamespaces/example3a/main_20210725220637.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
my_classes/.history/ModulesPackages_PackageNamespaces/example3a/main_20210725220637.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
my_classes/.history/ModulesPackages_PackageNamespaces/example3a/main_20210725220637.py
|
minefarmer/deep-Dive-1
|
b0675b853180c5b5781888266ea63a3793b8d855
|
[
"Unlicense"
] | null | null | null |
import os.path
import types
import sys
| 8
| 14
| 0.8
| 7
| 40
| 4.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 40
| 4
| 15
| 10
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
db752d631ccf3257bd962fe18b4682f3220a6fa6
| 178
|
py
|
Python
|
geoviz/__init__.py
|
JustinGOSSES/geoviz
|
159b0665d9efcffe46061313c15ad09ced840d2d
|
[
"MIT"
] | 6
|
2018-10-16T16:38:15.000Z
|
2018-10-22T13:56:13.000Z
|
geoviz/__init__.py
|
JustinGOSSES/geoviz
|
159b0665d9efcffe46061313c15ad09ced840d2d
|
[
"MIT"
] | 5
|
2018-10-14T21:49:00.000Z
|
2018-11-12T18:59:48.000Z
|
geoviz/__init__.py
|
nathangeology/geoviz
|
5643e8880b4ecc241d4f8806743bf0441dd435c1
|
[
"MIT"
] | 1
|
2019-05-30T23:36:29.000Z
|
2019-05-30T23:36:29.000Z
|
from load_las_data import LoadLasData
from altair_log_plot import AltAirLogPlot
from load_shapefile_data import LoadShpData
from alitair_well_location_map import WellLocationMap
| 35.6
| 53
| 0.910112
| 25
| 178
| 6.12
| 0.68
| 0.104575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 178
| 4
| 54
| 44.5
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dbca8d6120f0830afa062de217262e49809ebe82
| 388
|
py
|
Python
|
backend/api/tests/test_models/test_utils/test_ranking_suffixes.py
|
ChristchurchCityWeightlifting/lifter-api
|
a82b79c75106e7f4f8ea4b4e3e12d727213445e3
|
[
"MIT"
] | null | null | null |
backend/api/tests/test_models/test_utils/test_ranking_suffixes.py
|
ChristchurchCityWeightlifting/lifter-api
|
a82b79c75106e7f4f8ea4b4e3e12d727213445e3
|
[
"MIT"
] | 5
|
2022-03-07T08:30:47.000Z
|
2022-03-22T09:15:52.000Z
|
backend/api/tests/test_models/test_utils/test_ranking_suffixes.py
|
ChristchurchCityWeightlifting/lifter-api
|
a82b79c75106e7f4f8ea4b4e3e12d727213445e3
|
[
"MIT"
] | null | null | null |
import pytest
from api.models.utils import rankings
@pytest.fixture
def test_data():
    """Sample integers that exercise the 'st' and 'th' ordinal suffixes."""
    samples = [1, 11, 101]
    return samples
def test_rankings(test_data):
    """Check that rankings() produces the correct ordinal string.

    e.g. 1 -> "1st", 11 -> "11th", 101 -> "101st".
    """
    expected = ["1st", "11th", "101st"]
    for number, suffix in zip(test_data, expected):
        assert rankings(number) == suffix
| 19.4
| 44
| 0.641753
| 55
| 388
| 4.418182
| 0.509091
| 0.164609
| 0.263374
| 0.271605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091525
| 0.239691
| 388
| 19
| 45
| 20.421053
| 0.732203
| 0.216495
| 0
| 0
| 0
| 0
| 0.042705
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.222222
| false
| 0
| 0.222222
| 0.111111
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
918a3b0f516ea68dd89954d9a42756ad875c22c6
| 33
|
py
|
Python
|
src/stoat/core/structure/__init__.py
|
saarkatz/guppy-struct
|
b9099353312c365cfd788dbd2d168a9c844765be
|
[
"Apache-2.0"
] | 1
|
2021-12-07T11:59:11.000Z
|
2021-12-07T11:59:11.000Z
|
src/stoat/core/structure/__init__.py
|
saarkatz/stoat-struct
|
b9099353312c365cfd788dbd2d168a9c844765be
|
[
"Apache-2.0"
] | null | null | null |
src/stoat/core/structure/__init__.py
|
saarkatz/stoat-struct
|
b9099353312c365cfd788dbd2d168a9c844765be
|
[
"Apache-2.0"
] | null | null | null |
from .structure import Structure
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
91903bbb82369647bc8ec6646143a89d378edc88
| 234
|
py
|
Python
|
iqoptionapi/http/billing.py
|
mustx1/MYIQ
|
3afb597aa8a8abc278b7d70dad46af81789eae3e
|
[
"MIT"
] | 3
|
2021-06-05T06:58:01.000Z
|
2021-11-25T23:52:18.000Z
|
iqoptionapi/http/billing.py
|
mustx1/MYIQ
|
3afb597aa8a8abc278b7d70dad46af81789eae3e
|
[
"MIT"
] | 5
|
2022-01-20T00:32:49.000Z
|
2022-02-16T23:12:10.000Z
|
iqoptionapi/http/billing.py
|
mustx1/MYIQ
|
3afb597aa8a8abc278b7d70dad46af81789eae3e
|
[
"MIT"
] | 2
|
2020-11-10T19:03:38.000Z
|
2020-12-07T10:42:36.000Z
|
"""Module for IQ option billing resource."""
from iqoptionapi.http.resource import Resource
class Billing(Resource):
    """HTTP resource for the IQ Option billing endpoint.

    Thin declarative subclass: it only pins the endpoint URL; all
    request behavior comes from the Resource base class.
    """

    # pylint: disable=too-few-public-methods

    url = "billing"
| 21.272727
| 47
| 0.709402
| 29
| 234
| 5.724138
| 0.62069
| 0.271084
| 0.13253
| 0.216867
| 0.313253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17094
| 234
| 10
| 48
| 23.4
| 0.85567
| 0.495727
| 0
| 0
| 0
| 0
| 0.065421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
91b62cc1816352d2c7a0ead7b1bf1eabb9a68df6
| 8,113
|
py
|
Python
|
dataset.py
|
mintanwei/IPCLs-Net
|
04937df683216a090c0749cc90ab7e517dbab0fd
|
[
"MIT"
] | null | null | null |
dataset.py
|
mintanwei/IPCLs-Net
|
04937df683216a090c0749cc90ab7e517dbab0fd
|
[
"MIT"
] | null | null | null |
dataset.py
|
mintanwei/IPCLs-Net
|
04937df683216a090c0749cc90ab7e517dbab0fd
|
[
"MIT"
] | null | null | null |
import os
import torch
from PIL import Image
from read_csv import csv_to_label_and_bbx
import numpy as np
from torch.utils.data import Subset, random_split, ConcatDataset
class NBIDataset(object):
    """Detection-style dataset of NBI images with bounding-box annotations.

    Expects ``root`` to contain an ``images/`` directory and an
    ``annotations.csv`` file parsed by ``csv_to_label_and_bbx``.
    ``__getitem__`` returns ``(image, target)`` where ``target`` follows
    the torchvision detection convention (boxes, labels, image_id, area,
    iscrowd).
    """

    def __init__(self, root, transforms, nob3=False):
        self.root = root
        self.transforms = transforms
        # Sort the listing so an index always maps to the same image.
        image_dir = os.path.join(root, "images")
        self.imgs = list(sorted(os.listdir(image_dir)))
        # Mapping: image file name -> {'bbx': ..., 'labels': ...}
        self.boxes = csv_to_label_and_bbx(os.path.join(self.root, "annotations.csv"), nob3)

    def __getitem__(self, idx):
        name = self.imgs[idx]
        img = Image.open(os.path.join(self.root, "images", name)).convert("RGB")
        ann = self.boxes[name]
        # FloatTensor[N, 4] of [x0, y0, x1, y1] boxes.
        box_tensor = torch.as_tensor(ann['bbx'], dtype=torch.float32)
        # Int64Tensor[N] of class labels.
        label_tensor = torch.as_tensor(ann['labels'], dtype=torch.int64)
        heights = box_tensor[:, 3] - box_tensor[:, 1]
        widths = box_tensor[:, 2] - box_tensor[:, 0]
        target = {
            "boxes": box_tensor,
            "labels": label_tensor,
            "image_id": torch.tensor([idx]),
            "area": heights * widths,
            "iscrowd": torch.zeros((label_tensor.size()[0],), dtype=torch.int64),
        }
        if self.transforms is not None:
            img = self.transforms(img)
        return img, target

    def __len__(self):
        return len(self.imgs)
class NBINewDataset(object):
    """Detection dataset with a fixed train/test directory split; annotations
    come from ``root/annotations_all.csv`` filtered to the chosen split."""

    def __init__(self, root, transforms, train=True):
        self.root = root
        self.transforms = transforms
        # Images live in root/train or root/test depending on the split.
        self.path = os.path.join(root, "train" if train else "test")
        self.imgs = sorted(os.listdir(self.path))
        self.boxes = csv_to_label_and_bbx(os.path.join(self.root, "annotations_all.csv"), img_names=self.imgs)

    def __getitem__(self, idx):
        """Return (image, target dict) for index ``idx``."""
        name = self.imgs[idx]
        img = Image.open(os.path.join(self.path, name)).convert("RGB")
        ann = self.boxes[name]
        boxes = torch.as_tensor(ann['bbx'], dtype=torch.float32)    # FloatTensor[N, 4]
        labels = torch.as_tensor(ann['labels'], dtype=torch.int64)  # Int64Tensor[N]
        target = {
            "boxes": boxes,
            "labels": labels,
            "image_id": torch.tensor([idx]),
            "area": (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0]),
            "iscrowd": torch.zeros((labels.size()[0],), dtype=torch.int64),
        }
        if self.transforms is not None:
            img = self.transforms(img)
        return img, target

    def __len__(self):
        return len(self.imgs)
class NBIFullDataset(object):
    """Detection dataset over the complete image set in ``root/all``,
    annotated via ``root/annotations.csv``."""

    def __init__(self, root, transforms):
        self.root = root
        self.transforms = transforms
        self.path = os.path.join(root, "all")
        self.imgs = sorted(os.listdir(self.path))
        self.boxes = csv_to_label_and_bbx(os.path.join(self.root, "annotations.csv"), img_names=self.imgs)

    def __getitem__(self, idx):
        """Return (image, target dict) for index ``idx``."""
        name = self.imgs[idx]
        img = Image.open(os.path.join(self.path, name)).convert("RGB")
        ann = self.boxes[name]
        boxes = torch.as_tensor(ann['bbx'], dtype=torch.float32)    # FloatTensor[N, 4]
        labels = torch.as_tensor(ann['labels'], dtype=torch.int64)  # Int64Tensor[N]
        target = {
            "boxes": boxes,
            "labels": labels,
            "image_id": torch.tensor([idx]),
            "area": (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0]),
            "iscrowd": torch.zeros((labels.size()[0],), dtype=torch.int64),
        }
        if self.transforms is not None:
            img = self.transforms(img)
        return img, target

    def __len__(self):
        return len(self.imgs)
class NBIDenseDataset(object):
    """Dataset pairing each image in ``root/images`` with a precomputed
    density map stored as ``root/density_maps/<name>.npy``."""

    def __init__(self, root, transforms):
        self.root = root
        self.transforms = transforms
        # Sort so each image lines up with its density map on disk.
        self.imgs = sorted(os.listdir(os.path.join(root, "images")))

    def __getitem__(self, idx):
        """Return (image, density-map tensor) for index ``idx``."""
        name = self.imgs[idx]
        img = Image.open(os.path.join(self.root, "images", name)).convert("RGB")
        # The density map shares the image's base name with a .npy extension
        # (the [:-4] slice drops the 4-character image extension).
        dm_file = os.path.join(self.root, "density_maps", name[:-4] + ".npy")
        density_map = torch.from_numpy(np.load(dm_file))
        if self.transforms is not None:
            img = self.transforms(img)
        return img, density_map

    def __len__(self):
        return len(self.imgs)
class NBIPatchDataset(object):
    """Dataset over PNG patches in ``root``; image loading is currently
    disabled and ``__getitem__`` yields only file names."""

    def __init__(self, root, transforms):
        self.root = root
        self.transforms = transforms
        # Keep only PNG files, sorted for a stable ordering.
        self.imgs = [f for f in sorted(os.listdir(root)) if f.endswith("png")]
        # NOTE(review): allow_pickle=True executes pickled objects on load --
        # only safe if ans.npy is trusted, locally produced data.
        self.ans = np.load(os.path.join(root, "ans.npy"), allow_pickle=True).item()

    def __getitem__(self, idx):
        # Image/density loading was commented out upstream; only the
        # patch file name is returned for now.
        return self.imgs[idx]

    def __len__(self):
        return len(self.imgs)
def split_index(K=5, len=100):
    """Partition the index range [0, len) into K contiguous, near-equal chunks.

    NOTE(review): the parameter name ``len`` shadows the builtin; it is kept
    unchanged for backward compatibility with existing callers.
    """
    all_indices = list(range(len))
    return [all_indices[(k * len) // K:((k + 1) * len) // K] for k in range(K)]
def k_fold_index(K=5, len=100, fold=0):
    """Return (train_indices, val_indices) for one fold of a K-fold split.

    The fold-th chunk from ``split_index`` becomes the validation set; every
    other chunk is concatenated (in order) into the training set.
    """
    chunks = split_index(K, len)
    val = chunks[fold]
    train = [i for k, chunk in enumerate(chunks) if k != fold for i in chunk]
    return train, val
def stat_dataset(dataset):
    """Print per-class label counts ('A', 'B1', 'B2', 'B3') for a detection
    dataset yielding (image, target) pairs with integer ``target['labels']``."""
    class_ids = {1: "A", 2: "B1", 3: "B2", 4: "B3"}
    stats = dict.fromkeys(class_ids.values(), 0)
    for _, target in dataset:
        for label in target['labels']:
            stats[class_ids[int(label)]] += 1
    print(stats)
def NBIFiveFoldDataset(transforms):
    """Build 5-fold cross-validation splits over the full NBI dataset.

    Returns a list of five dicts, each holding a "train" ConcatDataset of
    four chunks and a "val" subset of the remaining chunk.
    """
    ds = NBIFullDataset(root="./NBI_full_dataset/", transforms=transforms)
    # Fixed seed makes the random 5-way split reproducible across runs.
    torch.manual_seed(13)
    parts = random_split(ds, [46, 46, 46, 45, 45])
    folds = []
    for fold in range(5):
        folds.append({
            "train": ConcatDataset([parts[k] for k in range(5) if k != fold]),
            "val": parts[fold],
        })
    return folds
if __name__ == '__main__':
# ds = NBIFiveFoldDataset(None)
di = "aaa".encode("UTF-8")
result = eval(di)
print(result)
| 32.322709
| 110
| 0.598053
| 1,065
| 8,113
| 4.384038
| 0.134272
| 0.039409
| 0.038552
| 0.029985
| 0.773185
| 0.750482
| 0.715357
| 0.709574
| 0.70272
| 0.70272
| 0
| 0.014833
| 0.260446
| 8,113
| 250
| 111
| 32.452
| 0.763333
| 0.163688
| 0
| 0.608974
| 0
| 0
| 0.044922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121795
| false
| 0
| 0.038462
| 0.038462
| 0.275641
| 0.012821
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
91d921988391847f171d7f816701e122ce388582
| 143
|
py
|
Python
|
tb/storage/__init__.py
|
DronMDF/manabot
|
b412e8cb9b5247f05487bed4cbf4967f7b58327f
|
[
"MIT"
] | 1
|
2017-11-29T11:51:12.000Z
|
2017-11-29T11:51:12.000Z
|
tb/storage/__init__.py
|
DronMDF/manabot
|
b412e8cb9b5247f05487bed4cbf4967f7b58327f
|
[
"MIT"
] | 109
|
2017-11-28T20:51:59.000Z
|
2018-02-02T13:15:29.000Z
|
tb/storage/__init__.py
|
DronMDF/manabot
|
b412e8cb9b5247f05487bed4cbf4967f7b58327f
|
[
"MIT"
] | null | null | null |
from .database import StDatabase
from .telegram import StTelegram
from .tinydb import TinyDataBase, TinySelect
from .utility import StDispatch
| 28.6
| 44
| 0.846154
| 17
| 143
| 7.117647
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118881
| 143
| 4
| 45
| 35.75
| 0.960317
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
37cd4b6be89839faecee7dd52588398ff12411ba
| 247
|
py
|
Python
|
src/compas_blender/forms/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | 1
|
2019-03-27T22:32:56.000Z
|
2019-03-27T22:32:56.000Z
|
src/compas_blender/forms/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | null | null | null |
src/compas_blender/forms/__init__.py
|
yijiangh/compas
|
a9e86edf6b602f47ca051fccedcaa88a5e5d3600
|
[
"MIT"
] | null | null | null |
"""
********************************************************************************
compas_blender.forms
********************************************************************************
.. currentmodule:: compas_blender.forms
"""
__all__ = []
| 22.454545
| 80
| 0.234818
| 8
| 247
| 6.5
| 0.625
| 0.5
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05668
| 247
| 10
| 81
| 24.7
| 0.223176
| 0.902834
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
37e97b75428a1033eda5441303e4da93aa132446
| 221
|
py
|
Python
|
src/wormhole/__main__.py
|
dmgolembiowski/magic-wormhole
|
d517a10282d5e56f300db462b1a6eec517202af7
|
[
"MIT"
] | 2,801
|
2021-01-10T16:37:14.000Z
|
2022-03-31T19:02:50.000Z
|
src/wormhole/__main__.py
|
dmgolembiowski/magic-wormhole
|
d517a10282d5e56f300db462b1a6eec517202af7
|
[
"MIT"
] | 52
|
2021-01-10T01:54:00.000Z
|
2022-03-11T13:12:41.000Z
|
src/wormhole/__main__.py
|
dmgolembiowski/magic-wormhole
|
d517a10282d5e56f300db462b1a6eec517202af7
|
[
"MIT"
] | 106
|
2021-01-21T14:32:22.000Z
|
2022-03-18T10:33:09.000Z
|
from __future__ import absolute_import, print_function, unicode_literals
# Entry point for ``python -m wormhole``: delegate to the command-line
# interface.  The import is done lazily so merely importing the package
# never pulls in the CLI machinery.
if __name__ == "__main__":
    from .cli import cli
    cli.wormhole()
else:
    # Importing this module any other way is deliberately a no-op; the
    # stricter behavior below was considered and left disabled.
    # raise ImportError('this module should not be imported')
    pass
| 27.625
| 72
| 0.737557
| 28
| 221
| 5.285714
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18552
| 221
| 7
| 73
| 31.571429
| 0.822222
| 0.248869
| 0
| 0
| 0
| 0
| 0.04878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.333333
| 0
| 0.333333
| 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
37f7e625de3ee5f43165604bef1b04155036f942
| 56
|
py
|
Python
|
models/__init__.py
|
TvSeriesFans/CineMonster
|
036a3223618afd536932d21b0e86d18d0fba3b28
|
[
"Apache-2.0"
] | 15
|
2017-09-17T17:52:43.000Z
|
2020-08-31T15:41:12.000Z
|
models/__init__.py
|
TvSeriesFans/CineMonster
|
036a3223618afd536932d21b0e86d18d0fba3b28
|
[
"Apache-2.0"
] | 13
|
2017-03-14T13:24:14.000Z
|
2021-08-20T13:52:54.000Z
|
models/__init__.py
|
TvSeriesFans/CineMonster
|
036a3223618afd536932d21b0e86d18d0fba3b28
|
[
"Apache-2.0"
] | 27
|
2017-07-01T18:33:49.000Z
|
2021-08-05T09:13:18.000Z
|
from models.Model import Player, Group, Session, engine
| 28
| 55
| 0.803571
| 8
| 56
| 5.625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 56
| 1
| 56
| 56
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
37fbc6ec2f245fb6973d1636993985b6d187eb07
| 63
|
py
|
Python
|
src/csvutils.py
|
imco/nmx
|
5c6303ece6148a83963b2e6524d6f94b450ad659
|
[
"MIT"
] | null | null | null |
src/csvutils.py
|
imco/nmx
|
5c6303ece6148a83963b2e6524d6f94b450ad659
|
[
"MIT"
] | null | null | null |
src/csvutils.py
|
imco/nmx
|
5c6303ece6148a83963b2e6524d6f94b450ad659
|
[
"MIT"
] | 1
|
2020-04-07T19:02:41.000Z
|
2020-04-07T19:02:41.000Z
|
def escapeQuotes(string):
    """Escape embedded double quotes for CSV output by doubling them.

    Args:
        string: the raw field text.

    Returns:
        The text with every '"' replaced by '""' (RFC 4180 quoting).
    """
    # Fixed: removed the stray trailing semicolon from the original.
    return string.replace('"', '""')
| 21
| 36
| 0.634921
| 6
| 63
| 6.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 37
| 31.5
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
37fbd5a54d581539270bd58bff9f475311ff3236
| 68
|
py
|
Python
|
test/sanity_import_vpp_papi.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 751
|
2017-07-13T06:16:46.000Z
|
2022-03-30T09:14:35.000Z
|
test/sanity_import_vpp_papi.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 63
|
2018-06-11T09:48:35.000Z
|
2021-01-05T09:11:03.000Z
|
test/sanity_import_vpp_papi.py
|
amithbraj/vpp
|
edf1da94dc099c6e2ab1d455ce8652fada3cdb04
|
[
"Apache-2.0"
] | 479
|
2017-07-13T06:17:26.000Z
|
2022-03-31T18:20:43.000Z
|
#!/usr/bin/env python3
""" sanity check script """
import vpp_papi
| 13.6
| 27
| 0.691176
| 10
| 68
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0.147059
| 68
| 4
| 28
| 17
| 0.775862
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5338514dfbd12161e51e2cea2de687a8253338f8
| 28,526
|
py
|
Python
|
PyDSTool/PyCont/BifPoint.py
|
mdlama/pydstool
|
3d298e908ff55340cd3612078508be0c791f63a8
|
[
"Python-2.0",
"OLDAP-2.7"
] | 2
|
2021-02-04T15:01:31.000Z
|
2021-02-25T16:08:43.000Z
|
PyDSTool/PyCont/BifPoint.py
|
mdlama/pydstool
|
3d298e908ff55340cd3612078508be0c791f63a8
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
PyDSTool/PyCont/BifPoint.py
|
mdlama/pydstool
|
3d298e908ff55340cd3612078508be0c791f63a8
|
[
"Python-2.0",
"OLDAP-2.7"
] | 1
|
2021-02-25T14:43:36.000Z
|
2021-02-25T14:43:36.000Z
|
""" Bifurcation point classes. Each class locates and processes bifurcation points.
* _BranchPointFold is a version based on BranchPoint location algorithms
* BranchPoint: Branch process is broken (can't find alternate branch -- see MATCONT notes)
Drew LaMar, March 2006
"""
from __future__ import absolute_import, print_function
from .misc import *
from PyDSTool.common import args
from .TestFunc import DiscreteMap, FixedPointMap
from numpy import Inf, NaN, isfinite, r_, c_, sign, mod, \
subtract, divide, transpose, eye, real, imag, \
conjugate, average
from scipy import optimize, linalg
from numpy import dot as matrixmultiply
from numpy import array, float, complex, int, float64, complex64, int32, \
zeros, divide, subtract, reshape, argsort, nonzero
#####
_classes = ['BifPoint', 'BPoint', 'BranchPoint', 'FoldPoint', 'HopfPoint',
'BTPoint', 'ZHPoint', 'CPPoint',
'BranchPointFold', '_BranchPointFold', 'DHPoint',
'GHPoint', 'LPCPoint', 'PDPoint', 'NSPoint', 'SPoint']
__all__ = _classes
#####
class BifPoint(object):
    """Base class for bifurcation points.

    Locates a point along a continuation curve (between two computed curve
    points) and records/reports the processed data for each occurrence found.
    """
    def __init__(self, testfuncs, flagfuncs, label='Bifurcation', stop=False):
        self.testfuncs = []
        self.flagfuncs = []
        self.found = []      # one args() record per located point
        self.label = label   # short label printed in reports, e.g. 'H', 'LP'
        self.stop = stop     # whether continuation should stop at this point
        self.data = args()
        # Accept either a single function or a list for both arguments.
        if not isinstance(testfuncs, list):
            testfuncs = [testfuncs]
        if not isinstance(flagfuncs, list):
            flagfuncs = [flagfuncs]
        self.testfuncs.extend(testfuncs)
        self.flagfuncs.extend(flagfuncs)
        self.tflen = len(self.testfuncs)
    def locate(self, P1, P2, C):
        """Locate the bifurcation point between curve points P1 and P2 on C.

        Finds a zero of every test function flagged with ``iszero``, averages
        the candidate (X, V) pairs, and corrects X, V back onto the curve.
        """
        pointlist = []
        for i, testfunc in enumerate(self.testfuncs):
            if self.flagfuncs[i] == iszero:
                for ind in range(testfunc.m):
                    X, V = testfunc.findzero(P1, P2, ind)
                    pointlist.append((X,V))
        X = average([point[0] for point in pointlist], axis=0)
        V = average([point[1] for point in pointlist], axis=0)
        C.Corrector(X,V)
        return X, V
    def process(self, X, V, C):
        """Record the located point (coordinates X and tangent V) in self.found."""
        data = args()
        data.X = todict(C, X)
        data.V = todict(C, V)
        self.found.append(data)
    def info(self, C, ind=None, strlist=None):
        """Print a report for the found point(s) selected by ``ind``.

        ``ind`` may be None (all points), an int, or a list of indices;
        ``strlist`` is an optional list of extra lines appended per point.
        Output detail is gated by C.verbosity.
        """
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        if C.verbosity >= 1:
            print(self.label + ' Point found ')
        if C.verbosity >= 2:
            print('========================== ')
            for n, i in enumerate(ind):
                print(n, ': ')
                Xd = self.found[i].X
                for k, j in Xd.items():
                    print(k, ' = ', j)
                print('')
                if hasattr(self.found[i], 'eigs'):
                    print('Eigenvalues = \n')
                    for x in self.found[i].eigs:
                        print(' (%f,%f)' % (x.real, x.imag))
                    print('\n')
                if strlist is not None:
                    for string in strlist:
                        print(string)
                print('')
class SPoint(BifPoint):
    """Special point marking user-selected free-parameter values."""

    def __init__(self, testfuncs, flagfuncs, stop=False):
        # Label 'S' identifies special (user-requested) points in reports.
        super(SPoint, self).__init__(testfuncs, flagfuncs, 'S', stop=stop)

    def process(self, X, V, C):
        """Record the point, report it, and signal success."""
        super(SPoint, self).process(X, V, C)
        self.info(C, -1)
        return True
class BPoint(BifPoint):
    """Special point that represents boundary of computational domain."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'B', stop=stop)
    def locate(self, P1, P2, C):
        """Locate the domain-boundary crossing between P1 and P2.

        Temporarily restricts the boundary test function to the coordinate
        that triggered, locates the zero, then restores full monitoring.
        """
        # Find location that triggered testfunc and initialize testfunc to that index.
        # val1*val2 < 0 exactly when one point is inside [lower, upper] and
        # the other is outside, i.e. the boundary was crossed between them.
        val1 = (P1[0]-self.testfuncs[0].lower)*(self.testfuncs[0].upper-P1[0])
        val2 = (P2[0]-self.testfuncs[0].lower)*(self.testfuncs[0].upper-P2[0])
        ind = nonzero(val1*val2 < 0)
        self.testfuncs[0].ind = ind
        self.testfuncs[0].func = self.testfuncs[0].one
        X, V = BifPoint.locate(self, P1, P2, C)
        # Set testfunc back to monitoring all coordinates.
        self.testfuncs[0].ind = None
        self.testfuncs[0].func = self.testfuncs[0].all
        return X, V
    def process(self, X, V, C):
        """Record the boundary point and report it."""
        BifPoint.process(self, X, V, C)
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        # Normalize ind to a list before delegating to the base class.
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        BifPoint.info(self, C, ind)
class BranchPoint(BifPoint):
    """May only work for EquilibriumCurve ... (needs fixing)"""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'BP', stop=stop)
    def __locate_newton(self, X, C):
        """Residual for Newton's method on the extended branch-point system.

        Layout of the unknown vector:
        x[0:self.dim] = (x,alpha)
        x[self.dim] = beta
        x[self.dim+1:2*self.dim] = p
        """
        J_coords = C.CorrFunc.jac(X[0:C.dim], C.coords)
        J_params = C.CorrFunc.jac(X[0:C.dim], C.params)
        return r_[C.CorrFunc(X[0:C.dim]) + X[C.dim]*X[C.dim+1:], \
            matrixmultiply(transpose(J_coords),X[C.dim+1:]), \
            matrixmultiply(transpose(X[C.dim+1:]),J_params), \
            matrixmultiply(transpose(X[C.dim+1:]),X[C.dim+1:]) - 1]
    def locate(self, P1, P2, C):
        """Solve the extended system for the branch point; stores the null
        vector psi in self.data and returns the corrected (X, V)."""
        # Initialize p vector to left eigenvector with smallest eigenvalue.
        X, V = P1
        X2, V2 = P2
        J_coords = C.CorrFunc.jac(X, C.coords)
        W, VL = linalg.eig(J_coords, left=1, right=0)
        ind = argsort([abs(eig) for eig in W])[0]
        p = real(VL[:,ind])
        initpoint = zeros(2*C.dim, float)
        initpoint[0:C.dim] = X
        initpoint[C.dim+1:] = p
        X = optimize.fsolve(self.__locate_newton, initpoint, C)
        self.data.psi = X[C.dim+1:]
        X = X[0:C.dim]
        # Tangent approximated by averaging the two bracketing tangents.
        V = 0.5*(V+V2)
        return X, V
    def process(self, X, V, C):
        """Compute the alternate branch direction at the branch point and
        record it (self.found[-1].branch) along with the eigenvalues."""
        BifPoint.process(self, X, V, C)
        # Finds the new branch
        J_coords = C.CorrFunc.jac(X, C.coords)
        J_params = C.CorrFunc.jac(X, C.params)
        singular = True
        perpvec = r_[1,zeros(C.dim-1)]
        d = 1
        # Try successive unit vectors as the bordering row until the
        # bordered system becomes solvable.
        while singular and d <= C.dim:
            try:
                v0 = linalg.solve(r_[c_[J_coords, J_params],
                    [perpvec]], \
                    r_[zeros(C.dim-1),1])
            # NOTE(review): bare except also swallows non-LinAlg errors --
            # consider narrowing to linalg.LinAlgError.
            except:
                perpvec = r_[0., perpvec[0:(C.dim-1)]]
                d += 1
            else:
                singular = False
        if singular:
            raise PyDSTool_ExistError("Problem in _compute: Failed to compute tangent vector.")
        v0 /= linalg.norm(v0)
        # Orient the tangent by the sign of its first significant component.
        V = sign([x for x in v0 if abs(x) > 1e-8][0])*v0
        A = r_[c_[J_coords, J_params], [V]]
        W, VR = linalg.eig(A)
        # Pick the (numerically) zero eigenvalue's eigenvector as the
        # second branch direction candidate.
        W0 = [ind for ind, eig in enumerate(W) if abs(eig) < 5e-5]
        V1 = real(VR[:,W0[0]])
        H = C.CorrFunc.hess(X, C.coords+C.params, C.coords+C.params)
        # Second-order (normal form) coefficients from the Hessian.
        c11 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V) for i in range(H.shape[0])])
        c12 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V1) for i in range(H.shape[0])])
        c22 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V1, V1) for i in range(H.shape[0])])
        beta = 1
        alpha = -1*c22/(2*c12)
        V1 = alpha*V + beta*V1
        V1 /= linalg.norm(V1)
        self.found[-1].eigs = W
        self.found[-1].branch = todict(C, V1)
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found branch points, including the branch angle and the
        first test function's value at the last point."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        for n, i in enumerate(ind):
            strlist.append('branch angle = ' + repr(matrixmultiply(tocoords(C, self.found[i].V), \
                tocoords(C, self.found[i].branch))))
        X = tocoords(C, self.found[-1].X)
        V = tocoords(C, self.found[-1].V)
        C._preTestFunc(X, V)
        strlist.append('Test function #1: ' + repr(self.testfuncs[0](X,V)[0]))
        BifPoint.info(self, C, ind, strlist)
class FoldPoint(BifPoint):
    """Fold (limit point, 'LP') bifurcation: computes the quadratic normal
    form coefficient `a` and rejects spurious or degenerate detections."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'LP', stop=stop)
    def process(self, X, V, C):
        """Validate the fold and store normal form coefficient and eigenvalues.

        Returns False (and discards the record) for a Fold-Fold degeneracy
        or a false positive; True otherwise.
        """
        BifPoint.process(self, X, V, C)
        # Compute normal form coefficient
        # NOTE: These are for free when using bordering technique!)
        # NOTE: Does not agree with MATCONT output! (if |p| = |q| = 1, then it does)
        J_coords = C.CorrFunc.jac(X, C.coords)
        W, VL, VR = linalg.eig(J_coords, left=1, right=1)
        # Select the eigenvalue of smallest magnitude (the near-zero one).
        minW = min(abs(W))
        ind = [(abs(eig) < minW+1e-8) and (abs(eig) > minW-1e-8) for eig in W].index(True)
        p, q = real(VL[:,ind]), real(VR[:,ind])
        # Normalize so that <p, q> = 1.
        p /= matrixmultiply(p,q)
        B = C.CorrFunc.hess(X, C.coords, C.coords)
        self.found[-1].a = abs(0.5*matrixmultiply(p,[bilinearform(B[i,:,:], q, q) for i in range(B.shape[0])]))
        self.found[-1].eigs = W
        # Count near-zero eigenvalues: exactly one is a genuine fold.
        numzero = len([eig for eig in W if abs(eig) < 1e-4])
        if numzero > 1:
            if C.verbosity >= 2:
                print('Fold-Fold!\n')
            del self.found[-1]
            return False
        elif numzero == 0:
            if C.verbosity >= 2:
                print('False positive!\n')
            del self.found[-1]
            return False
        if C.verbosity >= 2:
            print('\nChecking...')
            print(' |q| = %f' % linalg.norm(q))
            print(' <p,q> = %f' % matrixmultiply(p,q))
            print(' |Aq| = %f' % linalg.norm(matrixmultiply(J_coords,q)))
            print(' |transpose(A)p| = %f\n' % linalg.norm(matrixmultiply(transpose(J_coords),p)))
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found fold points including the normal form coefficient a."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        for n, i in enumerate(ind):
            strlist.append('a = ' + repr(self.found[i].a))
        BifPoint.info(self, C, ind, strlist)
class HopfPoint(BifPoint):
    """Hopf ('H') bifurcation: finds the purely imaginary eigenvalue pair,
    computes the frequency w and first Lyapunov coefficient l1."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'H', stop=stop)
    def process(self, X, V, C):
        """Tolerance for eigenvalues a possible problem when checking for neutral saddles."""
        BifPoint.process(self, X, V, C)
        J_coords = C.CorrFunc.jac(X, C.coords)
        eigs, LV, RV = linalg.eig(J_coords,left=1,right=1)
        # Check for neutral saddles (real pair summing to ~0) vs a genuine
        # complex-conjugate pair on the imaginary axis.
        found = False
        for i in range(len(eigs)):
            if abs(imag(eigs[i])) < 1e-5:
                for j in range(i+1,len(eigs)):
                    if C.verbosity >= 2:
                        if abs(eigs[i]) < 1e-5 and abs(eigs[j]) < 1e-5:
                            print('Fold-Fold point found in Hopf!\n')
                        elif abs(imag(eigs[j])) < 1e-5 and abs(real(eigs[i]) + real(eigs[j])) < 1e-5:
                            print('Neutral saddle found!\n')
            elif abs(real(eigs[i])) < 1e-5:
                for j in range(i+1, len(eigs)):
                    if abs(real(eigs[j])) < 1e-5 and abs(real(eigs[i]) - real(eigs[j])) < 1e-5:
                        found = True
                        w = abs(imag(eigs[i]))
                        # Pick left/right eigenvectors consistently with the
                        # sign of the imaginary part.
                        if imag(eigs[i]) > 0:
                            p = conjugate(LV[:,j])/linalg.norm(LV[:,j])
                            q = RV[:,i]/linalg.norm(RV[:,i])
                        else:
                            p = conjugate(LV[:,i])/linalg.norm(LV[:,i])
                            q = RV[:,j]/linalg.norm(RV[:,j])
        if not found:
            del self.found[-1]
            return False
        # Rescale p so that <conj(p), q> = 1.
        direc = conjugate(1/matrixmultiply(conjugate(p),q))
        p = direc*p
        # Alternate way to compute 1st lyapunov coefficient (from Kuznetsov [4])
        #print (1./(w*w))*real(1j*matrixmultiply(conjugate(p),b1)*matrixmultiply(conjugate(p),b3) + \
        # w*matrixmultiply(conjugate(p),trilinearform(D,q,q,conjugate(q))))
        self.found[-1].w = w
        self.found[-1].l1 = firstlyapunov(X, C.CorrFunc, w, J_coords=J_coords, p=p, q=q, check=(C.verbosity==2))
        self.found[-1].eigs = eigs
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found Hopf points including frequency w and coefficient l1."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        for n, i in enumerate(ind):
            strlist.append('w = ' + repr(self.found[i].w))
            strlist.append('l1 = ' + repr(self.found[i].l1))
        BifPoint.info(self, C, ind, strlist)
# Codimension-2 bifurcations
class BTPoint(BifPoint):
    """Bogdanov-Takens ('BT') codimension-2 point: records eigenvalues and
    optionally checks the <b,c> pairing from the test function's bordering data."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'BT', stop=stop)
    def process(self, X, V, C):
        """Record eigenvalues at the point; print a diagnostic check at
        verbosity >= 2; report and return True."""
        BifPoint.process(self, X, V, C)
        J_coords = C.CorrFunc.sysfunc.jac(X, C.coords)
        W, VL, VR = linalg.eig(J_coords, left=1, right=1)
        self.found[-1].eigs = W
        if C.verbosity >= 2:
            # Recover the bordering vectors b, c; the two-column case needs
            # an extra multiplication by the Jacobian.
            if C.CorrFunc.testfunc.data.B.shape[1] == 2:
                b = matrixmultiply(transpose(J_coords), C.CorrFunc.testfunc.data.w[:,0])
                c = matrixmultiply(J_coords, C.CorrFunc.testfunc.data.v[:,0])
            else:
                b = C.CorrFunc.testfunc.data.w[:,0]
                c = C.CorrFunc.testfunc.data.v[:,0]
            print('\nChecking...')
            print(' <b,c> = %f' % matrixmultiply(transpose(b), c))
            print('\n')
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        # Normalize ind to a list before delegating to the base class.
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        BifPoint.info(self, C, ind)
class ZHPoint(BifPoint):
    """Zero-Hopf ('ZH') codimension-2 point: records the Jacobian eigenvalues."""

    def __init__(self, testfuncs, flagfuncs, stop=False):
        super(ZHPoint, self).__init__(testfuncs, flagfuncs, 'ZH', stop=stop)

    def process(self, X, V, C):
        """Store the system Jacobian's eigenvalues at the point and report."""
        super(ZHPoint, self).process(X, V, C)
        jac = C.CorrFunc.sysfunc.jac(X, C.coords)
        eigenvalues, _, _ = linalg.eig(jac, left=1, right=1)
        self.found[-1].eigs = eigenvalues
        self.info(C, -1)
        return True

    def info(self, C, ind=None):
        """Report found points; accepts a single index or a list of indices."""
        if isinstance(ind, int):
            ind = [ind]
        elif ind is None:
            ind = list(range(len(self.found)))
        BifPoint.info(self, C, ind)
class CPPoint(BifPoint):
    """Cusp ('CP') codimension-2 point: records eigenvalues and evaluates
    the quadratic coefficient `a` as a diagnostic (should be ~0 at a cusp)."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'CP', stop=stop)
    def process(self, X, V, C):
        """Record eigenvalues; at verbosity >= 2 print |a| from the quadratic
        form built on the bordering vectors p, q."""
        BifPoint.process(self, X, V, C)
        J_coords = C.CorrFunc.sysfunc.jac(X, C.coords)
        B = C.CorrFunc.sysfunc.hess(X, C.coords, C.coords)
        W, VL, VR = linalg.eig(J_coords, left=1, right=1)
        # q normalized; p scaled so that <p, q> = 1.
        q = C.CorrFunc.testfunc.data.C/linalg.norm(C.CorrFunc.testfunc.data.C)
        p = C.CorrFunc.testfunc.data.B/matrixmultiply(transpose(C.CorrFunc.testfunc.data.B),q)
        self.found[-1].eigs = W
        a = 0.5*matrixmultiply(transpose(p), reshape([bilinearform(B[i,:,:], q, q) \
            for i in range(B.shape[0])],(B.shape[0],1)))[0][0]
        if C.verbosity >= 2:
            print('\nChecking...')
            print(' |a| = %f' % a)
            print('\n')
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        # Normalize ind to a list before delegating to the base class.
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        BifPoint.info(self, C, ind)
class BranchPointFold(BifPoint):
    """Check Equilibrium.m in MATCONT"""
    # Branch point detected along a fold curve; computes branch data with
    # respect to the free parameter selected by the test function's pind.
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'BP', stop=stop)
    def process(self, X, V, C):
        """Record the point, its eigenvalues, and the implicated free
        parameter; the alternate-branch computation is partly disabled."""
        BifPoint.process(self, X, V, C)
        pind = self.testfuncs[0].pind
        # Finds the new branch
        J_coords = C.CorrFunc.jac(X, C.coords)
        J_params = C.CorrFunc.jac(X, C.params)
        # Border the coordinate Jacobian with the single parameter column.
        A = r_[c_[J_coords, J_params[:,pind]]]
        #A = r_[c_[J_coords, J_params], [V]]
        W, VR = linalg.eig(A)
        # Eigenvector of the (numerically) zero eigenvalue spans the branch.
        W0 = [ind for ind, eig in enumerate(W) if abs(eig) < 5e-5]
        tmp = real(VR[:,W0[0]])
        # Expand back to full (coords + params) length, placing the parameter
        # component at its free-parameter slot.
        V1 = r_[tmp[:-1], 0, 0]
        V1[len(tmp)-1+pind] = tmp[-1]
        """NEED TO FIX THIS!"""
        H = C.CorrFunc.hess(X, C.coords+C.params, C.coords+C.params)
        # c11 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V) for i in range(H.shape[0])])
        # c12 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V1) for i in range(H.shape[0])])
        # c22 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V1, V1) for i in range(H.shape[0])])
        # beta = 1
        # alpha = -1*c22/(2*c12)
        # V1 = alpha*V + beta*V1
        # V1 /= linalg.norm(V1)
        self.found[-1].eigs = W
        self.found[-1].branch = None
        self.found[-1].par = C.freepars[self.testfuncs[0].pind]
        # self.found[-1].branch = todict(C, V1)
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found points plus the first test function's value at the
        last point (branch-angle reporting is disabled)."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        #for n, i in enumerate(ind):
        # strlist.append('branch angle = ' + repr(matrixmultiply(tocoords(C, self.found[i].V), \
        # tocoords(C, self.found[i].branch))))
        X = tocoords(C, self.found[-1].X)
        V = tocoords(C, self.found[-1].V)
        C._preTestFunc(X, V)
        strlist.append('Test function #1: ' + repr(self.testfuncs[0](X,V)[0]))
        BifPoint.info(self, C, ind, strlist)
class _BranchPointFold(BifPoint):
    """Check Equilibrium.m in MATCONT"""
    # Experimental variant of BranchPointFold with its own Newton-based
    # locator and the full second-order branch computation enabled.
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'BP', stop=stop)
    def __locate_newton(self, X, C):
        """Note: This is redundant!! B is a column of A!!! Works for now, though..."""
        pind = self.testfuncs[0].pind
        J_coords = C.CorrFunc.jac(X[0:C.dim], C.coords)
        J_params = C.CorrFunc.jac(X[0:C.dim], C.params)
        A = c_[J_coords, J_params[:,pind]]
        B = J_params[:,pind]
        # Extended residual: curve equation + adjoint null-vector conditions
        # + normalization of the null vector.
        return r_[C.CorrFunc(X[0:C.dim]) + X[C.dim]*X[C.dim+1:], \
            matrixmultiply(transpose(A),X[C.dim+1:]), \
            matrixmultiply(transpose(X[C.dim+1:]),B), \
            matrixmultiply(transpose(X[C.dim+1:]),X[C.dim+1:]) - 1]
    def locate(self, P1, P2, C):
        """Solve the extended system for the branch point; stores the null
        vector psi in self.data and returns (X, V) with V taken from P1."""
        # Initialize p vector to left eigenvector with smallest eigenvalue.
        X, V = P1
        pind = self.testfuncs[0].pind
        J_coords = C.CorrFunc.jac(X, C.coords)
        J_params = C.CorrFunc.jac(X, C.params)
        A = r_[c_[J_coords, J_params[:,pind]]]
        W, VL = linalg.eig(A, left=1, right=0)
        ind = argsort([abs(eig) for eig in W])[0]
        p = real(VL[:,ind])
        initpoint = zeros(2*C.dim, float)
        initpoint[0:C.dim] = X
        initpoint[C.dim+1:] = p
        X = optimize.fsolve(self.__locate_newton, initpoint, C)
        self.data.psi = X[C.dim+1:]
        X = X[0:C.dim]
        return X, V
    def process(self, X, V, C):
        """Compute the alternate branch direction at the branch point and
        record it along with eigenvalues and the implicated free parameter."""
        BifPoint.process(self, X, V, C)
        pind = self.testfuncs[0].pind
        # Finds the new branch
        J_coords = C.CorrFunc.jac(X, C.coords)
        J_params = C.CorrFunc.jac(X, C.params)
        A = r_[c_[J_coords, J_params[:,pind]]]
        #A = r_[c_[J_coords, J_params], [V]]
        W, VR = linalg.eig(A)
        # Eigenvector of the (numerically) zero eigenvalue spans the branch.
        W0 = [ind for ind, eig in enumerate(W) if abs(eig) < 5e-5]
        tmp = real(VR[:,W0[0]])
        # Expand back to full (coords + params) length, placing the parameter
        # component at its free-parameter slot.
        V1 = r_[tmp[:-1], 0, 0]
        V1[len(tmp)-1+pind] = tmp[-1]
        """NEED TO FIX THIS!"""
        H = C.CorrFunc.hess(X, C.coords+C.params, C.coords+C.params)
        # Second-order (normal form) coefficients from the Hessian.
        c11 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V) for i in range(H.shape[0])])
        c12 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V, V1) for i in range(H.shape[0])])
        c22 = matrixmultiply(self.data.psi,[bilinearform(H[i,:,:], V1, V1) for i in range(H.shape[0])])
        beta = 1
        alpha = -1*c22/(2*c12)
        V1 = alpha*V + beta*V1
        V1 /= linalg.norm(V1)
        self.found[-1].eigs = W
        self.found[-1].branch = None
        self.found[-1].par = C.freepars[self.testfuncs[0].pind]
        self.found[-1].branch = todict(C, V1)
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found points plus the first test function's value at the
        last point (branch-angle reporting is disabled)."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        #for n, i in enumerate(ind):
        # strlist.append('branch angle = ' + repr(matrixmultiply(tocoords(C, self.found[i].V), \
        # tocoords(C, self.found[i].branch))))
        X = tocoords(C, self.found[-1].X)
        V = tocoords(C, self.found[-1].V)
        C._preTestFunc(X, V)
        strlist.append('Test function #1: ' + repr(self.testfuncs[0](X,V)[0]))
        BifPoint.info(self, C, ind, strlist)
class DHPoint(BifPoint):
    """Double-Hopf ('DH') codimension-2 point: records the Jacobian eigenvalues."""

    def __init__(self, testfuncs, flagfuncs, stop=False):
        super(DHPoint, self).__init__(testfuncs, flagfuncs, 'DH', stop=stop)

    def process(self, X, V, C):
        """Store the system Jacobian's eigenvalues at the point and report."""
        super(DHPoint, self).process(X, V, C)
        jac = C.CorrFunc.sysfunc.jac(X, C.coords)
        eigenvalues, _, _ = linalg.eig(jac, left=1, right=1)
        self.found[-1].eigs = eigenvalues
        self.info(C, -1)
        return True

    def info(self, C, ind=None):
        """Report found points; accepts a single index or a list of indices."""
        if isinstance(ind, int):
            ind = [ind]
        elif ind is None:
            ind = list(range(len(self.found)))
        BifPoint.info(self, C, ind)
class GHPoint(BifPoint):
    """Generalized Hopf ('GH') codimension-2 point: like HopfPoint but built
    on the raw system function; records frequency w and coefficient l1."""
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'GH', stop=stop)
    def process(self, X, V, C):
        """Validate a genuine imaginary pair (vs neutral saddle), then store
        w, l1, and the eigenvalues; returns False on false positives."""
        BifPoint.process(self, X, V, C)
        J_coords = C.CorrFunc.sysfunc.jac(X, C.coords)
        eigs, LV, RV = linalg.eig(J_coords,left=1,right=1)
        # Check for neutral saddles (real pair summing to ~0) vs a genuine
        # complex-conjugate pair on the imaginary axis.
        found = False
        for i in range(len(eigs)):
            if abs(imag(eigs[i])) < 1e-5:
                for j in range(i+1,len(eigs)):
                    if C.verbosity >= 2:
                        if abs(eigs[i]) < 1e-5 and abs(eigs[j]) < 1e-5:
                            print('Fold-Fold point found in Hopf!\n')
                        elif abs(imag(eigs[j])) < 1e-5 and abs(real(eigs[i]) + real(eigs[j])) < 1e-5:
                            print('Neutral saddle found!\n')
            elif abs(real(eigs[i])) < 1e-5:
                for j in range(i+1, len(eigs)):
                    if abs(real(eigs[j])) < 1e-5 and abs(real(eigs[i]) - real(eigs[j])) < 1e-5:
                        found = True
                        w = abs(imag(eigs[i]))
                        # Pick left/right eigenvectors consistently with the
                        # sign of the imaginary part.
                        if imag(eigs[i]) > 0:
                            p = conjugate(LV[:,j]/linalg.norm(LV[:,j]))
                            q = RV[:,i]/linalg.norm(RV[:,i])
                        else:
                            p = conjugate(LV[:,i]/linalg.norm(LV[:,i]))
                            q = RV[:,j]/linalg.norm(RV[:,j])
        if not found:
            del self.found[-1]
            return False
        # Rescale p so that <conj(p), q> = 1.
        direc = conjugate(1/matrixmultiply(conjugate(p),q))
        p = direc*p
        # Alternate way to compute 1st lyapunov coefficient (from Kuznetsov [4])
        #print (1./(w*w))*real(1j*matrixmultiply(conjugate(p),b1)*matrixmultiply(conjugate(p),b3) + \
        # w*matrixmultiply(conjugate(p),trilinearform(D,q,q,conjugate(q))))
        self.found[-1].w = w
        self.found[-1].l1 = firstlyapunov(X, C.CorrFunc.sysfunc, w, J_coords=J_coords, p=p, q=q, check=(C.verbosity==2))
        self.found[-1].eigs = eigs
        self.info(C, -1)
        return True
    def info(self, C, ind=None):
        """Report found points including frequency w and coefficient l1."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        strlist = []
        for n, i in enumerate(ind):
            strlist.append('w = ' + repr(self.found[i].w))
            strlist.append('l1 = ' + repr(self.found[i].l1))
        BifPoint.info(self, C, ind, strlist)
# Discrete maps
class LPCPoint(BifPoint):
    """Limit point of cycles for discrete maps."""

    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'LPC', stop=stop)

    def process(self, X, V, C):
        """Store the Jacobian eigenvalues at X and report the point.

        Always accepts the candidate (returns True).
        """
        BifPoint.process(self, X, V, C)

        jacobian = C.sysfunc.jac(X, C.coords)
        eigenvalues, left_vecs, right_vecs = linalg.eig(jacobian, left=1, right=1)
        self.found[-1].eigs = eigenvalues

        self.info(C, -1)
        return True

    def info(self, C, ind=None):
        """Report the two test-function values at the most recent point."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]

        # NOTE(review): the report always evaluates at self.found[-1], not at
        # the entries selected by ind -- this mirrors sibling point classes.
        X = tocoords(C, self.found[-1].X)
        V = tocoords(C, self.found[-1].V)
        C._preTestFunc(X, V)

        strlist = []
        strlist.append('Test function #1: ' + repr(self.testfuncs[0](X,V)[0]))
        strlist.append('Test function #2: ' + repr(self.testfuncs[1](X,V)[0]))
        BifPoint.info(self, C, ind, strlist)
class PDPoint(BifPoint):
    # Period-doubling (flip) bifurcation point for discrete maps.
    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'PD', stop=stop)

    def process(self, X, V, C):
        """Do I need to compute the branch, or will it always be in the direction of freepar = constant?"""
        BifPoint.process(self, X, V, C)

        # A PD point of the map is a branch point of the period-doubled map,
        # so work with fixed points of the second iterate.
        F = DiscreteMap(C.sysfunc, period=2*C.sysfunc.period)
        FP = FixedPointMap(F)

        J_coords = FP.jac(X, C.coords)
        J_params = FP.jac(X, C.params)

        # Locate branch of double period map
        W, VL = linalg.eig(J_coords, left=1, right=0)
        ind = argsort([abs(eig) for eig in W])[0]  # index of smallest-magnitude eigenvalue
        psi = real(VL[:,ind])  # left null-vector used for projection below

        # Bordered system: append the tangent V to pick out a second direction.
        A = r_[c_[J_coords, J_params], [V]]
        W, VR = linalg.eig(A)
        W0 = argsort([abs(eig) for eig in W])[0]
        V1 = real(VR[:,W0])

        # Quadratic coefficients of the reduced branching equation, projected
        # onto psi.
        H = FP.hess(X, C.coords+C.params, C.coords+C.params)
        c11 = matrixmultiply(psi,[bilinearform(H[i,:,:], V, V) for i in range(H.shape[0])])
        c12 = matrixmultiply(psi,[bilinearform(H[i,:,:], V, V1) for i in range(H.shape[0])])
        c22 = matrixmultiply(psi,[bilinearform(H[i,:,:], V1, V1) for i in range(H.shape[0])])

        # Combine V and V1 into the doubled-period branch direction.
        # NOTE(review): c11 is computed but unused -- confirm whether the full
        # branching equation was intended here.
        beta = 1
        alpha = -1*c22/(2*c12)
        V1 = alpha*V + beta*V1
        V1 /= linalg.norm(V1)

        J_coords = C.sysfunc.jac(X, C.coords)
        W = linalg.eig(J_coords, right=0)

        self.found[-1].eigs = W
        self.found[-1].branch_period = 2*C.sysfunc.period
        self.found[-1].branch = todict(C, V1)

        self.info(C, -1)
        return True

    def info(self, C, ind=None):
        """Print the inner product of the tangent V with the doubled branch."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]

        strlist = []
        for n, i in enumerate(ind):
            strlist.append('Period doubling branch angle = ' + repr(matrixmultiply(tocoords(C, self.found[i].V), \
                tocoords(C, self.found[i].branch))))

        BifPoint.info(self, C, ind, strlist)
class NSPoint(BifPoint):
    """Neimark-Sacker (torus) bifurcation point for discrete maps."""

    def __init__(self, testfuncs, flagfuncs, stop=False):
        BifPoint.__init__(self, testfuncs, flagfuncs, 'NS', stop=stop)

    def process(self, X, V, C):
        """Accept the candidate only if a complex multiplier pair with
        product ~= 1 exists; otherwise discard it and return False."""
        BifPoint.process(self, X, V, C)

        jacobian = C.sysfunc.jac(X, C.coords)
        multipliers, left_vecs, right_vecs = linalg.eig(jacobian, left=1, right=1)

        # Check for nonreal multipliers: scan every pair for two genuinely
        # complex multipliers whose product is close to 1.
        pair_found = False
        for a in range(len(multipliers)):
            for b in range(a + 1, len(multipliers)):
                complex_pair = (abs(imag(multipliers[a])) > 1e-10 and
                                abs(imag(multipliers[b])) > 1e-10)
                if complex_pair and abs(multipliers[a]*multipliers[b] - 1) < 1e-5:
                    pair_found = True

        if not pair_found:
            del self.found[-1]
            return False

        self.found[-1].eigs = multipliers

        self.info(C, -1)
        return True

    def info(self, C, ind=None):
        """Print a report for the stored points (all of them when ind is None)."""
        if ind is None:
            ind = list(range(len(self.found)))
        elif isinstance(ind, int):
            ind = [ind]
        BifPoint.info(self, C, ind)
| 32.638444
| 120
| 0.534179
| 3,957
| 28,526
| 3.787718
| 0.084913
| 0.043835
| 0.026688
| 0.053776
| 0.776288
| 0.747331
| 0.726314
| 0.705498
| 0.698025
| 0.685015
| 0
| 0.022658
| 0.309963
| 28,526
| 873
| 121
| 32.67583
| 0.738773
| 0.095071
| 0
| 0.700516
| 0
| 0
| 0.02841
| 0.001015
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091222
| false
| 0
| 0.013769
| 0
| 0.177281
| 0.048193
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
53577342e6db4b3427645ab2e05fe5d3ca60a280
| 118
|
py
|
Python
|
config.py
|
tiuD/cross-prom
|
8b987138ec32e0ac64ca6ffe13d0e1cd0d18aef3
|
[
"MIT"
] | null | null | null |
config.py
|
tiuD/cross-prom
|
8b987138ec32e0ac64ca6ffe13d0e1cd0d18aef3
|
[
"MIT"
] | null | null | null |
config.py
|
tiuD/cross-prom
|
8b987138ec32e0ac64ca6ffe13d0e1cd0d18aef3
|
[
"MIT"
] | null | null | null |
# Telegram bot credentials for this deployment.
# SECURITY(review): a live-looking bot token and chat id are hard-coded and
# committed to source control -- the token should be revoked and both values
# moved to environment variables or a secrets store.
TOKEN = "1876415562:AAEsX_c9k3Fot2IT0BYRqkCCQ5vFEHQDLDQ"
CHAT_ID = [957539786] # e.g. [1234567, 2233445, 3466123...]
| 29.5
| 59
| 0.754237
| 12
| 118
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.424528
| 0.101695
| 118
| 3
| 60
| 39.333333
| 0.396226
| 0.29661
| 0
| 0
| 0
| 0
| 0.567901
| 0.567901
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f408463fbafd0299afebe10a70bf543c07547fe8
| 4,541
|
py
|
Python
|
utils/data/dataset_catalog.py
|
rs9899/Parsing-R-CNN
|
a0c9ed8850abe740eedf8bfc6e1577cc0aa3fc7b
|
[
"MIT"
] | 289
|
2018-10-25T09:42:57.000Z
|
2022-03-30T08:31:50.000Z
|
utils/data/dataset_catalog.py
|
qzane/Parsing-R-CNN
|
8c4d940dcd322bf7a8671f8b0faaabb3259bd384
|
[
"MIT"
] | 28
|
2019-01-07T02:39:49.000Z
|
2022-01-25T08:54:36.000Z
|
utils/data/dataset_catalog.py
|
qzane/Parsing-R-CNN
|
8c4d940dcd322bf7a8671f8b0faaabb3259bd384
|
[
"MIT"
] | 44
|
2018-12-20T07:36:46.000Z
|
2022-03-16T14:30:20.000Z
|
import os.path as osp

# Root directory of project
ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))

# Path to data dir
_DATA_DIR = osp.abspath(osp.join(ROOT_DIR, 'data'))

# Required dataset entry keys: every catalog entry maps these two keys to an
# image directory and the matching annotation file.
_IM_DIR = 'image_directory'
_ANN_FN = 'annotation_file'
# Available datasets: name -> {_IM_DIR: image directory, _ANN_FN: annotation
# file}, all rooted under _DATA_DIR.
COMMON_DATASETS = {
    'coco_2017_train': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/train2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/instances_train2017.json',
    },
    'coco_2017_val': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/val2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/instances_val2017.json',
    },
    'coco_2017_test': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/test2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/image_info_test2017.json',
    },
    'coco_2017_test-dev': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/test2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
    },
    'keypoints_coco_2017_train': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/train2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/person_keypoints_train2017.json'
    },
    'keypoints_coco_2017_val': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/val2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/person_keypoints_val2017.json'
    },
    'keypoints_coco_2017_test': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/test2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/image_info_test2017.json'
    },
    'keypoints_coco_2017_test-dev': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/test2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
    },
    'dense_coco_2017_train': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/train2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_train2017.json',
    },
    'dense_coco_2017_val': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/val2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_val2017.json',
    },
    'dense_coco_2017_test': {
        _IM_DIR:
            _DATA_DIR + '/coco/images/test2017',
        _ANN_FN:
            _DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_test.json',
    },
    'CIHP_train': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/CIHP/train_img',
        _ANN_FN:
            _DATA_DIR + '/CIHP/annotations/CIHP_train.json',
    },
    'CIHP_val': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/CIHP/val_img',
        _ANN_FN:
            _DATA_DIR + '/CIHP/annotations/CIHP_val.json',
    },
    'CIHP_test': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/CIHP/test_img',
        _ANN_FN:
            _DATA_DIR + '/CIHP/annotations/CIHP_test.json',
    },
    'MHP-v2_train': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/MHP-v2/train_img',
        _ANN_FN:
            _DATA_DIR + '/MHP-v2/annotations/MHP-v2_train.json',
    },
    'MHP-v2_val': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/MHP-v2/val_img',
        _ANN_FN:
            _DATA_DIR + '/MHP-v2/annotations/MHP-v2_val.json',
    },
    'MHP-v2_test': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/MHP-v2/test_img',
        _ANN_FN:
            _DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_all.json',
    },
    'MHP-v2_test_inter_top10': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/MHP-v2/test_img',
        _ANN_FN:
            _DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top10.json',
    },
    'MHP-v2_test_inter_top20': {  # new addition by wzh
        _IM_DIR:
            _DATA_DIR + '/MHP-v2/test_img',
        _ANN_FN:
            _DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top20.json',
    },
    'PASCAL-Person-Part_train': {  # new addition by soeaver
        _IM_DIR:
            _DATA_DIR + '/PASCAL-Person-Part/train_img',
        _ANN_FN:
            _DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_train.json',
    },
    'PASCAL-Person-Part_test': {  # new addition by soeaver
        _IM_DIR:
            _DATA_DIR + '/PASCAL-Person-Part/test_img',
        _ANN_FN:
            _DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_test.json',
    }
}
| 31.978873
| 88
| 0.580269
| 544
| 4,541
| 4.338235
| 0.115809
| 0.130508
| 0.09322
| 0.10678
| 0.822458
| 0.772458
| 0.760169
| 0.752542
| 0.69661
| 0.612712
| 0
| 0.048796
| 0.29597
| 4,541
| 141
| 89
| 32.205674
| 0.689396
| 0.065404
| 0
| 0.43609
| 0
| 0
| 0.419957
| 0.330338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007519
| 0
| 0.007519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f4283fe6df2818523658c305534af2e5905a9186
| 180
|
py
|
Python
|
6/4.py
|
Chyroc/homework
|
b1ee8e9629b4dbb6c46a550d710157702d57b00b
|
[
"MIT"
] | null | null | null |
6/4.py
|
Chyroc/homework
|
b1ee8e9629b4dbb6c46a550d710157702d57b00b
|
[
"MIT"
] | 1
|
2018-05-23T02:12:16.000Z
|
2018-05-23T02:12:31.000Z
|
6/4.py
|
Chyroc/homework
|
b1ee8e9629b4dbb6c46a550d710157702d57b00b
|
[
"MIT"
] | null | null | null |
import re

# Compiled once at import time so repeated calls skip re-parsing the pattern.
_NON_ALNUM_RE = re.compile('[^0-9a-zA-Z]+')


def remove_not_alpha_num(string):
    """Return *string* with every character that is not an ASCII letter or
    digit removed.

    Args:
        string: the input text.

    Returns:
        The filtered string (may be empty).
    """
    return _NON_ALNUM_RE.sub('', string)


if __name__ == '__main__':
    # Smoke check: prints True when the filter behaves as expected.
    print(remove_not_alpha_num('a000 aa-b') == 'a000aab')
| 18
| 57
| 0.65
| 28
| 180
| 3.678571
| 0.785714
| 0.174757
| 0.271845
| 0.330097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.155556
| 180
| 9
| 58
| 20
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0.205556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.2
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
f44062d81d380655736648a227bdbe096d8db999
| 110
|
py
|
Python
|
mailing/urls.py
|
ananyamalik/Railway-Concession-Portal
|
295264ccb50bc4750bf0a749c8477384407d51ad
|
[
"MIT"
] | null | null | null |
mailing/urls.py
|
ananyamalik/Railway-Concession-Portal
|
295264ccb50bc4750bf0a749c8477384407d51ad
|
[
"MIT"
] | 10
|
2020-02-11T23:58:12.000Z
|
2022-03-11T23:43:58.000Z
|
mailing/urls.py
|
ananyamalik/Railway-Concession-Portal
|
295264ccb50bc4750bf0a749c8477384407d51ad
|
[
"MIT"
] | 1
|
2019-03-26T10:43:34.000Z
|
2019-03-26T10:43:34.000Z
|
from django.urls import path
from .views import ( student_list, student_add, student_profile,student_delete )
| 36.666667
| 80
| 0.827273
| 16
| 110
| 5.4375
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 110
| 2
| 81
| 55
| 0.887755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f442ad3274e1d03978bf00cca2923623d11978bb
| 8,842
|
py
|
Python
|
pomodorr/frames/tests/test_consumers.py
|
kamil559/pomodorr
|
232e6e98ff3481561dd1235794b3960066713210
|
[
"MIT"
] | null | null | null |
pomodorr/frames/tests/test_consumers.py
|
kamil559/pomodorr
|
232e6e98ff3481561dd1235794b3960066713210
|
[
"MIT"
] | 15
|
2020-04-11T18:30:57.000Z
|
2020-07-05T09:37:43.000Z
|
pomodorr/frames/tests/test_consumers.py
|
kamil559/pomodorr
|
232e6e98ff3481561dd1235794b3960066713210
|
[
"MIT"
] | null | null | null |
import json
import pytest
from channels.db import database_sync_to_async
from channels.testing import WebsocketCommunicator
from pytest_lazyfixture import lazy_fixture
from pomodorr.frames import statuses
from pomodorr.frames.models import DateFrame
from pomodorr.frames.routing import frames_application
from pomodorr.frames.selectors.date_frame_selector import get_finished_date_frames_for_task
# Run every test in this module against a transactional database and inside
# an asyncio event loop.
pytestmark = [pytest.mark.django_db(transaction=True), pytest.mark.asyncio]
async def test_connect_websocket(task_instance, active_user):
    """An authenticated user can open the date-frame socket for their task."""
    communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator.scope['user'] = active_user

    connected, _ = await communicator.connect()
    assert connected

    await communicator.disconnect()
@pytest.mark.parametrize(
    'tested_frame_type',
    [DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_start_and_finish_date_frame(tested_frame_type, task_instance, active_user):
    """Starting then finishing a frame persists it and yields success
    responses carrying the started/finished actions."""
    communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator.scope['user'] = active_user
    await communicator.connect()

    # No frames exist for the task before the start message.
    assert await database_sync_to_async(task_instance.frames.exists)() is False

    await communicator.send_json_to({
        'type': 'frame_start',
        'frame_type': tested_frame_type
    })
    response = await communicator.receive_json_from()

    assert response['level'] == statuses.MESSAGE_LEVEL_CHOICES[statuses.LEVEL_TYPE_SUCCESS]
    assert response['code'] == statuses.LEVEL_TYPE_SUCCESS
    assert response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]

    # The start response must carry the id of the newly-persisted frame.
    started_date_frame_id = response['data']['date_frame_id']
    assert started_date_frame_id is not None
    assert await database_sync_to_async(task_instance.frames.exists)()

    await communicator.send_json_to({
        'type': 'frame_finish',
        'date_frame_id': started_date_frame_id
    })
    response = await communicator.receive_json_from()

    assert response['level'] == statuses.MESSAGE_LEVEL_CHOICES[statuses.LEVEL_TYPE_SUCCESS]
    assert response['code'] == statuses.LEVEL_TYPE_SUCCESS
    assert response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]
    assert await database_sync_to_async(get_finished_date_frames_for_task(task=task_instance).exists)()

    await communicator.disconnect()
async def test_start_and_finish_pomodoro_with_pause_inside(task_instance, active_user):
    """A pause can start and finish inside a running pomodoro without ending
    the pomodoro; both frames end up finished."""
    communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator.scope['user'] = active_user
    await communicator.connect()

    await communicator.send_json_to({
        'type': 'frame_start',
        'frame_type': DateFrame.pomodoro_type
    })
    pomodoro_started_response = await communicator.receive_json_from()
    assert pomodoro_started_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]
    started_pomodoro_id = pomodoro_started_response['data']['date_frame_id']

    await communicator.send_json_to({
        'type': 'frame_start',
        'frame_type': DateFrame.pause_type
    })
    pause_started_response = await communicator.receive_json_from()
    assert pause_started_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_STARTED]

    pomodoro = await database_sync_to_async(DateFrame.objects.get)(id=started_pomodoro_id)
    assert pomodoro.end is None  # check if pomodoro hasn't been stopped by starting a pause date frame

    started_pause_id = pause_started_response['data']['date_frame_id']
    pause = await database_sync_to_async(DateFrame.objects.get)(id=started_pause_id)
    assert pause.end is None

    await communicator.send_json_to({
        'type': 'frame_finish',
        'date_frame_id': started_pause_id
    })
    pause_finished_response = await communicator.receive_json_from()
    assert pause_finished_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]

    await database_sync_to_async(pause.refresh_from_db)()
    assert pause.end is not None  # pause should be finished here

    await database_sync_to_async(pomodoro.refresh_from_db)()
    assert pomodoro.end is None

    await communicator.send_json_to({
        'type': 'frame_finish',
        'date_frame_id': started_pomodoro_id
    })
    pomodoro_finished_response = await communicator.receive_json_from()

    await database_sync_to_async(pomodoro.refresh_from_db)()
    assert pomodoro.end is not None  # Only now the pomodoro is expected to be finished
    assert pomodoro_finished_response['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[statuses.FRAME_ACTION_FINISHED]
    # Both the pause and the pomodoro count as finished frames for the task.
    assert await database_sync_to_async(get_finished_date_frames_for_task(task=task_instance).count)() == 2

    await communicator.disconnect()
@pytest.mark.parametrize(
    'tested_frame_type',
    [DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_channel_group_separation(tested_frame_type, active_user, task_instance,
                                        task_instance_in_second_project):
    """Messages for one task's group are not delivered to a connection bound
    to a different task."""
    communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance_in_second_project.id}/')
    communicator_1.scope['user'] = active_user
    communicator_2.scope['user'] = active_user

    communicator_1_connected, _ = await communicator_1.connect()
    communicator_2_connected, _ = await communicator_2.connect()
    assert communicator_1_connected
    assert communicator_2_connected

    # Quiet on both channels before anything is sent.
    assert await communicator_1.receive_nothing()
    assert await communicator_2.receive_nothing()

    await communicator_1.send_json_to({
        'type': 'frame_start',
        'frame_type': tested_frame_type
    })

    # Only the sender's group sees a message.
    assert await communicator_1.receive_nothing() is False
    assert await communicator_2.receive_nothing()

    await communicator_1.disconnect()
    await communicator_2.disconnect()
@pytest.mark.parametrize(
    'tested_frame_type',
    [DateFrame.pomodoro_type, DateFrame.break_type, DateFrame.pause_type]
)
async def test_connection_discarded_before_second_connection_established(tested_frame_type, active_user, task_instance):
    """Opening a second connection for the same task closes the first one,
    and only the surviving connection receives messages."""
    communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator_1.scope['user'] = active_user
    communicator_2.scope['user'] = active_user

    communicator_1_connected, _ = await communicator_1.connect()
    assert communicator_1_connected
    communicator_2_connected, _ = await communicator_2.connect()
    assert communicator_2_connected

    # The first connection is told to close once the second is established.
    connection_close_response = await communicator_1.receive_output()
    assert connection_close_response['type'] == 'websocket.close'

    assert await communicator_1.receive_nothing()
    assert await communicator_2.receive_nothing()

    await communicator_2.send_json_to({
        'type': 'frame_start',
        'frame_type': tested_frame_type
    })

    assert await communicator_1.receive_nothing()
    assert await communicator_2.receive_nothing() is False

    await communicator_2.disconnect()
@pytest.mark.parametrize(
    'tested_frame_type',
    [
        lazy_fixture('pomodoro_in_progress'),
        lazy_fixture('pause_in_progress')
    ]
)
async def test_date_frame_force_finished_and_client_notified(tested_frame_type, active_user, task_instance):
    """A second connection force-terminates an in-progress frame and the
    displaced client is notified before being closed."""
    communicator_1 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator_2 = WebsocketCommunicator(frames_application, f'date_frames/{task_instance.id}/')
    communicator_1.scope['user'] = active_user
    communicator_2.scope['user'] = active_user
    await communicator_1.connect()
    await communicator_2.connect()

    # First a force-terminated notification arrives on the old connection...
    notification_message = await communicator_1.receive_output()
    assert notification_message['type'] == 'websocket.send'
    assert json.loads(notification_message['text'])['action'] == statuses.MESSAGE_FRAME_ACTION_CHOICES[
        statuses.FRAME_ACTION_FORCE_TERMINATED]

    # ...then the close frame.
    connection_close_response = await communicator_1.receive_output()
    assert connection_close_response['type'] == 'websocket.close'

    await communicator_1.disconnect()
    await communicator_2.disconnect()
async def test_channel_group_permission(task_instance_for_random_project, active_user):
    """Connecting to a task the user does not own is rejected."""
    communicator = WebsocketCommunicator(frames_application, f'date_frames/{task_instance_for_random_project.id}/')
    communicator.scope['user'] = active_user

    connected, _ = await communicator.connect()
    assert connected is False
| 39.123894
| 120
| 0.774259
| 1,078
| 8,842
| 5.959184
| 0.1141
| 0.113792
| 0.036426
| 0.029577
| 0.808842
| 0.767746
| 0.739415
| 0.724938
| 0.689446
| 0.651308
| 0
| 0.005914
| 0.139448
| 8,842
| 225
| 121
| 39.297778
| 0.838349
| 0.016625
| 0
| 0.576687
| 0
| 0
| 0.100679
| 0.039926
| 0
| 0
| 0
| 0
| 0.233129
| 1
| 0
| false
| 0
| 0.055215
| 0
| 0.055215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f4458a3941886161e8e7b509e9445b16e1094e76
| 24
|
py
|
Python
|
docker_squash/version.py
|
pombredanne/docker-scripts
|
ecee9f921b22cd44943197635875572185dd015d
|
[
"MIT"
] | 513
|
2016-04-04T21:44:14.000Z
|
2022-03-27T06:18:26.000Z
|
docker_squash/version.py
|
pombredanne/docker-scripts
|
ecee9f921b22cd44943197635875572185dd015d
|
[
"MIT"
] | 106
|
2016-04-01T11:53:20.000Z
|
2022-03-31T00:35:31.000Z
|
docker_squash/version.py
|
pombredanne/docker-scripts
|
ecee9f921b22cd44943197635875572185dd015d
|
[
"MIT"
] | 75
|
2016-05-11T01:08:47.000Z
|
2022-03-25T01:20:06.000Z
|
# Package version string; the "dev0" suffix marks an unreleased development
# build (PEP 440 pre-release ordering).
version = "1.0.10.dev0"
| 12
| 23
| 0.625
| 5
| 24
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 0.125
| 24
| 1
| 24
| 24
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f45a0afb4a750100d6616bb61de6015d31db9869
| 25
|
py
|
Python
|
heareval/__init__.py
|
neuralaudio/hear-eval-kit
|
f92119592954544dfb417f8e9aea21eadb4a65d0
|
[
"Apache-2.0"
] | 24
|
2021-07-26T21:21:46.000Z
|
2022-03-30T08:10:13.000Z
|
heareval/__init__.py
|
neuralaudio/hear-eval-kit
|
f92119592954544dfb417f8e9aea21eadb4a65d0
|
[
"Apache-2.0"
] | 196
|
2021-07-26T17:58:23.000Z
|
2022-01-26T17:40:25.000Z
|
heareval/__init__.py
|
neuralaudio/hear-eval-kit
|
f92119592954544dfb417f8e9aea21eadb4a65d0
|
[
"Apache-2.0"
] | 3
|
2021-08-10T13:12:53.000Z
|
2022-03-19T05:00:50.000Z
|
# Package version exposed via the conventional module-level attribute.
__version__ = "2021.0.6"
| 12.5
| 24
| 0.68
| 4
| 25
| 3.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.12
| 25
| 1
| 25
| 25
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f45caefa61ce261896189f11de67dd4621b4cff1
| 44
|
py
|
Python
|
code/abc057_a_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc057_a_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc057_a_02.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# Read the current hour and the sleep duration, then print the wake-up hour
# on a 24-hour clock.
start_hour, duration = map(int, input().split())
print((start_hour + duration) % 24)
| 22
| 28
| 0.613636
| 10
| 44
| 2.7
| 0.8
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.022727
| 44
| 2
| 29
| 22
| 0.581395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
be483eb33f37e53a2e55abe5acc6cd622141fb6c
| 200
|
py
|
Python
|
src/game/exceptions.py
|
UnBParadigmas/2020.1_G2_SMA_DarwInPython
|
34cdc979a95f827f230bd4f13442f6c67d81ba2b
|
[
"MIT"
] | null | null | null |
src/game/exceptions.py
|
UnBParadigmas/2020.1_G2_SMA_DarwInPython
|
34cdc979a95f827f230bd4f13442f6c67d81ba2b
|
[
"MIT"
] | 1
|
2020-11-20T10:32:49.000Z
|
2020-11-20T10:32:49.000Z
|
src/game/exceptions.py
|
UnBParadigmas/2020.1_G2_SMA_DarwInPython
|
34cdc979a95f827f230bd4f13442f6c67d81ba2b
|
[
"MIT"
] | null | null | null |
class InvalidMovementException(Exception):
    """Base error for any invalid movement attempted in the game."""
    pass
class InvalidMovementTargetException(InvalidMovementException):
    """Raised when the destination of a movement is not a valid target."""
    pass
class InvalidMovimentOriginException(InvalidMovementException):
    """Raised when the origin of a movement is invalid.

    NOTE(review): "Moviment" is a typo for "Movement", but renaming would
    break existing callers that catch this class by name.
    """
    pass
| 22.222222
| 63
| 0.84
| 12
| 200
| 14
| 0.5
| 0.107143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115
| 200
| 9
| 64
| 22.222222
| 0.949153
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
be629e4dd47b9de924dd51caddb573587b68e29b
| 268
|
py
|
Python
|
cracking_the_coding_interview_qs/10.4/find_x_in_listy_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
cracking_the_coding_interview_qs/10.4/find_x_in_listy_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
cracking_the_coding_interview_qs/10.4/find_x_in_listy_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from find_x_in_listy import find_x_in_listy, Listy
class Test_Case_Find_X_In_Listy(unittest.TestCase):
    """Cracking the Coding Interview 10.4: search in a Listy (a list-like
    structure without a length operation)."""

    def test_case_find_x_in_listy(self):
        # NOTE(review): this builds a 10**8-element Python list, which is
        # very memory- and time-hungry for a unit test; consider shrinking
        # the fixture.
        listy = Listy(list(range(0, 1*10**8)))
        self.assertEqual(find_x_in_listy(listy, 5678), 5678)
| 38.285714
| 60
| 0.761194
| 47
| 268
| 3.93617
| 0.446809
| 0.135135
| 0.189189
| 0.324324
| 0.4
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0.056522
| 0.141791
| 268
| 7
| 60
| 38.285714
| 0.747826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be7fa8fa9510f2347bc60a9ff146e619c5f6dc1c
| 11,457
|
py
|
Python
|
homeschool/students/tests/test_forms.py
|
brandonmcclure/homeschool
|
6ba2e35014740e952222535e9492cde0d41338b4
|
[
"MIT"
] | null | null | null |
homeschool/students/tests/test_forms.py
|
brandonmcclure/homeschool
|
6ba2e35014740e952222535e9492cde0d41338b4
|
[
"MIT"
] | null | null | null |
homeschool/students/tests/test_forms.py
|
brandonmcclure/homeschool
|
6ba2e35014740e952222535e9492cde0d41338b4
|
[
"MIT"
] | null | null | null |
import datetime
from homeschool.courses.tests.factories import (
CourseFactory,
CourseTaskFactory,
GradedWorkFactory,
)
from homeschool.schools.tests.factories import GradeLevelFactory
from homeschool.students.forms import CourseworkForm, EnrollmentForm, GradeForm
from homeschool.students.models import Coursework, Grade
from homeschool.students.tests.factories import (
CourseworkFactory,
EnrollmentFactory,
GradeFactory,
StudentFactory,
)
from homeschool.test import TestCase
class TestCourseworkForm(TestCase):
def test_is_valid(self):
"""The coursework validates."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
"completed_date": str(grade_level.school_year.start_date),
}
form = CourseworkForm(data=data)
is_valid = form.is_valid()
assert is_valid
def test_student_can_create_coursework(self):
"""The student is enrolled in a course that contains the task."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
"completed_date": str(grade_level.school_year.start_date),
}
form = CourseworkForm(data=data)
is_valid = form.is_valid()
assert not is_valid
assert form.non_field_errors() == [
"The student is not enrolled in this course."
]
def test_save_new_coursework(self):
"""A new coursework is created for a student and task."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
"completed_date": str(grade_level.school_year.start_date),
}
form = CourseworkForm(data=data)
form.is_valid()
form.save()
assert (
Coursework.objects.filter(student=student, course_task=course_task).count()
== 1
)
def test_save_existing_coursework(self):
"""A new coursework is created for a student and task."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
CourseworkFactory(student=student, course_task=course_task)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
"completed_date": str(grade_level.school_year.start_date),
}
form = CourseworkForm(data=data)
form.is_valid()
form.save()
assert (
Coursework.objects.filter(student=student, course_task=course_task).count()
== 1
)
def test_save_deletes_coursework(self):
"""A blank completed date deletes an existing coursework."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
CourseworkFactory(student=student, course_task=course_task)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
}
form = CourseworkForm(data=data)
form.is_valid()
form.save()
assert (
Coursework.objects.filter(student=student, course_task=course_task).count()
== 0
)
def test_completed_date_outside_school_year(self):
"""The completed data must be in the school year."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
course_task = CourseTaskFactory(course=course)
data = {
"student": str(student.id),
"course_task": str(course_task.id),
"completed_date": str(
grade_level.school_year.start_date - datetime.timedelta(days=1)
),
}
form = CourseworkForm(data=data)
is_valid = form.is_valid()
assert not is_valid
assert form.non_field_errors() == [
"The completed date must be in the school year."
]
def test_invalid_course_task(self):
"""An invalid course task is an error."""
user = self.make_user()
student = StudentFactory(school=user.school)
grade_level = GradeLevelFactory(school_year__school=user.school)
EnrollmentFactory(student=student, grade_level=grade_level)
course = CourseFactory(grade_levels=[grade_level])
CourseTaskFactory(course=course)
data = {
"student": str(student.id),
"course_task": "0",
"completed_date": str(grade_level.school_year.start_date),
}
form = CourseworkForm(data=data)
is_valid = form.is_valid()
assert not is_valid
def test_invalid_completed_date(self):
    """An invalid completed date is an error."""
    user = self.make_user()
    student = StudentFactory(school=user.school)
    grade_level = GradeLevelFactory(school_year__school=user.school)
    EnrollmentFactory(student=student, grade_level=grade_level)
    course = CourseFactory(grade_levels=[grade_level])
    course_task = CourseTaskFactory(course=course)
    # "boom" cannot be parsed as a date, so the form must be invalid.
    data = {
        "student": str(student.id),
        "course_task": str(course_task.id),
        "completed_date": "boom",
    }
    form = CourseworkForm(data=data)
    is_valid = form.is_valid()
    assert not is_valid
class TestEnrollmentForm(TestCase):
    """Validation rules for the enrollment form."""

    def test_students_only_enroll_in_one_grade_level_per_year(self):
        """A student can only be enrolled in a single grade level in a school year."""
        user = self.make_user()
        existing = EnrollmentFactory(
            student__school=user.school, grade_level__school_year__school=user.school
        )
        # A second grade level inside the same school year.
        other_level = GradeLevelFactory(school_year=existing.grade_level.school_year)
        form = EnrollmentForm(
            user=user,
            data={
                "student": str(existing.student.id),
                "grade_level": str(other_level.id),
            },
        )
        assert not form.is_valid()
        expected_message = (
            "A student may not be enrolled in multiple grade levels in a school year. "
            f"{existing.student} is enrolled in {existing.grade_level}."
        )
        assert expected_message in form.non_field_errors()

    def test_no_grade_level(self):
        """A missing grade level raises a validation error."""
        user = self.make_user()
        enrollment = EnrollmentFactory(
            student__school=user.school,
            grade_level__school_year__school=user.school,
        )
        # "0" is not a real grade level id.
        form = EnrollmentForm(
            user=user,
            data={"student": str(enrollment.student.id), "grade_level": "0"},
        )
        assert not form.is_valid()
        assert "You need to select a grade level." in form.non_field_errors()
class TestGradeForm(TestCase):
    """Validation and persistence rules for the grade form."""

    def test_is_valid(self):
        """The new grade validates."""
        user = self.make_user()
        student = StudentFactory(school=user.school)
        grade_level = GradeLevelFactory(school_year__school=user.school)
        EnrollmentFactory(student=student, grade_level=grade_level)
        work = GradedWorkFactory(
            course_task__course=CourseFactory(grade_levels=[grade_level])
        )
        form = GradeForm(
            data={
                "student": str(student.id),
                "graded_work": str(work.id),
                "score": "100",
            }
        )
        assert form.is_valid()

    def test_invalid_graded_work(self):
        """An invalid graded work is an error."""
        user = self.make_user()
        student = StudentFactory(school=user.school)
        grade_level = GradeLevelFactory(school_year__school=user.school)
        EnrollmentFactory(student=student, grade_level=grade_level)
        GradedWorkFactory(
            course_task__course=CourseFactory(grade_levels=[grade_level])
        )
        # "0" matches no graded work id.
        form = GradeForm(
            data={"student": str(student.id), "graded_work": "0", "score": "100"}
        )
        assert not form.is_valid()

    def test_save(self):
        """The form creates a new grade."""
        user = self.make_user()
        student = StudentFactory(school=user.school)
        grade_level = GradeLevelFactory(school_year__school=user.school)
        EnrollmentFactory(student=student, grade_level=grade_level)
        work = GradedWorkFactory(
            course_task__course=CourseFactory(grade_levels=[grade_level])
        )
        form = GradeForm(
            data={
                "student": str(student.id),
                "graded_work": str(work.id),
                "score": "100",
            }
        )
        form.is_valid()
        form.save()
        created = Grade.objects.filter(student=student, graded_work=work, score=100)
        assert created.count() == 1

    def test_save_update(self):
        """The form updates a grade."""
        user = self.make_user()
        student = StudentFactory(school=user.school)
        grade_level = GradeLevelFactory(school_year__school=user.school)
        EnrollmentFactory(student=student, grade_level=grade_level)
        work = GradedWorkFactory(
            course_task__course=CourseFactory(grade_levels=[grade_level])
        )
        # Existing grade; saving again must update, not duplicate.
        GradeFactory(student=student, graded_work=work)
        form = GradeForm(
            data={
                "student": str(student.id),
                "graded_work": str(work.id),
                "score": "100",
            }
        )
        form.is_valid()
        form.save()
        assert Grade.objects.filter(student=student, graded_work=work).count() == 1
| 36.141956
| 87
| 0.639696
| 1,251
| 11,457
| 5.614708
| 0.089528
| 0.09254
| 0.061503
| 0.031891
| 0.797978
| 0.78246
| 0.758827
| 0.742027
| 0.729784
| 0.729784
| 0
| 0.00284
| 0.262372
| 11,457
| 316
| 88
| 36.256329
| 0.828304
| 0.052806
| 0
| 0.679688
| 0
| 0
| 0.059827
| 0.002319
| 0
| 0
| 0
| 0
| 0.070313
| 1
| 0.054688
| false
| 0
| 0.027344
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
be98084b654d84cf6a197790eaa2f280fb68a68e
| 800
|
py
|
Python
|
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/train/experimental/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 2
|
2020-09-30T00:11:09.000Z
|
2021-10-04T13:00:38.000Z
|
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/train/experimental/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | null | null | null |
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v2/train/experimental/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 1
|
2021-01-28T01:57:41.000Z
|
2021-01-28T01:57:41.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.train.experimental namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.training.experimental.loss_scale import DynamicLossScale
from tensorflow.python.training.experimental.loss_scale import FixedLossScale
from tensorflow.python.training.experimental.loss_scale import LossScale
from tensorflow.python.training.experimental.mixed_precision import disable_mixed_precision_graph_rewrite
from tensorflow.python.training.experimental.mixed_precision import enable_mixed_precision_graph_rewrite
from tensorflow.python.training.tracking.python_state import PythonState
del _print_function
| 44.444444
| 105
| 0.86875
| 104
| 800
| 6.432692
| 0.442308
| 0.167414
| 0.179372
| 0.251121
| 0.54559
| 0.54559
| 0.54559
| 0.54559
| 0
| 0
| 0
| 0
| 0.075
| 800
| 17
| 106
| 47.058824
| 0.904054
| 0.2175
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.888889
| 0
| 0.888889
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
beb77481d7d9ef64134079c15cf78aedfbcf66f2
| 187
|
py
|
Python
|
RainIt/rain_it/ric/Procedure.py
|
luisgepeto/RainItPi
|
47cb7228e9c584c3c4489ebc78abf6de2096b770
|
[
"MIT"
] | null | null | null |
RainIt/rain_it/ric/Procedure.py
|
luisgepeto/RainItPi
|
47cb7228e9c584c3c4489ebc78abf6de2096b770
|
[
"MIT"
] | null | null | null |
RainIt/rain_it/ric/Procedure.py
|
luisgepeto/RainItPi
|
47cb7228e9c584c3c4489ebc78abf6de2096b770
|
[
"MIT"
] | null | null | null |
from ric.RainItComposite import RainItComposite
class Procedure(RainItComposite):
    """A composite that represents a procedure; it pickles as itself."""

    def __init__(self):
        super().__init__()

    def get_pickle_form(self):
        """Return the object to use when pickling (the instance itself)."""
        return self
| 18.7
| 47
| 0.705882
| 20
| 187
| 6.1
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213904
| 187
| 9
| 48
| 20.777778
| 0.829932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fe2bf5d430a026df243c522eca3e9b1d054d0492
| 45
|
py
|
Python
|
remediar/modules/http/__init__.py
|
fabaff/remediar
|
014d7733b00cd40a45881c2729c04df5584476e7
|
[
"Apache-2.0"
] | null | null | null |
remediar/modules/http/__init__.py
|
fabaff/remediar
|
014d7733b00cd40a45881c2729c04df5584476e7
|
[
"Apache-2.0"
] | null | null | null |
remediar/modules/http/__init__.py
|
fabaff/remediar
|
014d7733b00cd40a45881c2729c04df5584476e7
|
[
"Apache-2.0"
] | null | null | null |
"""Support for HTTP or web server issues."""
| 22.5
| 44
| 0.688889
| 7
| 45
| 4.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 45
| 1
| 45
| 45
| 0.815789
| 0.844444
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe4a3921bc3a55c6c61a10f07f322ae6a1bc443a
| 12,719
|
py
|
Python
|
pay-api/tests/unit/api/test_fee.py
|
saravanpa-aot/sbc-pay
|
fb9f61b99e506e43280bc69531ee107cc12cd92d
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/api/test_fee.py
|
saravanpa-aot/sbc-pay
|
fb9f61b99e506e43280bc69531ee107cc12cd92d
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/api/test_fee.py
|
saravanpa-aot/sbc-pay
|
fb9f61b99e506e43280bc69531ee107cc12cd92d
|
[
"Apache-2.0"
] | 5
|
2019-03-01T01:12:12.000Z
|
2019-07-08T16:33:47.000Z
|
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the fees end-point.
Test-Suite to ensure that the /fees endpoint is working as expected.
"""
import json
from datetime import date, timedelta
from pay_api.models import CorpType, FeeCode, FeeSchedule, FilingType
from pay_api.schemas import utils as schema_utils
from pay_api.utils.enums import Role
from tests.utilities.base_test import get_claims, get_gov_account_payload, token_header
def test_fees_with_corp_type_and_filing_type(session, client, jwt, app):
    """Assert that the endpoint returns 200."""
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    # Seed a matching fee schedule so the lookup succeeds.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}', headers=headers)
    assert rv.status_code == 200
    # Response body validates against the 'fees' JSON schema.
    assert schema_utils.validate(rv.json, 'fees')[0]
def test_fees_with_corp_type_and_filing_type_with_valid_start_date(session, client, jwt, app):
    """Assert that the endpoint returns 200."""
    # Insert a record first and then query for it
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    now = date.today()
    # Schedule became effective yesterday, so today's valid_date matches it.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100),
        now - timedelta(1))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?valid_date={now}', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    assert not schema_utils.validate(rv.json, 'problem')[0]
def test_fees_with_corp_type_and_filing_type_with_invalid_start_date(session, client, jwt, app):
    """Assert that the endpoint returns 400."""
    # Insert a record first and then query for it
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    now = date.today()
    # Schedule only becomes effective tomorrow, so today's valid_date misses it.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100),
        now + timedelta(1))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?valid_date={now}', headers=headers)
    assert rv.status_code == 400
    assert schema_utils.validate(rv.json, 'problem')[0]
    assert not schema_utils.validate(rv.json, 'fees')[0]
def test_fees_with_corp_type_and_filing_type_with_valid_end_date(session, client, jwt, app):
    """Assert that the endpoint returns 200."""
    # Insert a record first and then query for it
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    now = date.today()
    # Window [yesterday, today] still contains today's valid_date.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100),
        now - timedelta(1),
        now)
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?valid_date={now}', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
def test_fees_with_corp_type_and_filing_type_with_invalid_end_date(session, client, jwt, app):
    """Assert that the endpoint returns 400."""
    # Insert a record first and then query for it
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    now = date.today()
    # Window [2 days ago, yesterday] ended before today's valid_date.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100),
        now - timedelta(2),
        now - timedelta(1))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?valid_date={now}', headers=headers)
    assert rv.status_code == 400
    assert schema_utils.validate(rv.json, 'problem')[0]
def test_calculate_fees_with_waive_fees(session, client, jwt, app):
    """Assert that staff can waive fees and the endpoint returns 200 with a zero filing fee."""
    # Staff role is required to waive fees.
    token = jwt.create_jwt(get_claims(role='staff'), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?waiveFees=true', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    # The 100-dollar fee is waived down to zero.
    assert rv.json.get('filingFees') == 0
def test_calculate_fees_with_waive_fees_unauthorized(session, client, jwt, app):
    """Assert that waiveFees is ignored for non-staff and the full fee is returned."""
    # No staff role: the waiveFees flag must have no effect.
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?waiveFees=true', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    # Fee remains at its full amount.
    assert rv.json.get('filingFees') == 100
def test_fees_with_quantity(session, client, jwt, app):
    """Assert that the endpoint returns 200."""
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100))
    # quantity=10 multiplies the fee; only the status and schema are asserted here.
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}?quantity=10', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
def test_calculate_fees_for_service_fee(session, client, jwt, app):
    """Assert that the endpoint returns 200 and includes the service fee."""
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    # Attach a 1.5 service fee on top of the 100 filing fee.
    service_fee = factory_fee_model('SF01', 1.5)
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 100),
        service_fee=service_fee)
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    assert rv.json.get('filingFees') == 100
    assert rv.json.get('serviceFees') == 1.5
def test_calculate_fees_with_zero_service_fee(session, client, jwt, app):
    """Assert that service fee is zero if the filing fee is zero."""
    token = jwt.create_jwt(get_claims(), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    corp_type = 'XX'
    filing_type_code = 'XOTANN'
    # A zero-amount filing fee.
    factory_fee_schedule_model(
        factory_filing_type_model('XOTANN', 'TEST'),
        factory_corp_type_model('XX', 'TEST'),
        factory_fee_model('XXX', 0))
    rv = client.get(f'/api/v1/fees/{corp_type}/{filing_type_code}', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    assert rv.json.get('filingFees') == 0
    # No service fee is charged when nothing is owed.
    assert rv.json.get('serviceFees') == 0
def test_fee_for_account_fee_settings(session, client, jwt, app):
    """Assert that per-account fee settings control filing and service fees."""
    # Create a government account as a SYSTEM user.
    token = jwt.create_jwt(get_claims(role=Role.SYSTEM.value), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    rv = client.post('/api/v1/accounts', data=json.dumps(get_gov_account_payload()),
                     headers=headers)
    account_id = rv.json.get('authAccountId')
    # Create account fee details.
    token = jwt.create_jwt(get_claims(role=Role.MANAGE_ACCOUNTS.value), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    client.post(f'/api/v1/accounts/{account_id}/fees', data=json.dumps({'accountFees': [
        {
            'applyFilingFees': False,
            'serviceFeeCode': 'TRF02',  # 1.0
            'product': 'BUSINESS'
        }
    ]}), headers=headers)
    # Get fee for this account.
    token = jwt.create_jwt(get_claims(role=Role.EDITOR.value), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json', 'Account-Id': account_id}
    rv = client.get('/api/v1/fees/BEN/BCANN', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    # assert filing fee is not applied and service fee is applied
    assert rv.json.get('filingFees') == 0
    assert rv.json.get('serviceFees') == 1.0
    # Now change the settings to apply filing fees and assert
    token = jwt.create_jwt(get_claims(role=Role.MANAGE_ACCOUNTS.value), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json'}
    client.put(f'/api/v1/accounts/{account_id}/fees/BUSINESS', data=json.dumps({
        'applyFilingFees': True,
        'serviceFeeCode': 'TRF01',  # 1.5
        'product': 'BUSINESS'
    }), headers=headers)
    # Get fee for this account.
    token = jwt.create_jwt(get_claims(role=Role.EDITOR.value), token_header)
    headers = {'Authorization': f'Bearer {token}', 'content-type': 'application/json', 'Account-Id': account_id}
    rv = client.get('/api/v1/fees/BEN/BCANN', headers=headers)
    assert rv.status_code == 200
    assert schema_utils.validate(rv.json, 'fees')[0]
    # assert filing fee is applied and service fee is applied
    assert rv.json.get('filingFees') > 0
    assert rv.json.get('serviceFees') == 1.5
def factory_filing_type_model(
        filing_type_code: str,
        filing_description: str = 'TEST'):
    """Create, persist, and return a filing type."""
    model = FilingType(code=filing_type_code, description=filing_description)
    model.save()
    return model
def factory_fee_model(
        fee_code: str,
        amount: int):
    """Create, persist, and return a fee code."""
    model = FeeCode(code=fee_code, amount=amount)
    model.save()
    return model
def factory_corp_type_model(
        corp_type_code: str,
        corp_type_description: str):
    """Create, persist, and return a corp type."""
    model = CorpType(code=corp_type_code, description=corp_type_description)
    model.save()
    return model
def factory_fee_schedule_model(
        filing_type: FilingType,
        corp_type: CorpType,
        fee_code: FeeCode,
        fee_start_date: date = None,
        fee_end_date: date = None,
        service_fee: FeeCode = None):
    """Create, persist, and return a fee schedule.

    fee_start_date defaults to today when omitted; fee_end_date and
    service_fee are optional.
    """
    # BUG FIX: the previous default `fee_start_date: date = date.today()` was
    # evaluated once at import time, so a long-running test session crossing
    # midnight would silently use a stale date. Resolve "today" at call time.
    if fee_start_date is None:
        fee_start_date = date.today()
    fee_schedule = FeeSchedule(filing_type_code=filing_type.code,
                               corp_type_code=corp_type.code,
                               fee_code=fee_code.code,
                               fee_start_date=fee_start_date,
                               fee_end_date=fee_end_date
                               )
    if service_fee:
        fee_schedule.service_fee_code = service_fee.code
    fee_schedule.save()
    return fee_schedule
| 41.029032
| 112
| 0.679928
| 1,730
| 12,719
| 4.755491
| 0.115029
| 0.045703
| 0.040841
| 0.030996
| 0.753738
| 0.734533
| 0.734533
| 0.727483
| 0.70767
| 0.698554
| 0
| 0.016225
| 0.190738
| 12,719
| 309
| 113
| 41.161812
| 0.782959
| 0.130907
| 0
| 0.622727
| 0
| 0
| 0.19145
| 0.060376
| 0
| 0
| 0
| 0
| 0.163636
| 1
| 0.068182
| false
| 0
| 0.027273
| 0
| 0.113636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
feb98f525f627b833eb5f7cdfb89e344a5f06574
| 103
|
py
|
Python
|
src/jj_analyzer/__init__.py
|
ninetymiles/jj-logcat-analyzer
|
d4ae0fddfefc303ae9c17e6c9e08aad6a231e036
|
[
"Apache-1.1"
] | null | null | null |
src/jj_analyzer/__init__.py
|
ninetymiles/jj-logcat-analyzer
|
d4ae0fddfefc303ae9c17e6c9e08aad6a231e036
|
[
"Apache-1.1"
] | null | null | null |
src/jj_analyzer/__init__.py
|
ninetymiles/jj-logcat-analyzer
|
d4ae0fddfefc303ae9c17e6c9e08aad6a231e036
|
[
"Apache-1.1"
] | null | null | null |
#! /usr/bin/python
"""Expose the package's public API on Python 3 interpreters only."""
import sys

# On Python 2 the package deliberately exposes nothing; the previous
# explicit `else: pass` branch was dead code and has been removed.
if sys.version_info[0] == 3:
    from .__main__ import *
| 12.875
| 28
| 0.640777
| 16
| 103
| 3.8125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.223301
| 103
| 8
| 29
| 12.875
| 0.7375
| 0.165049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
2290a77719ce3ea48bd13dc7fb8b6642fe413085
| 144
|
py
|
Python
|
application/recommendations/__init__.py
|
QualiChain/qualichain_backend
|
cc6dbf1ae5d09e8d01cccde94326563b25d28b58
|
[
"MIT"
] | null | null | null |
application/recommendations/__init__.py
|
QualiChain/qualichain_backend
|
cc6dbf1ae5d09e8d01cccde94326563b25d28b58
|
[
"MIT"
] | null | null | null |
application/recommendations/__init__.py
|
QualiChain/qualichain_backend
|
cc6dbf1ae5d09e8d01cccde94326563b25d28b58
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Blueprint that groups the recommendation endpoints under one registration.
recommendation_blueprint = Blueprint('recommendations', __name__)

# NOTE(review): imported after the blueprint is defined, presumably so that
# `routes` can import `recommendation_blueprint` from this module without a
# circular-import error — confirm against application/recommendations/routes.
from application.recommendations import routes
| 20.571429
| 65
| 0.847222
| 14
| 144
| 8.357143
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 144
| 7
| 66
| 20.571429
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
22c8357e530d1406b6c30aa5078c53db167737b2
| 128
|
py
|
Python
|
pichetprofile/__init__.py
|
jamenor/pichetprofile
|
6633ea6eaa7473af9e10f34f6a19428c2db92465
|
[
"MIT"
] | 2
|
2021-04-20T01:54:40.000Z
|
2022-01-31T10:00:04.000Z
|
pichetprofile/__init__.py
|
jamenor/pichetprofile
|
6633ea6eaa7473af9e10f34f6a19428c2db92465
|
[
"MIT"
] | null | null | null |
pichetprofile/__init__.py
|
jamenor/pichetprofile
|
6633ea6eaa7473af9e10f34f6a19428c2db92465
|
[
"MIT"
] | 2
|
2021-12-12T08:17:42.000Z
|
2022-02-13T21:04:44.000Z
|
# -*- coding: utf-8 -*-
from oopschool.school import Student,Tesla,SpecialStudent,Teacher
from oopschool.newschool import Test
| 42.666667
| 66
| 0.78125
| 16
| 128
| 6.25
| 0.8125
| 0.26
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.109375
| 128
| 3
| 67
| 42.666667
| 0.868421
| 0.164063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fe0a42ffd316cd292e323db6162852aaf54d8093
| 37
|
py
|
Python
|
website/addons/forward/views/__init__.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | 1
|
2015-10-02T18:35:53.000Z
|
2015-10-02T18:35:53.000Z
|
website/addons/forward/views/__init__.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | 13
|
2020-03-24T15:29:41.000Z
|
2022-03-11T23:15:28.000Z
|
website/addons/forward/views/__init__.py
|
DanielSBrown/osf.io
|
98dda2ac237377197acacce78274bc0a4ce8f303
|
[
"Apache-2.0"
] | 1
|
2019-07-16T00:14:49.000Z
|
2019-07-16T00:14:49.000Z
|
from . import config, widget # noqa
| 18.5
| 36
| 0.702703
| 5
| 37
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 37
| 1
| 37
| 37
| 0.896552
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fe0f496060ed3aa777376eab607ac140da6babfa
| 1,400
|
py
|
Python
|
horizon/forms/__init__.py
|
ameoba/horizon
|
ff9e367c98a8bb79f10914abffaaa04b0a461819
|
[
"Apache-2.0"
] | 2
|
2019-12-29T09:20:13.000Z
|
2020-01-01T13:12:34.000Z
|
horizon/forms/__init__.py
|
yongquanf/horizon
|
9aad7fd6f66588fed7c27b720642e47a4a12854b
|
[
"Apache-2.0"
] | 10
|
2015-02-19T20:27:04.000Z
|
2017-05-15T15:04:32.000Z
|
horizon/forms/__init__.py
|
yongquanf/horizon
|
9aad7fd6f66588fed7c27b720642e47a4a12854b
|
[
"Apache-2.0"
] | 4
|
2015-05-05T08:17:28.000Z
|
2020-02-05T10:47:06.000Z
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# FIXME(gabriel): Legacy imports for API compatibility.
from django.forms import * # noqa
from django.forms import widgets
# Convenience imports for public API components.
from horizon.forms.base import DateForm # noqa
from horizon.forms.base import SelfHandlingForm # noqa
from horizon.forms.base import SelfHandlingMixin # noqa
from horizon.forms.fields import DynamicChoiceField # noqa
from horizon.forms.fields import DynamicTypedChoiceField # noqa
from horizon.forms.views import ModalFormMixin # noqa
from horizon.forms.views import ModalFormView # noqa
# NOTE(review): these bare asserts appear to exist so that static analysis
# treats the re-exported legacy names as used; they are stripped when Python
# runs with `-O`, so they must not be relied on for runtime checks.
assert widgets
assert SelfHandlingMixin
assert SelfHandlingForm
assert DateForm
assert ModalFormView
assert ModalFormMixin
assert DynamicTypedChoiceField
assert DynamicChoiceField
| 36.842105
| 78
| 0.784286
| 185
| 1,400
| 5.935135
| 0.513514
| 0.051002
| 0.102004
| 0.10929
| 0.193078
| 0.169399
| 0
| 0
| 0
| 0
| 0
| 0.00937
| 0.161429
| 1,400
| 37
| 79
| 37.837838
| 0.925894
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.470588
| 1
| 0
| true
| 0
| 0.529412
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3b0b5f68e1084bc860c329219fb7ebd7ec06dcc
| 70
|
py
|
Python
|
numberTheory/natural.py
|
ndarwin314/symbolicPy
|
ce2e48bf1557b5995db6c324ada9fbd4767df1e3
|
[
"MIT"
] | null | null | null |
numberTheory/natural.py
|
ndarwin314/symbolicPy
|
ce2e48bf1557b5995db6c324ada9fbd4767df1e3
|
[
"MIT"
] | null | null | null |
numberTheory/natural.py
|
ndarwin314/symbolicPy
|
ce2e48bf1557b5995db6c324ada9fbd4767df1e3
|
[
"MIT"
] | null | null | null |
# TODO: implement algorithms in c++ or something to make them fast
| 23.333333
| 67
| 0.728571
| 11
| 70
| 4.636364
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 70
| 2
| 68
| 35
| 0.927273
| 0.914286
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0.5
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a3bf6d02c2f4e332e2c37541b89b9a4e5f82ec94
| 97
|
py
|
Python
|
CH7_GitCmdAndCtrl/modules/environment.py
|
maxmac12/BlackHatPython
|
60044c65ffc2f1216cbf92c2ec850a4e2e9ca5bf
|
[
"MIT"
] | null | null | null |
CH7_GitCmdAndCtrl/modules/environment.py
|
maxmac12/BlackHatPython
|
60044c65ffc2f1216cbf92c2ec850a4e2e9ca5bf
|
[
"MIT"
] | null | null | null |
CH7_GitCmdAndCtrl/modules/environment.py
|
maxmac12/BlackHatPython
|
60044c65ffc2f1216cbf92c2ec850a4e2e9ca5bf
|
[
"MIT"
] | null | null | null |
import os


def run(**kwargs):
    """Report the host process environment variables as a string."""
    print("[*] In environment module.")
    environment = os.environ
    return str(environment)
| 16.166667
| 39
| 0.639175
| 13
| 97
| 4.769231
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195876
| 97
| 6
| 40
| 16.166667
| 0.794872
| 0
| 0
| 0
| 0
| 0
| 0.265306
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
a3fc7e9736f8ff7c6e4924c0d8a73afdf2dd7f02
| 81
|
py
|
Python
|
aiolookin/__init__.py
|
bachya/aiolookin
|
553731047b6910b1cb74667fbb343faf9b8656ac
|
[
"MIT"
] | null | null | null |
aiolookin/__init__.py
|
bachya/aiolookin
|
553731047b6910b1cb74667fbb343faf9b8656ac
|
[
"MIT"
] | 3
|
2021-08-16T21:32:30.000Z
|
2021-10-05T00:30:03.000Z
|
aiolookin/__init__.py
|
bachya/aiolookin
|
553731047b6910b1cb74667fbb343faf9b8656ac
|
[
"MIT"
] | null | null | null |
"""Define the aiolookin package."""
from .device import async_get_device # noqa
| 27
| 44
| 0.753086
| 11
| 81
| 5.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135802
| 81
| 2
| 45
| 40.5
| 0.842857
| 0.432099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
430437fe39813c58d169d2be946182b08eb80151
| 200
|
py
|
Python
|
hoover/site/wsgi.py
|
hoover/hoover
|
84053b2479e966b0f639692c9e226261e3188709
|
[
"MIT"
] | 15
|
2016-08-18T10:48:06.000Z
|
2019-10-15T14:41:20.000Z
|
hoover/site/wsgi.py
|
hoover/hoover
|
84053b2479e966b0f639692c9e226261e3188709
|
[
"MIT"
] | 88
|
2019-10-28T14:55:16.000Z
|
2021-05-14T12:42:52.000Z
|
hoover/site/wsgi.py
|
hoover/hoover
|
84053b2479e966b0f639692c9e226261e3188709
|
[
"MIT"
] | 14
|
2016-09-27T13:11:57.000Z
|
2019-10-08T23:33:59.000Z
|
# WSGI entry point for the hoover site.
# `events` is imported for its import-time side effects; noqa keeps linters quiet.
from . import events  # noqa
from django.core.wsgi import get_wsgi_application
import os

# Only sets the variable when the environment has not already provided one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hoover.site.settings")
# `application` is the callable a WSGI server looks up in this module.
application = get_wsgi_application()
| 25
| 71
| 0.81
| 27
| 200
| 5.777778
| 0.592593
| 0.089744
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 200
| 7
| 72
| 28.571429
| 0.866667
| 0.02
| 0
| 0
| 0
| 0
| 0.216495
| 0.113402
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
431f67abd21ada1dae45fd70ed84a4c58f410719
| 65
|
py
|
Python
|
addons14/base_rest/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/base_rest/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/base_rest/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import models
from . import components
from . import http
| 16.25
| 24
| 0.769231
| 9
| 65
| 5.555556
| 0.555556
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184615
| 65
| 3
| 25
| 21.666667
| 0.943396
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
433606583160b95b550e87a2119c5cb01f7c5b5a
| 76
|
py
|
Python
|
src/python/errors.py
|
Miravalier/canonfire
|
7eeb93270ec3f3332fa039f3a9d0e8b3b2c86263
|
[
"MIT"
] | 1
|
2020-01-30T16:36:04.000Z
|
2020-01-30T16:36:04.000Z
|
src/python/errors.py
|
Miravalier/canonfire
|
7eeb93270ec3f3332fa039f3a9d0e8b3b2c86263
|
[
"MIT"
] | 9
|
2021-11-21T14:28:54.000Z
|
2021-11-21T14:38:16.000Z
|
src/python/errors.py
|
Miravalier/canonfire
|
7eeb93270ec3f3332fa039f3a9d0e8b3b2c86263
|
[
"MIT"
] | null | null | null |
class AuthError(Exception):
pass
class JsonError(Exception):
pass
| 10.857143
| 27
| 0.710526
| 8
| 76
| 6.75
| 0.625
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 76
| 6
| 28
| 12.666667
| 0.9
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4a32ad81cfcc28f835805b24183250a1a290fdeb
| 235
|
py
|
Python
|
weibo_image_spider/exceptions.py
|
lonsty/weibo-pic-spider-hd
|
c7dae38b51209296cc8e71aa6fb80f094d549198
|
[
"MIT"
] | null | null | null |
weibo_image_spider/exceptions.py
|
lonsty/weibo-pic-spider-hd
|
c7dae38b51209296cc8e71aa6fb80f094d549198
|
[
"MIT"
] | null | null | null |
weibo_image_spider/exceptions.py
|
lonsty/weibo-pic-spider-hd
|
c7dae38b51209296cc8e71aa6fb80f094d549198
|
[
"MIT"
] | null | null | null |
# @AUTHOR : lonsty
# @DATE : 2020/3/28 18:01
class CookiesExpiredException(Exception):
pass
class NoImagesException(Exception):
pass
class ContentParserError(Exception):
pass
class UserNotFound(Exception):
pass
| 12.368421
| 41
| 0.719149
| 24
| 235
| 7.041667
| 0.625
| 0.307692
| 0.319527
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057895
| 0.191489
| 235
| 18
| 42
| 13.055556
| 0.831579
| 0.170213
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4a42d347c7abb078f1060ffec9bcd3fae7f3044c
| 46
|
py
|
Python
|
datajoint-workflow/{{cookiecutter.github_repo}}/src/{{cookiecutter.__pkg_import_name}}/version.py
|
Yambottle/dj-workflow-template
|
a47a354af2f9303c898ef403491e69cfc396d196
|
[
"MIT"
] | null | null | null |
datajoint-workflow/{{cookiecutter.github_repo}}/src/{{cookiecutter.__pkg_import_name}}/version.py
|
Yambottle/dj-workflow-template
|
a47a354af2f9303c898ef403491e69cfc396d196
|
[
"MIT"
] | null | null | null |
datajoint-workflow/{{cookiecutter.github_repo}}/src/{{cookiecutter.__pkg_import_name}}/version.py
|
Yambottle/dj-workflow-template
|
a47a354af2f9303c898ef403491e69cfc396d196
|
[
"MIT"
] | 6
|
2022-02-18T20:19:04.000Z
|
2022-03-05T05:29:23.000Z
|
__version__ = "{{cookiecutter._pkg_version}}"
| 23
| 45
| 0.76087
| 4
| 46
| 7.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 46
| 1
| 46
| 46
| 0.674419
| 0
| 0
| 0
| 0
| 0
| 0.630435
| 0.630435
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a4d9078d162889cc7a0df9b67742f350806db8d
| 13,952
|
py
|
Python
|
stores/apps/inventory/migrations/0001_initial.py
|
diassor/CollectorCity-Market-Place
|
892ad220b8cf1c0fc7433f625213fe61729522b2
|
[
"Apache-2.0"
] | 135
|
2015-03-19T13:28:18.000Z
|
2022-03-27T06:41:42.000Z
|
stores/apps/inventory/migrations/0001_initial.py
|
dfcoding/CollectorCity-Market-Place
|
e59acec3d600c049323397b17cae14fdcaaaec07
|
[
"Apache-2.0"
] | null | null | null |
stores/apps/inventory/migrations/0001_initial.py
|
dfcoding/CollectorCity-Market-Place
|
e59acec3d600c049323397b17cae14fdcaaaec07
|
[
"Apache-2.0"
] | 83
|
2015-01-30T01:00:15.000Z
|
2022-03-08T17:25:10.000Z
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProductType'
db.create_table('inventory_producttype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('inventory', ['ProductType'])
# Adding model 'Product'
db.create_table('inventory_product', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shops.Shop'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('description', self.gf('django.db.models.fields.TextField')()),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'])),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'])),
('date_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('weight', self.gf('django.db.models.fields.DecimalField')(default='0', max_digits=11, decimal_places=2)),
('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['inventory.ProductType'], null=True, blank=True)),
))
db.send_create_signal('inventory', ['Product'])
# Adding model 'Coin'
db.create_table('inventory_coin', (
('producttype_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['inventory.ProductType'], unique=True, primary_key=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'], null=True, blank=True)),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'], null=True, blank=True)),
('country_code', self.gf('django.db.models.fields.CharField')(default='us', max_length=2)),
('pcgs_number', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('year_issued', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('actual_year', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('denomination', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('major_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('die_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('prefix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('suffix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('sort_order', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('heading', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety_2', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('additional_data', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('last_update', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('inventory', ['Coin'])
def backwards(self, orm):
# Deleting model 'ProductType'
db.delete_table('inventory_producttype')
# Deleting model 'Product'
db.delete_table('inventory_product')
# Deleting model 'Coin'
db.delete_table('inventory_coin')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'inventory.coin': {
'Meta': {'object_name': 'Coin', '_ormbases': ['inventory.ProductType']},
'actual_year': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"}),
'additional_data': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']", 'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'default': "'us'", 'max_length': '2'}),
'denomination': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'die_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety_2': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'major_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'pcgs_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'prefix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'producttype_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['inventory.ProductType']", 'unique': 'True', 'primary_key': 'True'}),
'sort_order': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']", 'null': 'True', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'year_issued': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"})
},
'inventory.product': {
'Meta': {'object_name': 'Product'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.ProductType']", 'null': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '11', 'decimal_places': '2'})
},
'inventory.producttype': {
'Meta': {'object_name': 'ProductType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'market.marketcategory': {
'Meta': {'object_name': 'MarketCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'market.marketplace': {
'Meta': {'object_name': 'MarketPlace'},
'base_domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '92'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'template_prefix': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '92'})
},
'market.marketsubcategory': {
'Meta': {'unique_together': "(('parent', 'slug'),)", 'object_name': 'MarketSubCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subcategories'", 'null': 'True', 'to': "orm['market.MarketCategory']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '60', 'db_index': 'True'})
},
'shops.shop': {
'Meta': {'object_name': 'Shop'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'bids': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'39.29038,-76.61219'", 'max_length': '255'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['inventory']
| 76.240437
| 181
| 0.573825
| 1,447
| 13,952
| 5.422944
| 0.111265
| 0.110106
| 0.190901
| 0.272716
| 0.786798
| 0.784631
| 0.747419
| 0.720403
| 0.661654
| 0.561361
| 0
| 0.012016
| 0.182841
| 13,952
| 182
| 182
| 76.659341
| 0.676256
| 0.01154
| 0
| 0.148148
| 0
| 0
| 0.542843
| 0.328013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012346
| false
| 0.006173
| 0.024691
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a553a81b1d7bdf7e54e2eefdce19b67fef643fd
| 138
|
py
|
Python
|
cfdata/tabular/converters/__init__.py
|
carefree0910/carefree-data
|
ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19
|
[
"MIT"
] | 9
|
2020-10-25T11:52:34.000Z
|
2022-01-23T02:45:41.000Z
|
cfdata/tabular/converters/__init__.py
|
carefree0910/carefree-data
|
ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19
|
[
"MIT"
] | 2
|
2020-08-02T01:58:48.000Z
|
2021-02-26T11:24:19.000Z
|
cfdata/tabular/converters/__init__.py
|
carefree0910/carefree-data
|
ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19
|
[
"MIT"
] | 1
|
2021-11-04T14:34:13.000Z
|
2021-11-04T14:34:13.000Z
|
from .base import *
from .string import *
from .categorical import *
from .numerical import *
__all__ = ["Converter", "converter_dict"]
| 17.25
| 41
| 0.724638
| 16
| 138
| 5.9375
| 0.5625
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15942
| 138
| 7
| 42
| 19.714286
| 0.818966
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a752e0adb3dfdb8832eacdb68f81c47021fa651
| 378
|
gyp
|
Python
|
deps/libgdal/gyp-formats/ogr_mem.gyp
|
khrushjing/node-gdal-async
|
6546b0c8690f2db677d5385b40b407523503b314
|
[
"Apache-2.0"
] | 42
|
2021-03-26T17:34:52.000Z
|
2022-03-18T14:15:31.000Z
|
deps/libgdal/gyp-formats/ogr_mem.gyp
|
khrushjing/node-gdal-async
|
6546b0c8690f2db677d5385b40b407523503b314
|
[
"Apache-2.0"
] | 29
|
2021-06-03T14:24:01.000Z
|
2022-03-23T15:43:58.000Z
|
deps/libgdal/gyp-formats/ogr_mem.gyp
|
khrushjing/node-gdal-async
|
6546b0c8690f2db677d5385b40b407523503b314
|
[
"Apache-2.0"
] | 8
|
2021-05-14T19:26:37.000Z
|
2022-03-21T13:44:42.000Z
|
{
"includes": [
"../common.gypi"
],
"targets": [
{
"target_name": "libgdal_ogr_mem_frmt",
"type": "static_library",
"sources": [
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdatasource.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemlayer.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdriver.cpp"
],
"include_dirs": [
"../gdal/ogr/ogrsf_frmts/mem"
]
}
]
}
| 18.9
| 55
| 0.595238
| 42
| 378
| 5.119048
| 0.547619
| 0.130233
| 0.223256
| 0.316279
| 0.4
| 0.213953
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18254
| 378
| 19
| 56
| 19.894737
| 0.695793
| 0
| 0
| 0.105263
| 0
| 0
| 0.685185
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a7ff589828eca63a17e67bce0eb8c34992e953a
| 158
|
py
|
Python
|
{{cookiecutter.project_name}}/{{cookiecutter.app_name}}/extensions.py
|
DevAerial/flask-api-template
|
6d3f745f2dacb793c4bdc6aaaceb86eb472efe55
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_name}}/{{cookiecutter.app_name}}/extensions.py
|
DevAerial/flask-api-template
|
6d3f745f2dacb793c4bdc6aaaceb86eb472efe55
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_name}}/{{cookiecutter.app_name}}/extensions.py
|
DevAerial/flask-api-template
|
6d3f745f2dacb793c4bdc6aaaceb86eb472efe55
|
[
"MIT"
] | null | null | null |
from flask_marshmallow import Marshmallow{% if cookiecutter.use_celery == 'yes'%}
from celery import Celery
celery = Celery(){% endif %}
ma = Marshmallow()
| 22.571429
| 81
| 0.740506
| 19
| 158
| 6.052632
| 0.578947
| 0.208696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139241
| 158
| 6
| 82
| 26.333333
| 0.845588
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4397c55661379269054e0b0a47adf3a823197ee1
| 173
|
py
|
Python
|
website/sites/admin.py
|
vnaskos/Website
|
1c2adb0985f3932ddeca12025a2d216d2470cb63
|
[
"MIT"
] | null | null | null |
website/sites/admin.py
|
vnaskos/Website
|
1c2adb0985f3932ddeca12025a2d216d2470cb63
|
[
"MIT"
] | null | null | null |
website/sites/admin.py
|
vnaskos/Website
|
1c2adb0985f3932ddeca12025a2d216d2470cb63
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.]
from website.sites.models import Post
@admin.register(Post)
class TestAdmin2(admin.ModelAdmin):
pass
| 15.727273
| 38
| 0.768786
| 23
| 173
| 5.782609
| 0.695652
| 0.195489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006803
| 0.150289
| 173
| 11
| 39
| 15.727273
| 0.897959
| 0.156069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
43ab43b6738516044ebfd16ee957b6dda20ddd01
| 161
|
py
|
Python
|
python/test-deco-1-1.py
|
li-ma/homework
|
d75b1752a02bd028af0806683abe079c7b0a9b29
|
[
"Apache-2.0"
] | null | null | null |
python/test-deco-1-1.py
|
li-ma/homework
|
d75b1752a02bd028af0806683abe079c7b0a9b29
|
[
"Apache-2.0"
] | null | null | null |
python/test-deco-1-1.py
|
li-ma/homework
|
d75b1752a02bd028af0806683abe079c7b0a9b29
|
[
"Apache-2.0"
] | null | null | null |
def deco1(func):
print("before myfunc() called.")
func()
print("after myfunc() called.")
def myfunc():
print("myfunc() called.")
deco1(myfunc)
| 16.1
| 36
| 0.608696
| 19
| 161
| 5.157895
| 0.421053
| 0.367347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015504
| 0.198758
| 161
| 9
| 37
| 17.888889
| 0.744186
| 0
| 0
| 0
| 0
| 0
| 0.378882
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0
| 0.285714
| 0.428571
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
43b6c1b507adc1bb371518dff1d4802b73e3e1a5
| 434
|
py
|
Python
|
py/multiple_dispatch_example.py
|
coalpha/coalpha.github.io
|
8a620314a5c0bcbe2225d29f733379d181534430
|
[
"Apache-2.0"
] | null | null | null |
py/multiple_dispatch_example.py
|
coalpha/coalpha.github.io
|
8a620314a5c0bcbe2225d29f733379d181534430
|
[
"Apache-2.0"
] | 1
|
2020-04-12T07:48:18.000Z
|
2020-04-12T07:49:29.000Z
|
py/multiple_dispatch_example.py
|
coalpha/coalpha.github.io
|
8a620314a5c0bcbe2225d29f733379d181534430
|
[
"Apache-2.0"
] | 1
|
2020-09-30T05:27:07.000Z
|
2020-09-30T05:27:07.000Z
|
from typing import *
from multiple_dispatch import multiple_dispatch
@overload
@multiple_dispatch
def add(a: Literal[4, 6, 8], b):
raise TypeError("No adding 2, 4, 6, or 8!")
@overload
@multiple_dispatch
def add(a: int, b: str):
return f"int + str = {a} + {b}"
@overload
@multiple_dispatch
def add(a: int, b: int):
return a + b
@multiple_dispatch
def add(a, b):
return f"Any + Any = {a} + {b}"
print(add(2, "hello"))
| 18.083333
| 47
| 0.658986
| 72
| 434
| 3.888889
| 0.375
| 0.342857
| 0.271429
| 0.314286
| 0.442857
| 0.360714
| 0.25
| 0.25
| 0
| 0
| 0
| 0.022663
| 0.186636
| 434
| 23
| 48
| 18.869565
| 0.770538
| 0
| 0
| 0.388889
| 0
| 0
| 0.163594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.166667
| 0.5
| 0.055556
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
43c8749a8ff42646c3b9643c7de460258d1664ae
| 68
|
py
|
Python
|
TTBenchmark/check_benchmark.py
|
yuqil725/benchmark_lib
|
f404ff829d7b3a8bb0f6b00689038cf533bba83e
|
[
"MIT"
] | null | null | null |
TTBenchmark/check_benchmark.py
|
yuqil725/benchmark_lib
|
f404ff829d7b3a8bb0f6b00689038cf533bba83e
|
[
"MIT"
] | null | null | null |
TTBenchmark/check_benchmark.py
|
yuqil725/benchmark_lib
|
f404ff829d7b3a8bb0f6b00689038cf533bba83e
|
[
"MIT"
] | null | null | null |
def check_difference():
pass
def update_benchmark():
pass
| 9.714286
| 23
| 0.676471
| 8
| 68
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 68
| 6
| 24
| 11.333333
| 0.846154
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
43d8dcfde4fc817f885eb2d557c4f9603d6da4be
| 86
|
py
|
Python
|
src/FunctionApps/DevOps/tests/test_get_ip.py
|
CDCgov/prime-public-health-data-infrastructure
|
7e4849c3a486a84e94765bf0023b80261c510c57
|
[
"Apache-2.0"
] | 3
|
2022-02-24T18:16:39.000Z
|
2022-03-29T20:21:41.000Z
|
src/FunctionApps/DevOps/tests/test_get_ip.py
|
CDCgov/prime-public-health-data-infrastructure
|
7e4849c3a486a84e94765bf0023b80261c510c57
|
[
"Apache-2.0"
] | 17
|
2022-02-08T17:13:55.000Z
|
2022-03-28T16:49:00.000Z
|
src/FunctionApps/DevOps/tests/test_get_ip.py
|
CDCgov/prime-public-health-data-infrastructure
|
7e4849c3a486a84e94765bf0023b80261c510c57
|
[
"Apache-2.0"
] | 3
|
2022-02-27T23:12:50.000Z
|
2022-03-17T04:51:47.000Z
|
def test_get_ip_placeholder():
"""placeholder so pytest does not fail"""
pass
| 21.5
| 45
| 0.697674
| 12
| 86
| 4.75
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197674
| 86
| 3
| 46
| 28.666667
| 0.826087
| 0.406977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
78efdc29bbe17ba841a42c2ad2e6e9e8b6de242a
| 34
|
py
|
Python
|
tests/functional/test_calculator.py
|
bellanov/calculator
|
a66e68a368a5212247aeff3291c9cb8b508e91be
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_calculator.py
|
bellanov/calculator
|
a66e68a368a5212247aeff3291c9cb8b508e91be
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/test_calculator.py
|
bellanov/calculator
|
a66e68a368a5212247aeff3291c9cb8b508e91be
|
[
"Apache-2.0"
] | 1
|
2021-05-26T16:54:17.000Z
|
2021-05-26T16:54:17.000Z
|
"""TODO: Move the Threads Here"""
| 17
| 33
| 0.647059
| 5
| 34
| 4.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 34
| 1
| 34
| 34
| 0.758621
| 0.794118
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 1
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.