Dataset Viewer
Auto-converted to Parquet
| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
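The preview rows that follow list each record's values in the same column order as the table above. As a minimal sketch of how the schema can be inspected programmatically, the snippet below loads the dataset with the `datasets` library; the repository id is a hypothetical placeholder and the `train` split name is an assumption, since neither is given on this page.

```python
# Minimal sketch: loading the auto-converted Parquet dataset and inspecting
# the columns listed in the schema table above.
# NOTE: "user/dataset-name" is a placeholder repository id, and split="train"
# is an assumption -- substitute the actual values for this dataset.
from datasets import load_dataset

ds = load_dataset("user/dataset-name", split="train", streaming=True)

row = next(iter(ds))
print(sorted(row.keys()))        # column names, matching the schema table
print(row["lang"], row["size"])  # e.g. "Python", 190 for the first preview row

# Example: iterate over rows with a low comment fraction, using one of the
# precomputed quality signals (some signal columns can be null/None).
low_comment_rows = (
    r for r in ds
    if r["qsc_code_frac_chars_comments_quality_signal"] is not None
    and r["qsc_code_frac_chars_comments_quality_signal"] < 0.1
)
```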
d9c7f3fdaa6dbe4abf7e68c6052896f817807b98
190
py
Python
core/serializers.py
telminov/sonm-cdn-cms
e51107e3baed9e633e54db6cd7f784178f531b4a
[ "MIT" ]
1
2018-08-31T17:40:14.000Z
2018-08-31T17:40:14.000Z
core/serializers.py
telminov/sonm-cdn-cms
e51107e3baed9e633e54db6cd7f784178f531b4a
[ "MIT" ]
null
null
null
core/serializers.py
telminov/sonm-cdn-cms
e51107e3baed9e633e54db6cd7f784178f531b4a
[ "MIT" ]
null
null
null
from rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields = '__all__'
19
51
0.731579
20
190
6.7
0.75
0
0
0
0
0
0
0
0
0
0
0
0.215789
190
9
52
21.111111
0.899329
0
0
0
0
0
0.036842
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
d9c7f680a10afbb210d6a7c50f3b0ac7716821e0
190
py
Python
tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
19
2015-12-03T08:53:45.000Z
2022-03-31T02:09:43.000Z
tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
80
2017-11-25T07:57:32.000Z
2018-06-10T19:03:30.000Z
tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
6
2015-01-15T07:51:48.000Z
2020-06-18T14:47:48.000Z
input = """ c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. """ output = """ {a(2), c(2), d(2,2,1), okay(2), p(1)} """
14.615385
44
0.4
50
190
1.52
0.26
0.052632
0.078947
0.105263
0.605263
0.605263
0.368421
0.368421
0.368421
0.368421
0
0.104938
0.147368
190
12
45
15.833333
0.364198
0
0
0.181818
0
0.272727
0.831579
0.242105
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d9d5b48647e38ebb7586e30d71d263a91ce8bc1b
156
py
Python
src/zeep/wsse/__init__.py
bertonha/python-zeep
748f4e028db2ef498bc6dd1e60d3555b7688f08c
[ "MIT" ]
null
null
null
src/zeep/wsse/__init__.py
bertonha/python-zeep
748f4e028db2ef498bc6dd1e60d3555b7688f08c
[ "MIT" ]
null
null
null
src/zeep/wsse/__init__.py
bertonha/python-zeep
748f4e028db2ef498bc6dd1e60d3555b7688f08c
[ "MIT" ]
null
null
null
from .compose import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken # noqa
39
74
0.801282
17
156
7.352941
0.529412
0.128
0
0
0
0
0
0
0
0
0
0
0.147436
156
3
75
52
0.93985
0.089744
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
8a03ced3330b9102f19e53ae0f85a628054986d1
36
py
Python
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
tools/__init__.py
BranKein/Flask-template
3d8f43b3c44163e855c727de2a0dfe37d3b788f9
[ "MIT" ]
null
null
null
from . import ip __all__ = ['ip']
7.2
16
0.583333
5
36
3.4
0.8
0
0
0
0
0
0
0
0
0
0
0
0.25
36
4
17
9
0.62963
0
0
0
0
0
0.055556
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
8a06be2dde291c66efbc5f80746f557a0f2cecaa
336
py
Python
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
experiments/seidel-2d/tmp_files/6745.py
LoopTilingBenchmark/benchmark
52a3d2e70216552a498fd91de02a2fa9cb62122c
[ "BSD-2-Clause" ]
null
null
null
from chill import * source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/seidel-2d/kernel.c') destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/seidel-2d/tmp_files/6745.c') procedure('kernel_seidel_2d') loop(0) known(' n > 2 ') tile(0,2,16,2) tile(0,4,16,4)
30.545455
118
0.764881
59
336
4.271186
0.59322
0.095238
0.095238
0.119048
0.357143
0.357143
0.357143
0.357143
0.357143
0.357143
0
0.103448
0.050595
336
10
119
33.6
0.68652
0
0
0
0
0.25
0.669643
0.60119
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
8a2036147565ecfe3e374843c7669120715a456c
93
py
Python
run.py
pran01/AlgoVision
40e85f3c55266f43ee103dfa0852a63af306a8d4
[ "MIT" ]
33
2020-10-05T01:04:55.000Z
2021-06-24T01:52:31.000Z
run.py
learning-zones/AlgoVision
9261e00ecb2540d8bb950d47d670bb6b2c69db0f
[ "MIT" ]
14
2020-10-07T03:15:12.000Z
2021-01-15T11:53:29.000Z
run.py
learning-zones/AlgoVision
9261e00ecb2540d8bb950d47d670bb6b2c69db0f
[ "MIT" ]
9
2020-10-05T07:16:45.000Z
2021-03-01T15:44:31.000Z
from algovision import app if(__name__=="__main__"): app.run(debug=True,host='0.0.0.0')
18.6
38
0.688172
16
93
3.5
0.75
0.107143
0.107143
0
0
0
0
0
0
0
0
0.04878
0.11828
93
4
39
23.25
0.634146
0
0
0
0
0
0.16129
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
8a5de963629a6bc23b3e927dcbf31f83ecc1590d
171
py
Python
indexof.py
gnuchev/homework
4083d44561cc9738d3cd8da99f8ef91b69961b6c
[ "MIT" ]
null
null
null
indexof.py
gnuchev/homework
4083d44561cc9738d3cd8da99f8ef91b69961b6c
[ "MIT" ]
null
null
null
indexof.py
gnuchev/homework
4083d44561cc9738d3cd8da99f8ef91b69961b6c
[ "MIT" ]
null
null
null
def indexof(listofnames, value): if value in listofnames: value_index = listofnames.index(value) return(listofnames, value_index) else: return(-1)
28.5
46
0.684211
20
171
5.75
0.5
0.417391
0.365217
0
0
0
0
0
0
0
0
0.007519
0.222222
171
5
47
34.2
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0
0
0.2
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8a5eacf969c02364f5e4daefab7f03dd79ff6a0f
447
py
Python
programs/combine/jry2/treedef.py
lsrcz/SyGuS
5aab1b2c324d8a3c20e51f8acb2866190a1431d3
[ "MIT" ]
1
2021-07-11T08:32:32.000Z
2021-07-11T08:32:32.000Z
programs/combine/jry2/treedef.py
lsrcz/SyGuS
5aab1b2c324d8a3c20e51f8acb2866190a1431d3
[ "MIT" ]
null
null
null
programs/combine/jry2/treedef.py
lsrcz/SyGuS
5aab1b2c324d8a3c20e51f8acb2866190a1431d3
[ "MIT" ]
1
2020-12-20T16:08:10.000Z
2020-12-20T16:08:10.000Z
from jry2.semantics import Expr class TreeNode: pass class TreeLeaf(TreeNode): def __init__(self, term): self.term = term def getExpr(self): return self.term class TreeInnerNode(TreeNode): def __init__(self, pred, left, right): self.pred = pred self.left = left self.right = right def getExpr(self): return Expr('ite', self.pred, self.left.getExpr(), self.right.getExpr())
20.318182
80
0.630872
56
447
4.892857
0.357143
0.087591
0.109489
0.138686
0
0
0
0
0
0
0
0.003021
0.259508
447
21
81
21.285714
0.824773
0
0
0.133333
0
0
0.006711
0
0
0
0
0
0
1
0.266667
false
0.066667
0.066667
0.133333
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
8a6266df7a1375925ee79de0d3567238f763ecfa
165
py
Python
xlib/api/win32/oleaut32/oleaut32.py
jkennedyvz/DeepFaceLive
274c20808da089eb7fc0fc0e8abe649379a29ffe
[ "MIT" ]
null
null
null
xlib/api/win32/oleaut32/oleaut32.py
jkennedyvz/DeepFaceLive
274c20808da089eb7fc0fc0e8abe649379a29ffe
[ "MIT" ]
null
null
null
xlib/api/win32/oleaut32/oleaut32.py
jkennedyvz/DeepFaceLive
274c20808da089eb7fc0fc0e8abe649379a29ffe
[ "MIT" ]
null
null
null
from ctypes import POINTER, Structure from ..wintypes import VARIANT, dll_import @dll_import('OleAut32') def VariantInit( pvarg : POINTER(VARIANT) ) -> None: ...
20.625
56
0.739394
20
165
6
0.65
0.15
0
0
0
0
0
0
0
0
0
0.014085
0.139394
165
7
57
23.571429
0.830986
0
0
0
0
0
0.048485
0
0
0
0
0
0
1
0.25
false
0
0.75
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
4
8a7310d8abb463c70846c800ef296e8c1423ac2b
186
py
Python
src/events/cell_pressed.py
ArcosJuan/Get-out-of-my-fucking-maze
ca2cfeaaeecb6c6f583ad647d020f25176170805
[ "MIT" ]
2
2021-09-09T14:03:40.000Z
2021-11-03T03:35:55.000Z
src/events/cell_pressed.py
ArcosJuan/Get-out-of-my-fucking-maze
ca2cfeaaeecb6c6f583ad647d020f25176170805
[ "MIT" ]
null
null
null
src/events/cell_pressed.py
ArcosJuan/Get-out-of-my-fucking-maze
ca2cfeaaeecb6c6f583ad647d020f25176170805
[ "MIT" ]
null
null
null
from src.events import Event class CellPressed(Event): def __init__(self, position): self.position = position def get_position(self): return self.position
18.6
33
0.672043
22
186
5.454545
0.590909
0.3
0
0
0
0
0
0
0
0
0
0
0.252688
186
10
34
18.6
0.863309
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0.166667
0.833333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
8ac046daf66291ca73b420ce81a183abc787e157
51
py
Python
neptune/generated/swagger_client/path_constants.py
jiji-online/neptune-cli
50cf680a80d141497f9331ab7cdaee49fcb90b0c
[ "Apache-2.0" ]
null
null
null
neptune/generated/swagger_client/path_constants.py
jiji-online/neptune-cli
50cf680a80d141497f9331ab7cdaee49fcb90b0c
[ "Apache-2.0" ]
null
null
null
neptune/generated/swagger_client/path_constants.py
jiji-online/neptune-cli
50cf680a80d141497f9331ab7cdaee49fcb90b0c
[ "Apache-2.0" ]
null
null
null
REST_PATH = u"" WS_PATH = u"/api/notifications/v1"
17
34
0.705882
9
51
3.777778
0.777778
0.294118
0
0
0
0
0
0
0
0
0
0.022222
0.117647
51
2
35
25.5
0.733333
0
0
0
0
0
0.411765
0.411765
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
8ac2e2407dd1965a468039faf082dce81ec81f6c
109
py
Python
realfastapi/routes/endpoints/default.py
wborbajr/RealFastAPI
d97ca994c4c164387632cda814e80c026435a9f7
[ "MIT" ]
null
null
null
realfastapi/routes/endpoints/default.py
wborbajr/RealFastAPI
d97ca994c4c164387632cda814e80c026435a9f7
[ "MIT" ]
null
null
null
realfastapi/routes/endpoints/default.py
wborbajr/RealFastAPI
d97ca994c4c164387632cda814e80c026435a9f7
[ "MIT" ]
null
null
null
from fastapi import APIRouter router = APIRouter() @router.get("/") def working(): return {"Working"}
12.111111
29
0.669725
12
109
6.083333
0.75
0.410959
0
0
0
0
0
0
0
0
0
0
0.174312
109
8
30
13.625
0.811111
0
0
0
0
0
0.073395
0
0
0
0
0
0
1
0.2
false
0
0.2
0.2
0.6
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
8ac9b0e158167d7f3345bc07a8dd57de92905440
66
py
Python
scripts/get_file_name_as_variable.py
amin-henteti/airflow-dags
eb1e9a1a77d3c868e031cbe7420eae952ce5e767
[ "Apache-2.0" ]
null
null
null
scripts/get_file_name_as_variable.py
amin-henteti/airflow-dags
eb1e9a1a77d3c868e031cbe7420eae952ce5e767
[ "Apache-2.0" ]
null
null
null
scripts/get_file_name_as_variable.py
amin-henteti/airflow-dags
eb1e9a1a77d3c868e031cbe7420eae952ce5e767
[ "Apache-2.0" ]
null
null
null
import inspect def foo(): print(inspect.stack()[0][3]) foo()
13.2
31
0.621212
10
66
4.1
0.8
0
0
0
0
0
0
0
0
0
0
0.036364
0.166667
66
5
32
13.2
0.709091
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.5
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
76d842d33f2db656494e8fb701c74c89d920202e
182
py
Python
tests/test_command.py
vandana-11/cognito
4f92229511b265578def8e34d30575292070e584
[ "BSD-3-Clause" ]
null
null
null
tests/test_command.py
vandana-11/cognito
4f92229511b265578def8e34d30575292070e584
[ "BSD-3-Clause" ]
null
null
null
tests/test_command.py
vandana-11/cognito
4f92229511b265578def8e34d30575292070e584
[ "BSD-3-Clause" ]
null
null
null
from cognito.check import Check from cognito.table import Table import os import pytest import pandas as pd import numpy as np from os import path from sklearn import preprocessing
20.222222
33
0.82967
30
182
5.033333
0.5
0.145695
0
0
0
0
0
0
0
0
0
0
0.159341
182
8
34
22.75
0.986928
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
0a15bb92f32c4317216e7f1662783bb4852671eb
105
py
Python
school/admin/__init__.py
leyyin/university-SE
7cc3625bda787d2e79ab22f30d6f6e732ca9abb3
[ "MIT" ]
3
2015-03-12T15:50:58.000Z
2015-05-04T12:55:19.000Z
school/admin/__init__.py
leyyin/university-SE
7cc3625bda787d2e79ab22f30d6f6e732ca9abb3
[ "MIT" ]
2
2015-05-01T18:24:04.000Z
2015-05-15T15:58:47.000Z
school/admin/__init__.py
leyyin/university-SE
7cc3625bda787d2e79ab22f30d6f6e732ca9abb3
[ "MIT" ]
null
null
null
# contains any CRUD not related to strictly editing users info and courses info from .views import admin
35
79
0.809524
17
105
5
0.941176
0
0
0
0
0
0
0
0
0
0
0
0.171429
105
2
80
52.5
0.977011
0.733333
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
0a4c68a0832d4cee3f0250c6c84f885007935c0b
194
py
Python
5kyu/(5 kyu) Count IP Addresses/(5 kyu) Count IP Addresses.py
e1r0nd/codewars
9b05e32a26ee5f36a4b3f1e76a71e0c79b3c865b
[ "MIT" ]
49
2018-04-30T06:42:45.000Z
2021-07-22T16:39:02.000Z
5kyu/(5 kyu) Count IP Addresses/(5 kyu) Count IP Addresses.py
nis24jit/codewars-3
1a0d910af12f8af6e1070c31a30ba3c785a9b857
[ "MIT" ]
1
2020-08-31T02:36:53.000Z
2020-08-31T10:14:00.000Z
5kyu/(5 kyu) Count IP Addresses/(5 kyu) Count IP Addresses.py
nis24jit/codewars-3
1a0d910af12f8af6e1070c31a30ba3c785a9b857
[ "MIT" ]
25
2018-04-02T20:57:58.000Z
2021-05-28T15:24:51.000Z
def ips_between(start, end): calc = lambda n, m: (int(end.split(".")[n]) - int(start.split(".")[n])) * m return calc(0, 256 * 256 * 256) + calc(1, 256 * 256) + calc(2, 256) + calc(3, 1)
48.5
84
0.546392
34
194
3.088235
0.5
0.171429
0.190476
0
0
0
0
0
0
0
0
0.148387
0.201031
194
3
85
64.666667
0.529032
0
0
0
0
0
0.010309
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
0a4d54d89c32a47c57e2c8a928a39b69e030c881
35
py
Python
notebooks/_solutions/pandas_02_basic_operations28.py
rprops/Python_DS-WS
b2fc449a74be0c82863e5fcf1ddbe7d64976d530
[ "BSD-3-Clause" ]
65
2017-03-21T09:15:40.000Z
2022-02-01T23:43:08.000Z
notebooks/_solutions/pandas_02_basic_operations28.py
rprops/Python_DS-WS
b2fc449a74be0c82863e5fcf1ddbe7d64976d530
[ "BSD-3-Clause" ]
100
2016-12-15T03:44:06.000Z
2022-03-07T08:14:07.000Z
notebooks/_solutions/pandas_02_basic_operations28.py
rprops/Python_DS-WS
b2fc449a74be0c82863e5fcf1ddbe7d64976d530
[ "BSD-3-Clause" ]
52
2016-12-19T07:48:52.000Z
2022-02-19T17:53:48.000Z
df['Age'].hist() #bins=30, log=True
35
35
0.628571
7
35
3.142857
1
0
0
0
0
0
0
0
0
0
0
0.060606
0.057143
35
1
35
35
0.606061
0.485714
0
0
0
0
0.166667
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
0a616ab1db1fb980b42809561222fc9a899b77c4
153
py
Python
mikan/exceptions.py
dzzhvks94vd2/mikan
569b331cff02a089721fd6d0a430d5c2812b4934
[ "MIT" ]
1
2021-12-31T23:56:21.000Z
2021-12-31T23:56:21.000Z
mikan/exceptions.py
dzzhvks94vd2/mikan
569b331cff02a089721fd6d0a430d5c2812b4934
[ "MIT" ]
null
null
null
mikan/exceptions.py
dzzhvks94vd2/mikan
569b331cff02a089721fd6d0a430d5c2812b4934
[ "MIT" ]
null
null
null
class MikanException(Exception): """Generic Mikan exception""" class ConversionError(MikanException, ValueError): """Cannot convert a string"""
25.5
50
0.738562
14
153
8.071429
0.785714
0
0
0
0
0
0
0
0
0
0
0
0.137255
153
5
51
30.6
0.856061
0.30719
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
6a5b876bee110f96f947af456cbf93cb78d5e1bc
94
py
Python
nflfastpy/errors.py
hchaozhe/nflfastpy
11e4894d7fee4ff8baac2c08b000a39308b41143
[ "MIT" ]
47
2020-10-24T10:10:51.000Z
2022-03-07T19:48:05.000Z
nflfastpy/errors.py
jbf302/nflfastpy
c1e2365966e0f0f8efeb651be804d84caba57807
[ "MIT" ]
3
2021-05-03T11:58:00.000Z
2021-11-14T16:17:30.000Z
nflfastpy/errors.py
jbf302/nflfastpy
c1e2365966e0f0f8efeb651be804d84caba57807
[ "MIT" ]
7
2020-12-14T15:03:12.000Z
2021-11-17T23:41:37.000Z
""" Custom exceptions for nflfastpy module """ class SeasonNotFoundError(Exception): pass
15.666667
38
0.755319
9
94
7.888889
1
0
0
0
0
0
0
0
0
0
0
0
0.148936
94
6
39
15.666667
0.8875
0.404255
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
6a7bee943837f03f68168bdd6b1277bb1e2654a4
268
py
Python
db.py
RunnerPro/RunnerProApi
2e0aba17cba2a019b6d102bc4eac2fd60f164156
[ "MIT" ]
null
null
null
db.py
RunnerPro/RunnerProApi
2e0aba17cba2a019b6d102bc4eac2fd60f164156
[ "MIT" ]
null
null
null
db.py
RunnerPro/RunnerProApi
2e0aba17cba2a019b6d102bc4eac2fd60f164156
[ "MIT" ]
null
null
null
from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session from sqlalchemy.orm import sessionmaker from settings import DB_URI Session = sessionmaker(autocommit=False, autoflush=False, bind=create_engine(DB_URI)) session = scoped_session(Session)
33.5
85
0.850746
36
268
6.166667
0.444444
0.189189
0.153153
0.207207
0
0
0
0
0
0
0
0
0.093284
268
7
86
38.285714
0.91358
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6a8e0d766c7cdfdc409946fd3a6196d6981baf1d
55
py
Python
python/testData/resolve/TryExceptElse.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/resolve/TryExceptElse.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/resolve/TryExceptElse.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
try: name = "" except: pass else: print na<ref>me
9.166667
17
0.6
9
55
3.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.254545
55
6
17
9.166667
0.804878
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0.166667
0
null
null
0.166667
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
4
6a92b244776a352d3c8cb2387f8e203d0ce669c3
22
py
Python
avatar/__init__.py
yogeshkheri/geonode-avatar
293474f814117ae680278223c8cdf8d59c67862d
[ "BSD-3-Clause" ]
3
2021-10-17T20:37:40.000Z
2022-03-17T10:29:14.000Z
avatar/__init__.py
yogeshkheri/geonode-avatar
293474f814117ae680278223c8cdf8d59c67862d
[ "BSD-3-Clause" ]
4
2021-09-02T13:26:11.000Z
2022-03-16T12:26:36.000Z
avatar/__init__.py
yogeshkheri/geonode-avatar
293474f814117ae680278223c8cdf8d59c67862d
[ "BSD-3-Clause" ]
null
null
null
__version__ = '5.0.2'
11
21
0.636364
4
22
2.5
1
0
0
0
0
0
0
0
0
0
0
0.157895
0.136364
22
1
22
22
0.368421
0
0
0
0
0
0.227273
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6aa2b3de5b891e225cac65fc5b3ebe31165e5ef6
63
py
Python
svd/core/exc.py
epicosy/svd
baa91f57ee5bd51b0140d9d0b1b97ce39f18acc4
[ "MIT" ]
null
null
null
svd/core/exc.py
epicosy/svd
baa91f57ee5bd51b0140d9d0b1b97ce39f18acc4
[ "MIT" ]
null
null
null
svd/core/exc.py
epicosy/svd
baa91f57ee5bd51b0140d9d0b1b97ce39f18acc4
[ "MIT" ]
null
null
null
class SVDError(Exception): """Generic errors.""" pass
12.6
26
0.619048
6
63
6.5
1
0
0
0
0
0
0
0
0
0
0
0
0.222222
63
4
27
15.75
0.795918
0.238095
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
6aa4b1f3d6675f767aaa7329c04a4c62bcde0e63
232
py
Python
v1/status_updates/urls.py
DucPhamTV/Bank
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
[ "MIT" ]
94
2020-07-12T23:08:47.000Z
2022-03-05T14:00:01.000Z
v1/status_updates/urls.py
DucPhamTV/Bank
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
[ "MIT" ]
84
2020-07-13T23:30:50.000Z
2022-03-15T15:47:46.000Z
v1/status_updates/urls.py
DucPhamTV/Bank
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
[ "MIT" ]
63
2020-07-13T02:46:51.000Z
2021-11-26T09:29:29.000Z
from rest_framework.routers import SimpleRouter from .views.upgrade_notice import UpgradeNoticeViewSet router = SimpleRouter(trailing_slash=False) router.register('upgrade_notice', UpgradeNoticeViewSet, basename='upgrade_notice')
33.142857
82
0.857759
25
232
7.76
0.64
0.201031
0
0
0
0
0
0
0
0
0
0
0.068966
232
6
83
38.666667
0.898148
0
0
0
0
0
0.12069
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
6aae27568c85842fa9dbea1ace5c81d9190ab20e
12,603
py
Python
glance/tests/functional/test_api.py
arvindn05/glance
055d15a6ba5d132f649156eac0fc91f4cd2813e4
[ "Apache-2.0" ]
null
null
null
glance/tests/functional/test_api.py
arvindn05/glance
055d15a6ba5d132f649156eac0fc91f4cd2813e4
[ "Apache-2.0" ]
null
null
null
glance/tests/functional/test_api.py
arvindn05/glance
055d15a6ba5d132f649156eac0fc91f4cd2813e4
[ "Apache-2.0" ]
null
null
null
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Version-independent api tests""" import httplib2 from oslo_serialization import jsonutils from six.moves import http_client from glance.tests import functional # TODO(rosmaita): all the EXPERIMENTAL stuff in this file can be ripped out # when v2.6 becomes CURRENT in Queens def _generate_v1_versions(url): v1_versions = {'versions': [ { 'id': 'v1.1', 'status': 'DEPRECATED', 'links': [{'rel': 'self', 'href': url % '1'}], }, { 'id': 'v1.0', 'status': 'DEPRECATED', 'links': [{'rel': 'self', 'href': url % '1'}], }, ]} return v1_versions def _generate_v2_versions(url): version_list = [] version_list.extend([ { 'id': 'v2.6', 'status': 'CURRENT', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.5', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.4', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.3', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.2', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.1', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], }, { 'id': 'v2.0', 'status': 'SUPPORTED', 'links': [{'rel': 'self', 'href': url % '2'}], } ]) v2_versions = {'versions': version_list} return v2_versions def _generate_all_versions(url): v1 = _generate_v1_versions(url) v2 = _generate_v2_versions(url) all_versions = {'versions': v2['versions'] + v1['versions']} return all_versions class TestApiVersions(functional.FunctionalTest): def test_version_configurations(self): """Test that versioning is handled properly through all channels""" # v1 and v2 api enabled self.start_servers(**self.__dict__.copy()) url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = _generate_all_versions(url) # Verify version choices returned. path = 'http://%s:%d' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(versions, content) def test_v2_api_configuration(self): self.api_server.enable_v1_api = False self.api_server.enable_v2_api = True self.start_servers(**self.__dict__.copy()) url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = _generate_v2_versions(url) # Verify version choices returned. path = 'http://%s:%d' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(versions, content) def test_v1_api_configuration(self): self.api_server.enable_v1_api = True self.api_server.enable_v2_api = False self.start_servers(**self.__dict__.copy()) url = 'http://127.0.0.1:%d/v%%s/' % self.api_port versions = _generate_v1_versions(url) # Verify version choices returned. 
path = 'http://%s:%d' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(versions, content) class TestApiPaths(functional.FunctionalTest): def setUp(self): super(TestApiPaths, self).setUp() self.start_servers(**self.__dict__.copy()) url = 'http://127.0.0.1:%d/v%%s/' % self.api_port self.versions = _generate_all_versions(url) images = {'images': []} self.images_json = jsonutils.dumps(images) def test_get_root_path(self): """Assert GET / with `no Accept:` header. Verify version choices returned. Bug lp:803260 no Accept header causes a 500 in glance-api """ path = 'http://%s:%d' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_images_path(self): """Assert GET /images with `no Accept:` header. Verify version choices returned. """ path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_v1_images_path(self): """GET /v1/images with `no Accept:` header. Verify empty images list returned. """ path = 'http://%s:%d/v1/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content = http.request(path, 'GET') self.assertEqual(http_client.OK, response.status) def test_get_root_path_with_unknown_header(self): """Assert GET / with Accept: unknown header Verify version choices returned. Verify message in API log about unknown accept header. """ path = 'http://%s:%d/' % ('127.0.0.1', self.api_port) http = httplib2.Http() headers = {'Accept': 'unknown'} response, content_json = http.request(path, 'GET', headers=headers) self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_root_path_with_openstack_header(self): """Assert GET / with an Accept: application/vnd.openstack.images-v1 Verify empty image list returned """ path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() headers = {'Accept': 'application/vnd.openstack.images-v1'} response, content = http.request(path, 'GET', headers=headers) self.assertEqual(http_client.OK, response.status) self.assertEqual(self.images_json, content.decode()) def test_get_images_path_with_openstack_header(self): """Assert GET /images with a `Accept: application/vnd.openstack.compute-v1` header. Verify version choices returned. Verify message in API log about unknown accept header. 
""" path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() headers = {'Accept': 'application/vnd.openstack.compute-v1'} response, content_json = http.request(path, 'GET', headers=headers) self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_v10_images_path(self): """Assert GET /v1.0/images with no Accept: header Verify version choices returned """ path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) def test_get_v1a_images_path(self): """Assert GET /v1.a/images with no Accept: header Verify version choices returned """ path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) def test_get_va1_images_path(self): """Assert GET /va.1/images with no Accept: header Verify version choices returned """ path = 'http://%s:%d/va.1/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_versions_path(self): """Assert GET /versions with no Accept: header Verify version choices returned """ path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.OK, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_versions_path_with_openstack_header(self): """Assert GET /versions with the `Accept: application/vnd.openstack.images-v1` header. Verify version choices returned. """ path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port) http = httplib2.Http() headers = {'Accept': 'application/vnd.openstack.images-v1'} response, content_json = http.request(path, 'GET', headers=headers) self.assertEqual(http_client.OK, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_v1_versions_path(self): """Assert GET /v1/versions with `no Accept:` header Verify 404 returned """ path = 'http://%s:%d/v1/versions' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content = http.request(path, 'GET') self.assertEqual(http_client.NOT_FOUND, response.status) def test_get_versions_choices(self): """Verify version choices returned""" path = 'http://%s:%d/v10' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_images_path_with_openstack_v2_header(self): """Assert GET /images with a `Accept: application/vnd.openstack.compute-v2` header. Verify version choices returned. Verify message in API log about unknown version in accept header. 
""" path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() headers = {'Accept': 'application/vnd.openstack.images-v10'} response, content_json = http.request(path, 'GET', headers=headers) self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content) def test_get_v12_images_path(self): """Assert GET /v1.2/images with `no Accept:` header Verify version choices returned """ path = 'http://%s:%d/v1.2/images' % ('127.0.0.1', self.api_port) http = httplib2.Http() response, content_json = http.request(path, 'GET') self.assertEqual(http_client.MULTIPLE_CHOICES, response.status) content = jsonutils.loads(content_json.decode()) self.assertEqual(self.versions, content)
39.261682
78
0.614933
1,536
12,603
4.897786
0.120443
0.063804
0.014622
0.017546
0.793301
0.768045
0.721521
0.706367
0.677788
0.666091
0
0.027053
0.246211
12,603
320
79
39.384375
0.764842
0.190669
0
0.5625
0
0
0.124477
0.014488
0
0
0
0.003125
0.153846
1
0.105769
false
0
0.019231
0
0.149038
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0a82a3ff8df3d7d05c880b80e09b0d2ae4679de0
16,834
py
Python
ruleex/hypinv/model.py
rohancode/ruleex_modified
ec974e7811fafc0c06d4d2c53b4e2898dd6b7305
[ "Apache-2.0" ]
18
2019-09-19T09:50:52.000Z
2022-03-20T13:59:20.000Z
ruleex/hypinv/model.py
rohancode/ruleex_modified
ec974e7811fafc0c06d4d2c53b4e2898dd6b7305
[ "Apache-2.0" ]
3
2020-10-31T05:15:32.000Z
2022-02-10T00:34:05.000Z
ruleex/hypinv/model.py
rohancode/ruleex_modified
ec974e7811fafc0c06d4d2c53b4e2898dd6b7305
[ "Apache-2.0" ]
7
2020-12-06T20:55:50.000Z
2021-12-11T18:14:51.000Z
from gtrain import Model import numpy as np import tensorflow as tf class NetForHypinv(Model): """ Implementaion of the crutial function for the HypINV algorithm. Warning: Do not use this class but implement its subclass, for example see FCNetForHypinv """ def __init__(self, weights): self.eval_session = None self.grad_session = None self.initial_x = None self.center = None self.weights = weights self.out_for_eval = None #(going to be filled in build_for_eval method) self.boundary_out_for_eval = None self.trained_x = None self.training_class_index = None self.x = None # tf variable for inversion (going to be filled in build method) self.x_for_eval = None self.out = None self.boundary_out = None # list of tf tensorf for each class of softmax class vs others output self.loss = None self.boundary_loss = None self.t = None #target self.boundary_t = None self.x1 = None # this attribute is used of purposes of modified loss function def __del__(self): # close arr sessions if self.eval_session: self.eval_session.close() if self.grad_session: self.grad_session.close() def set_initial_x(self, initial_x): # sets starting point for the search of the closest point self.initial_x = initial_x def set_center(self, center): # sets center point self.center = center / np.linalg.norm(center) def set_x1(self, x1): # sets x1 to which we want to found the cosest point x0 self.x1 = x1 def has_modified_loss(self): pass # if uses modified loss then it returns true def set_initial_x_in_session(self, x, session=None): # sets initial x in certain session if session is None: self.set_initial_x(x) else: pass # overide this method def eval(self, x): if len(x.shape) == 1: x = x.reshape((1,len(x))) if not self.eval_session: self.eval_session = tf.Session() with self.eval_session.as_default(): self.build_for_eval() self.eval_session.run(tf.global_variables_initializer()) return self.eval_session.run(self.out_for_eval, {self.x_for_eval: x}) def boundary_eval(self, x, class_index): # evaluates binary classificaitons class_index and other classes if not self.eval_session: self.eval_session = tf.Session() with self.eval_session.as_default(): self.build_for_eval() self.eval_session.run(tf.global_variables_initializer()) return self.eval_session.run(self.boundary_out_for_eval[class_index], {self.x_for_eval: x}) def get_boundary_gradient(self, x, class_index): # computes gradient of the boundary for specified class_index if not self.grad_session: self.grad_session = tf.Session() with self.grad_session.as_default(): self.build_for_eval() self.grad = list() for i in range(len(self.weights[0][-1][0])): self.grad.append(tf.gradients(self.boundary_out_for_eval[i], [self.x_for_eval])[0]) self.grad_x = self.x_for_eval return self.grad_session.run(self.grad[class_index], {self.grad_x: x}) def build_for_eval(self): # build model for evaluation pass #override this method (fill self.out_for_eval) def train_ended(self, session): self.trained_x = session.run(self.x) def build(self): # build model for training pass #override this method (fill self.x, self.out) def set_train_class(self, class_index): # sets class of the x1 self.training_class_index = class_index # overided methods from gtrain.Model def get_loss(self): if self.training_class_index is None: return self.loss else: return self.boundary_loss[self.training_class_index] def get_hits(self): return self.get_loss() def get_count(self): return self.get_loss() def get_train_summaries(self): return [] def get_dev_summaries(self): return [] def get_placeholders(self): if 
self.training_class_index is None: return [self.t] else: return [self.boundary_t] #________________________________________EXAMPLES_OF_NetForHypinv_CLASS_____________________________________________ class FCNetForHypinv(NetForHypinv): """ Implementation of multi layer perceptron to by used in HypINV rule extraction algorithm """ def __init__(self, weights, function=tf.sigmoid, use_modified_loss=False, mu = 0.01): """ :param weights: saved as [list of weights for layers][0 weight, 1 bias] :param function: tf function for propagation. For example tf.nn.sigmoid, tf.atan :param use_modified_loss: weather the modified loss should be used :param mu: factor of the penalty terms that specified the distance between x0 and x1 and the distance x1 from the boundary """ super(FCNetForHypinv, self).__init__(weights) self.function = function self.layer_sizes = [len(self.weights[0][0])] for bias in weights[1]: self.layer_sizes.append(len(bias)) self.num_classes = self.layer_sizes[-1] self.initial_x = np.zeros([1, self.layer_sizes[0]]) self.use_modified_loss = use_modified_loss self.mu = mu def build(self): with tf.name_scope("Input"): if self.center is not None: self.point_weights = tf.Variable(self.center.reshape((1, len(self.center))), dtype=tf.float64, trainable=False, name="Boundary_point") init_factor = self.center init_factor[init_factor!=0] = self.initial_x[init_factor!=0] / self.center[init_factor!=0] self.factor = tf.Variable(init_factor.reshape((1, len(self.center))), dtype=tf.float64, name="factor") else: self.point_weights = tf.Variable(self.initial_x.reshape((1, len(self.initial_x))), dtype=tf.float64, trainable=False, name="Boundary_point") self.factor = tf.Variable(np.ones((1, len(self.center))), dtype=tf.float64, name="factor") self.x = self.point_weights * self.factor with tf.name_scope("Target"): if self.use_modified_loss: x1_constant = tf.constant(self.x1.reshape((1, len(self.x1))), dtype=tf.float64) self.t = tf.placeholder(tf.float64, shape=[None, self.num_classes], name="Target_output") self.boundary_t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_boundary_output") with tf.name_scope("FC_net"): flowing_x = self.x for i, _ in enumerate(self.weights[0]): with tf.name_scope("layer_{}".format(i)): W = tf.constant(self.weights[0][i], name="Weight_{}".format(i), dtype=tf.float64) b = tf.constant(self.weights[1][i], name="Bias_{}".format(i), dtype=tf.float64) flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b)) y = flowing_x self.out = tf.nn.softmax(y) with tf.name_scope("Binary_class_output"): self.boundary_out = list() for i in range(self.num_classes): mask = True+np.zeros(self.num_classes, dtype=np.bool) mask[i] = False x0 = self.out[:,i] x1 = tf.reduce_max(tf.boolean_mask(self.out, mask, axis=1), axis=1) s = x0+x1 out = tf.stack([x0/s, x1/s], axis=1) self.boundary_out.append(out) with tf.name_scope("Loss_functions"): self.loss = tf.reduce_mean( tf.nn.l2_loss(self.out-self.t), name="loss") with tf.name_scope("Binary_class_loss"): self.boundary_loss = list() if self.use_modified_loss: for i in range(self.num_classes): self.boundary_loss.append( tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i]-self.boundary_t)) + self.mu * tf.reduce_mean(tf.nn.l2_loss(self.x - x1_constant)) ) else: for i in range(self.num_classes): self.boundary_loss.append( tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i] - self.boundary_t)) ) def set_initial_x_in_session(self, x, session=None): if session is None: self.set_initial_x(x) else: if self.center is None: session.run([ 
self.point_weights.assign(x.reshape((1, len(x)))), self.factor.assign(np.ones((1, len(x)))) ]) else: init_factor = self.center init_factor[init_factor!=0] = x[init_factor!=0] / self.center[init_factor!=0] session.run(self.factor.assign(init_factor.reshape((1,len(init_factor))))) def build_for_eval(self): with tf.name_scope("eInput"): self.x_for_eval = tf.placeholder(tf.float32, shape=[None, len(self.weights[0][0])])#tf.Variable(tf.constant(self.initial_x), name="Boundary_point") with tf.name_scope("eFC_net"): flowing_x = self.x_for_eval for i, _ in enumerate(self.weights[0]): W = tf.constant(self.weights[0][i], name="eWeight_{}".format(i)) b = tf.constant(self.weights[1][i], name="eBias_{}".format(i)) flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b), name="elayer_{}".format(i)) y = flowing_x self.out_for_eval = tf.nn.softmax(y) with tf.name_scope("Binary_class_output"): self.boundary_out_for_eval = list() for i in range(self.num_classes): mask = True+np.zeros(self.num_classes, dtype=np.bool) mask[i] = False x0 = self.out_for_eval[:, i] x1 = tf.reduce_max(tf.boolean_mask(self.out_for_eval, mask, axis=1), axis=1) s = x0+x1 out = tf.stack([x0/s, x1/s], axis=1) self.boundary_out_for_eval.append(out) def has_modified_loss(self): return self.use_modified_loss def name(self): return "Hypinv_FC_net_{}".format("-".join([str(ls) for ls in self.layer_sizes])) class FCNetForHypinvBinary(FCNetForHypinv): """ Implementation of multi layer perceptron to by used in HypINV rule extraction algorithm The task is simplified to the binary classificaiton base_class_index against the other classes """ def __init__(self, weights, base_class_index, function=tf.sigmoid, use_modified_loss=False, mu = 0.01): """ :param weights: saved as [list of weights for layers][0 weight, 1 bias] :param base_class_index: an index of the class which is used as the base class :param function: tf function for propagation. 
For example tf.nn.sigmoid, tf.atan :param use_modified_loss: weather the modified loss should be used :param mu: factor of the penalty terms that specified the distance between x0 and x1 and the distance x1 from the boundary """ super(FCNetForHypinvBinary, self).__init__(weights) self.base_class_index = base_class_index self.function = function self.layer_sizes = [len(self.weights[0][0])] for bias in weights[1]: self.layer_sizes.append(len(bias)) self.num_classes = self.layer_sizes[-1] self.initial_x = np.zeros([1, self.layer_sizes[0]]) self.use_modified_loss = use_modified_loss self.mu = mu def build(self): with tf.name_scope("Input"): self.init_point = tf.Variable(self.initial_x.reshape((1, len(self.initial_x))), dtype=tf.float64, trainable=False, name="Boundary_point") self.factor = tf.Variable(np.ones((1, len(self.initial_x))), dtype=tf.float64, name="factor") self.x = self.init_point * self.factor with tf.name_scope("Target"): if self.use_modified_loss: x1_constant = tf.constant(self.x1.reshape((1, len(self.x1))), dtype=tf.float64) self.t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_output") self.boundary_t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_boundary_output") with tf.name_scope("FC_net"): flowing_x = self.x for i, _ in enumerate(self.weights[0]): with tf.name_scope("layer_{}".format(i)): W = tf.constant(self.weights[0][i], name="Weight_{}".format(i), dtype=tf.float64) b = tf.constant(self.weights[1][i], name="Bias_{}".format(i), dtype=tf.float64) flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b)) y = flowing_x full_out = tf.nn.softmax(y) with tf.name_scope("Binary_class_output"): self.boundary_out = list() mask = True+np.zeros(self.num_classes, dtype=np.bool) mask[self.base_class_index] = False x0 = full_out[:,self.base_class_index] x1 = tf.reduce_max(tf.boolean_mask(full_out, mask, axis=1), axis=1) s = x0+x1 self.out = tf.stack([x0/s, x1/s], axis=1) self.boundary_out.append(self.out) self.boundary_out.append(tf.stack([x1/s, x0/s], axis=1)) with tf.name_scope("Loss_functions"): self.loss = tf.reduce_mean( tf.nn.l2_loss(self.out-self.t), name="loss") with tf.name_scope("Binary_class_loss"): self.boundary_loss = list() if self.use_modified_loss: for i in range(2): self.boundary_loss.append( tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i]-self.boundary_t)) + self.mu * tf.reduce_mean(tf.nn.l2_loss(self.x - x1_constant)) ) else: for i in range(2): self.boundary_loss.append( tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i] - self.boundary_t)) ) def build_for_eval(self): with tf.name_scope("eInput"): self.x_for_eval = tf.placeholder(tf.float32, shape=[None, len(self.weights[0][0])])#tf.Variable(tf.constant(self.initial_x), name="Boundary_point") with tf.name_scope("eFC_net"): flowing_x = self.x_for_eval for i, _ in enumerate(self.weights[0]): W = tf.constant(self.weights[0][i], name="eWeight_{}".format(i)) b = tf.constant(self.weights[1][i], name="eBias_{}".format(i)) flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b), name="elayer_{}".format(i)) y = flowing_x full_out = tf.nn.softmax(y) with tf.name_scope("Binary_class_output"): self.boundary_out_for_eval = list() mask = True+np.zeros(self.num_classes, dtype=np.bool) mask[self.base_class_index] = False x0 = full_out[:, self.base_class_index] x1 = tf.reduce_max(tf.boolean_mask(full_out, mask, axis=1), axis=1) s = x0+x1 self.out_for_eval = tf.stack([x0/s, x1/s], axis=1) self.boundary_out_for_eval.append(self.out_for_eval) self.boundary_out_for_eval.append(tf.stack([x1/s, x0/s], 
axis=1)) def get_boundary_gradient(self, x, class_index): if not self.grad_session: self.grad_session = tf.Session() with self.grad_session.as_default(): self.build_for_eval() self.grad = list() for i in range(2): self.grad.append(tf.gradients(self.boundary_out_for_eval[i], [self.x_for_eval])[0]) self.grad_x = self.x_for_eval return self.grad_session.run(self.grad[class_index], {self.grad_x: x}) def has_modified_loss(self): return self.use_modified_loss def name(self): return "Hypinv_FC_net_{}".format("-".join([str(ls) for ls in self.layer_sizes]))
45.010695
159
0.594214
2,283
16,834
4.142357
0.096364
0.026647
0.021148
0.031723
0.769271
0.744528
0.716824
0.711113
0.69134
0.663424
0
0.014812
0.294167
16,834
373
160
45.131367
0.781097
0.13342
0
0.662116
0
0
0.032796
0.003057
0
0
0
0
0
1
0.109215
false
0.013652
0.010239
0.027304
0.1843
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0a9876a51a89bf0aa93f351a61986d7fa1facb0f
211
py
Python
tests/asp/weakConstraints/testcase13.bug.weakconstraints.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
19
2015-12-03T08:53:45.000Z
2022-03-31T02:09:43.000Z
tests/asp/weakConstraints/testcase13.bug.weakconstraints.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
80
2017-11-25T07:57:32.000Z
2018-06-10T19:03:30.000Z
tests/asp/weakConstraints/testcase13.bug.weakconstraints.gringo.test.py
bernardocuteri/wasp
05c8f961776dbdbf7afbf905ee00fc262eba51ad
[ "Apache-2.0" ]
6
2015-01-15T07:51:48.000Z
2020-06-18T14:47:48.000Z
input = """ 2 18 3 0 3 19 20 21 1 1 1 0 18 2 23 3 0 3 19 24 25 1 1 2 1 21 23 3 5 21 19 20 24 25 0 0 6 0 5 5 21 19 20 24 25 1 1 1 1 1 0 21 a 19 b 20 c 24 d 25 e 28 f 0 B+ 0 B- 1 0 1 """ output = """ COST 1@1 """
8.115385
32
0.540284
75
211
1.52
0.306667
0.140351
0.105263
0.087719
0.192982
0.192982
0
0
0
0
0
0.694656
0.379147
211
25
33
8.44
0.175573
0
0
0.08
0
0
0.853081
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
0ac88c66372990e2da39877dd262a4baa72b4bfd
791
py
Python
yxtx/myApp/migrations/0017_chat.py
wjh112233/yxtx
f118c2b9983ca48b099f2c328487e23f5430303f
[ "Apache-2.0" ]
null
null
null
yxtx/myApp/migrations/0017_chat.py
wjh112233/yxtx
f118c2b9983ca48b099f2c328487e23f5430303f
[ "Apache-2.0" ]
null
null
null
yxtx/myApp/migrations/0017_chat.py
wjh112233/yxtx
f118c2b9983ca48b099f2c328487e23f5430303f
[ "Apache-2.0" ]
null
null
null
# Generated by Django 3.0.2 on 2020-03-17 08:44 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('myApp', '0016_usergroup_buyer'), ] operations = [ migrations.CreateModel( name='Chat', fields=[ ('id', models.CharField(max_length=31, primary_key=True, serialize=False)), ('chatinfo', models.CharField(max_length=20000)), ('shopid', models.CharField(max_length=30)), ('user1', models.CharField(max_length=50)), ('user2', models.CharField(max_length=50)), ('name1', models.CharField(max_length=50)), ('name2', models.CharField(max_length=50)), ], ), ]
30.423077
91
0.556258
80
791
5.375
0.6
0.244186
0.293023
0.390698
0.24186
0
0
0
0
0
0
0.072464
0.302149
791
25
92
31.64
0.706522
0.05689
0
0
1
0
0.087366
0
0
0
0
0
0
1
0
false
0
0.052632
0
0.210526
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e4011ff0a2fe000023c186be9341efbe90bde007
57
py
Python
formfyxer/__init__.py
SuffolkLITLab/FormFyxer
00a6a70b30f1899fc5273de1001f1f57c3728f60
[ "MIT" ]
1
2022-03-07T23:22:00.000Z
2022-03-07T23:22:00.000Z
formfyxer/__init__.py
SuffolkLITLab/FormFyxer
00a6a70b30f1899fc5273de1001f1f57c3728f60
[ "MIT" ]
32
2022-02-10T17:33:58.000Z
2022-03-23T18:27:08.000Z
formfyxer/__init__.py
SuffolkLITLab/FormFyxer
00a6a70b30f1899fc5273de1001f1f57c3728f60
[ "MIT" ]
null
null
null
from .lit_explorer import * from .pdf_wrangling import *
19
28
0.789474
8
57
5.375
0.75
0
0
0
0
0
0
0
0
0
0
0
0.140351
57
2
29
28.5
0.877551
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
7c0e9d465eeddf2a8eeee673a92ff1e660a22216
57
py
Python
plans/config.py
datopian/plans
12bd9ff6f725703e7a73f3ad90680f5ade8cebdf
[ "MIT" ]
3
2019-11-18T12:04:27.000Z
2020-03-07T02:45:45.000Z
plans/config.py
datopian/plans
12bd9ff6f725703e7a73f3ad90680f5ade8cebdf
[ "MIT" ]
null
null
null
plans/config.py
datopian/plans
12bd9ff6f725703e7a73f3ad90680f5ade8cebdf
[ "MIT" ]
null
null
null
import os database_url = os.environ.get('DATABASE_URL')
14.25
45
0.77193
9
57
4.666667
0.666667
0.52381
0
0
0
0
0
0
0
0
0
0
0.105263
57
3
46
19
0.823529
0
0
0
0
0
0.210526
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
7c2377aec1cdd1edd01522b34885f68b9680468a
82
py
Python
src/app/database/__init__.py
roch1990/aiohttp-blog
32e7b76b5b293d4517631ea82dfa2b268a1662eb
[ "MIT" ]
20
2020-02-29T19:03:31.000Z
2022-02-18T21:13:12.000Z
src/app/database/__init__.py
roch1990/aiohttp-blog
32e7b76b5b293d4517631ea82dfa2b268a1662eb
[ "MIT" ]
465
2020-02-29T19:08:18.000Z
2022-03-18T22:21:49.000Z
src/app/database/__init__.py
roch1990/aiohttp-blog
32e7b76b5b293d4517631ea82dfa2b268a1662eb
[ "MIT" ]
26
2020-11-26T09:00:03.000Z
2022-02-16T04:20:53.000Z
from sqlalchemy.ext.declarative import declarative_base Base = declarative_base()
27.333333
55
0.853659
10
82
6.8
0.6
0.441176
0
0
0
0
0
0
0
0
0
0
0.085366
82
3
56
27.333333
0.906667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4