Schema (113 columns):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path, …_repo_name, …_repo_head_hexsha, …_repo_licenses, max_issues_count, max_issues_repo_issues_event_min/max_datetime | same dtypes as the max_stars block |
| max_forks_repo_path, …_repo_name, …_repo_head_hexsha, …_repo_licenses, max_forks_count, max_forks_repo_forks_event_min/max_datetime | same dtypes as the max_stars block |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal … qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words … qsc_codepython_frac_lines_print (the same 41 signal names without the `_quality_signal` suffix) | int64, except qsc_code_frac_words_unique and qsc_code_frac_lines_string_concat, which are null throughout this sample |
| effective | string |
| hits | int64 |

In the row summaries below, signal names are shortened by dropping the `qsc_code_`/`qsc_codepython_` prefix and the `_quality_signal` suffix; any signal not listed for a row has the value 0.
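In every row of this sample the suffix-less raw columns behave as 0/1 filter flags, exactly three are set per row, and `hits` equals their sum. A minimal sketch of how a local export of this preview might be loaded and filtered with pandas — the file name `sample.parquet` and the filter choices are illustrative assumptions, not part of the dataset card:

```python
import pandas as pd

# Hypothetical local parquet export of this split; the real source location
# is not given in this preview.
df = pd.read_parquet("sample.parquet")

# Keep Python files that parse (cate_ast == 1), drop rows flagged as
# auto-generated, and rank the rest by 5-gram duplication (lower = less
# repetitive boilerplate).
mask = (
    (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
)
cols = ["max_stars_repo_name", "max_stars_repo_path", "size", "hits"]
ranked = df.loc[mask].sort_values("qsc_code_frac_chars_dupe_5grams_quality_signal")
print(ranked[cols].head())
```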
hexsha: e012a92e1f872614d01a6331fee5e35c430a31f7 | size: 261 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | modules/moduleBase.py | saintaardvark/glouton-satnogs-data-downloader | dc8671340f558b1a21b41b9b04bab05fc15c7809 | ["MIT"] | null | null | null |
| max_issues | modules/moduleBase.py | saintaardvark/glouton-satnogs-data-downloader | dc8671340f558b1a21b41b9b04bab05fc15c7809 | ["MIT"] | null | null | null |
| max_forks | modules/moduleBase.py | saintaardvark/glouton-satnogs-data-downloader | dc8671340f558b1a21b41b9b04bab05fc15c7809 | ["MIT"] | null | null | null |

content:
```python
from infrastructure.satnogClient import SatnogClient
import os


class ModuleBase:
    def __init__(self, working_dir):
        self.working_dir = working_dir

    def runAfterDownload(self, file_name, full_path, observation):
        raise NotImplementedError()
```
avg_line_length: 29 | max_line_length: 66 | alphanum_fraction: 0.762452
quality signals (unlisted = 0): num_words 29, num_chars 261, mean_word_length 6.551724, frac_words_unique 0.689655, frac_chars_top_2grams 0.157895, frac_chars_top_3grams 0.147368, frac_chars_whitespace 0.176245, size_file_byte 261, num_lines 9, num_chars_line_max 67, num_chars_line_mean 29, frac_chars_alphabet 0.883721, cate_ast 1, frac_lines_func_ratio 0.285714, cate_var_zero false, frac_lines_import 0.285714, score_lines_no_logic 0.714286
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): num_words, num_lines, frac_lines_func_ratio
effective: 0 | hits: 3
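The three summary columns can be sanity-checked directly against `content`. A small sketch, under the assumption (a guess, not a documented definition) that `avg_line_length` is characters divided by lines and `alphanum_fraction` the share of alphanumeric characters — for the row above, 261 characters over the signalled 9 lines gives exactly the listed value of 29:

```python
def summary_signals(content: str) -> dict:
    """Recompute the simple per-file statistics for one row's content."""
    lines = content.splitlines()
    return {
        "size": len(content.encode("utf-8")),
        "num_lines": len(lines),
        "avg_line_length": len(content) / max(len(lines), 1),
        "max_line_length": max((len(line) for line in lines), default=0),
        "alphanum_fraction": sum(c.isalnum() for c in content) / max(len(content), 1),
    }
```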
hexsha: e0251d7f1fc5a3340792a778c923482bb49bcf13 | size: 316 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | lite/__init__.py | CleverInsight/sparx-lite | 1b729e8d11292e9737d57e092ee8916999ab1338 | ["MIT"] | null | null | null |
| max_issues | lite/__init__.py | CleverInsight/sparx-lite | 1b729e8d11292e9737d57e092ee8916999ab1338 | ["MIT"] | null | null | null |
| max_forks | lite/__init__.py | CleverInsight/sparx-lite | 1b729e8d11292e9737d57e092ee8916999ab1338 | ["MIT"] | null | null | null |

content:
```python
import os
from tornado.template import Template

__SNIPPET__ = os.path.join(os.path.dirname(os.path.abspath(__file__)), '_snippet')


def T(name, **kw):
    t = Template(open(os.path.join(__SNIPPET__, name + '.html'), 'rb').read())
    return t.generate(**dict([('template_file', name)] + globals().items() + kw.items()))
```
avg_line_length: 31.6 | max_line_length: 86 | alphanum_fraction: 0.686709
quality signals (unlisted = 0): num_words 45, num_chars 316, mean_word_length 4.511111, frac_words_unique 0.533333, frac_chars_top_2grams 0.118227, frac_chars_top_3grams 0.098522, frac_chars_whitespace 0.101266, size_file_byte 316, num_lines 9, num_chars_line_max 87, num_chars_line_mean 35.111111, frac_chars_alphabet 0.714789, frac_chars_string_length 0.088608, cate_ast 1, frac_lines_func_ratio 0.166667, cate_var_zero false, frac_lines_import 0.333333, score_lines_no_logic 0.666667
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): num_lines, frac_lines_import, score_lines_no_logic
effective: 0 | hits: 3
hexsha: e02e416aacee98cfdb91a5328f2267836f5a1229 | size: 6862 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | tests/test_ordering.py | deepio-oc/pabot | ebf1894c6d35b2ddd5c4bca01bceb25189358106 | ["Apache-2.0"] | 379 | 2015-02-02T17:47:45.000Z | 2022-03-20T16:51:05.000Z |
| max_issues | tests/test_ordering.py | deepio-oc/pabot | ebf1894c6d35b2ddd5c4bca01bceb25189358106 | ["Apache-2.0"] | 406 | 2015-02-12T07:41:53.000Z | 2022-03-28T23:35:32.000Z |
| max_forks | tests/test_ordering.py | deepio-oc/pabot | ebf1894c6d35b2ddd5c4bca01bceb25189358106 | ["Apache-2.0"] | 159 | 2015-01-16T13:42:20.000Z | 2022-03-30T19:48:15.000Z |

content:
```python
from robot import __version__ as ROBOT_VERSION
import sys
import tempfile
import textwrap
import unittest
import shutil
import subprocess


class PabotOrderingGroupTest(unittest.TestCase):
    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def _run_tests_with(self, testfile, orderfile):
        robot_file = open("{}/test.robot".format(self.tmpdir), "w")
        robot_file.write(textwrap.dedent(testfile))
        robot_file.close()
        with open("{}/order.dat".format(self.tmpdir), "w") as f:
            f.write(textwrap.dedent(orderfile))
        process = subprocess.Popen(
            [
                sys.executable,
                "-m" "pabot.pabot",
                "--testlevelsplit",
                "--ordering",
                "{}/order.dat".format(self.tmpdir),
                "{}/test.robot".format(self.tmpdir),
            ],
            cwd=self.tmpdir,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        return process.communicate()

    def test_orders(self):
        stdout, stderr = self._run_tests_with(
            """
            *** Variables ***
            ${SCALAR}    Hello, globe!

            *** Test Cases ***
            First Test
                Set Suite Variable    ${SCALAR}    Hello, world!
            Second Test
                Should Be Equal    ${SCALAR}    Hello, world!
            Third Test
                Should Be Equal    ${SCALAR}    Hello, globe!
            """,
            """
            {
            --test Test.First Test
            --test Test.Second Test
            }
            --test Test.Third Test
            """,
        )
        if sys.version_info < (3, 0):
            self.assertIn("PASSED", stdout, stderr)
            self.assertNotIn("FAILED", stdout, stderr)
            self.assertEqual(stdout.count("PASSED"), 2)
        else:
            self.assertIn(b"PASSED", stdout, stderr)
            self.assertNotIn(b"FAILED", stdout, stderr)
            self.assertEqual(stdout.count(b"PASSED"), 2)

    def test_two_orders(self):
        stdout, stderr = self._run_tests_with(
            """
            *** Variables ***
            ${SCALAR}    Hello, globe!

            *** Test Cases ***
            First Test
                Set Suite Variable    ${SCALAR}    Hello, world!
            Second Test
                Should Be Equal    ${SCALAR}    Hello, world!
            Second And Quarter
                Should Be Equal    ${SCALAR}    Hello, globe!
            Second And Half
                Should Be Equal    ${SCALAR}    Hello, globe!
            Third Test
                Should Be Equal    ${SCALAR}    Hello, globe!
            """,
            """
            {
            --test Test.First Test
            --test Test.Second Test
            }
            {
            --test Test.Second And Quarter
            --test Test.Second And Half
            }
            --test Test.Third Test
            """,
        )
        if sys.version_info < (3, 0):
            self.assertIn("PASSED", stdout, stderr)
            self.assertNotIn("FAILED", stdout, stderr)
            if ROBOT_VERSION < "4.0":
                expected_write = "5 critical tests, 5 passed, 0 failed"
            else:
                expected_write = "5 tests, 5 passed, 0 failed, 0 skipped."
            self.assertIn(expected_write, stdout, stderr)
            self.assertEqual(stdout.count("PASSED"), 3)
        else:
            self.assertIn(b"PASSED", stdout, stderr)
            self.assertNotIn(b"FAILED", stdout, stderr)
            if ROBOT_VERSION < "4.0":
                expected_write = b"5 critical tests, 5 passed, 0 failed"
            else:
                expected_write = b"5 tests, 5 passed, 0 failed, 0 skipped."
            self.assertIn(expected_write, stdout, stderr)
            self.assertEqual(stdout.count(b"PASSED"), 3)

    def test_too_big_testname(self):
        stdout, stderr = self._run_tests_with(
            """
            *** Test Cases ***
            Test Lorem ipsum dolor sit amet, consectetur adipiscing elit. Mauris eu velit nunc. Duis eget purus eget orci porta blandit sed ut tortor. Nunc vel nulla bibendum, auctor sem ac, molestie risus. Sed eu metus volutpat, hendrerit nibh in, auctor urna. Nunc a sodales.
                Log    Test
            """,
            """
            --test Invalid
            """,
        )
        if sys.version_info < (3, 0):
            self.assertIn("PASSED", stdout, stderr)
            self.assertNotIn("FAILED", stdout, stderr)
            self.assertEqual(stdout.count("PASSED"), 1)
        else:
            self.assertIn(b"PASSED", stdout, stderr)
            self.assertNotIn(b"FAILED", stdout, stderr)
            self.assertEqual(stdout.count(b"PASSED"), 1)

    def test_longnames_in_tests(self):
        stdout, stderr = self._run_tests_with(
            """
            *** Settings ***
            Test Template    Test1

            *** Test Cases ***
            The Somewhat Long Name Of The Test S1Test 01    1
            The Somewhat Long Name Of The Test S1Test 02    1
            The Somewhat Long Name Of The Test S1Test 03    1
            The Somewhat Long Name Of The Test S1Test 04    1
            The Somewhat Long Name Of The Test S1Test 05    1
            The Somewhat Long Name Of The Test S1Test 06    1
            The Somewhat Long Name Of The Test S1Test 07    1
            The Somewhat Long Name Of The Test S1Test 08    1
            The Somewhat Long Name Of The Test S1Test 09    1
            The Somewhat Long Name Of The Test S1Test 10    1
            The Somewhat Long Name Of The Test S1Test 11    1
            The Somewhat Long Name Of The Test S1Test 12    1

            *** Keywords ***
            Test1
                [Arguments]    ${arg}
                Log    Test
            """,
            """
            {
            --test Test.The Somewhat Long Name Of The Test S1Test 01
            --test Test.The Somewhat Long Name Of The Test S1Test 02
            --test Test.The Somewhat Long Name Of The Test S1Test 03
            --test Test.The Somewhat Long Name Of The Test S1Test 04
            --test Test.The Somewhat Long Name Of The Test S1Test 05
            --test Test.The Somewhat Long Name Of The Test S1Test 06
            }
            {
            --test Test.The Somewhat Long Name Of The Test S1Test 07
            --test Test.The Somewhat Long Name Of The Test S1Test 08
            --test Test.The Somewhat Long Name Of The Test S1Test 09
            --test Test.The Somewhat Long Name Of The Test S1Test 10
            --test Test.The Somewhat Long Name Of The Test S1Test 11
            --test Test.The Somewhat Long Name Of The Test S1Test 12
            }
            """,
        )
        if sys.version_info < (3, 0):
            self.assertIn("PASSED", stdout, stderr)
            self.assertNotIn("FAILED", stdout, stderr)
            self.assertEqual(stdout.count("PASSED"), 2)
        else:
            self.assertIn(b"PASSED", stdout, stderr)
            self.assertNotIn(b"FAILED", stdout, stderr)
            self.assertEqual(stdout.count(b"PASSED"), 2)
```
avg_line_length: 35.189744 | max_line_length: 273 | alphanum_fraction: 0.557418
quality signals (unlisted = 0): num_words 812, num_chars 6862, mean_word_length 4.656404, frac_words_unique 0.192118, frac_chars_top_2grams 0.055012, frac_chars_top_3grams 0.095213, frac_chars_top_4grams 0.120603, frac_chars_dupe_5grams 0.747157, frac_chars_dupe_6grams 0.721238, frac_chars_dupe_7grams 0.705898, frac_chars_dupe_8grams 0.687384, frac_chars_dupe_9grams 0.685533, frac_chars_dupe_10grams 0.562021, frac_chars_digital 0.026738, frac_chars_whitespace 0.345963, size_file_byte 6862, num_lines 194, num_chars_line_max 274, num_chars_line_mean 35.371134, frac_chars_alphabet 0.815731, frac_lines_dupe_lines 0.404255, frac_chars_string_length 0.108551, frac_lines_assert 0.276596, cate_ast 1, frac_lines_func_ratio 0.074468, cate_var_zero false, frac_lines_pass 0.212766, frac_lines_import 0.074468, score_lines_no_logic 0.170213
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_chars_dupe_6grams, frac_chars_dupe_7grams, frac_lines_pass
effective: 0 | hits: 3
hexsha: e040676396d83dae688cf225c1f4290cf1100f35 | size: 192 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | test_print_json.py | huangsen365/boto3-docker | 42d46ce4433dd037006d6b8d01db3fe444b9d8dd | ["Apache-2.0"] | null | null | null |
| max_issues | test_print_json.py | huangsen365/boto3-docker | 42d46ce4433dd037006d6b8d01db3fe444b9d8dd | ["Apache-2.0"] | null | null | null |
| max_forks | test_print_json.py | huangsen365/boto3-docker | 42d46ce4433dd037006d6b8d01db3fe444b9d8dd | ["Apache-2.0"] | null | null | null |

content:
```python
import json

your_json = '["foo", {"bar":["baz", null, 1.0, 2]}]'
parsed = json.loads(your_json)

print(type(your_json))
print(type(parsed))
# print(json.dumps(parsed, indent=4, sort_keys=True))
```
avg_line_length: 27.428571 | max_line_length: 52 | alphanum_fraction: 0.6875
quality signals (unlisted = 0): num_words 32, num_chars 192, mean_word_length 4, frac_words_unique 0.625, frac_chars_top_2grams 0.1875, frac_chars_top_3grams 0.203125, frac_chars_top_4grams 0.265625, frac_chars_digital 0.022857, frac_chars_whitespace 0.088542, size_file_byte 192, num_lines 7, num_chars_line_max 53, num_chars_line_mean 27.428571, frac_chars_alphabet 0.708571, frac_chars_comments 0.265625, frac_chars_string_length 0.269504, cate_ast 1, cate_var_zero false, frac_lines_import 0.2, score_lines_no_logic 0.2, frac_lines_print 0.4
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_chars_top_3grams, frac_chars_top_4grams, num_lines
effective: 0 | hits: 3
hexsha: e074a8e70a88cdf3e39529ffdda1dc94abc0febf | size: 15854 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | gogapi/api.py | tikki/pygogapi | f1b3a811444dc521ea4ad7884104086b52348995 | ["MIT"] | 23 | 2017-01-03T21:00:27.000Z | 2022-01-25T22:08:39.000Z |
| max_issues | gogapi/api.py | tikki/pygogapi | f1b3a811444dc521ea4ad7884104086b52348995 | ["MIT"] | 3 | 2017-06-06T23:08:30.000Z | 2019-01-28T02:20:34.000Z |
| max_forks | gogapi/api.py | tikki/pygogapi | f1b3a811444dc521ea4ad7884104086b52348995 | ["MIT"] | 8 | 2017-02-10T15:13:32.000Z | 2020-04-18T11:17:15.000Z |

content:
```python
import json
import re
import logging
import html.parser
import zlib

import requests

from gogapi import urls
from gogapi.base import NotAuthorizedError, logger
from gogapi.product import Product, Series
from gogapi.search import SearchResult

DEBUG_JSON = False
GOGDATA_RE = re.compile(r"gogData\.?(.*?) = (.+);")
CLIENT_VERSION = "1.2.17.9"  # Just for their statistics
USER_AGENT = "GOGGalaxyClient/{} pygogapi/0.1".format(CLIENT_VERSION)
REQUEST_RETRIES = 3

PRODUCT_EXPANDABLE = [
    "downloads", "expanded_dlcs", "description", "screenshots", "videos",
    "related_products", "changelog"
]
USER_EXPANDABLE = ["friendStatus", "wishlistStatus", "blockedStatus"]
LOCALE_CODES = ["de-DE", "en-US", "fr-FR", "pt-BR", "pl-PL", "ru-RU", "zh-Hans"]
CURRENCY_CODES = [
    "USD", "EUR", "GBP", "AUD", "RUB", "PLN", "CAD", "CHF", "NOK", "SEK", "DKK"
]


def find_scripts(site):
    parser = ScriptParser()
    parser.feed(site)
    return parser.scripts


class ScriptParser(html.parser.HTMLParser):
    def __init__(self):
        super().__init__()
        self.last_tag = None
        self.scripts = []

    def handle_starttag(self, tag, attrs):
        self.last_tag = tag

    def handle_data(self, data):
        if self.last_tag == "script":
            self.scripts.append(data)


class GogApi:
    def __init__(self, token=None):
        self.token = token
        self.locale = (None, None, None)  # TODO: replace tuple
        self.session = requests.Session()
        self.session.headers["User-Agent"] = USER_AGENT
        self.force_authorize = False

    # Helpers

    def request(self, method, url, authorized=True, allow_redirects=False,
                **kwargs):
        """
        Wrapper around requests.request that also handles authorization,
        retries and logging
        """
        if authorized or self.force_authorize:
            if self.token is None:
                raise NotAuthorizedError()
            if self.token.expired():
                self.token.refresh()
            self.session.headers["Authorization"] = \
                "Bearer " + self.token.access_token
        else:
            self.session.headers.pop("Authorization", None)

        # Retries
        retries = REQUEST_RETRIES
        while retries > 0:
            resp = self.session.request(
                method, url, allow_redirects=allow_redirects, **kwargs)
            if resp.status_code < 400:
                return resp
            elif 400 <= resp.status_code < 500:
                break
            else:
                retries -= 1

        resp.raise_for_status()

    def get(self, *args, **kwargs):
        """
        Wrapper around requests.get
        """
        return self.request("GET", *args, **kwargs)

    def post(self, *args, **kwargs):
        """
        Wrapper around requests.post
        """
        return self.request("POST", *args, **kwargs)

    def request_json(self, *args, compressed=False, **kwargs):
        """
        Wrapper around GogApi.request that automatically parses the
        JSON response. Also does zlib decompression because GOG decided
        to reinvent the wheel instead of using HTTP gzip encoding for
        their content system V2.
        """
        resp = self.request(*args, **kwargs)
        if not compressed:
            if DEBUG_JSON:
                print(resp.text)
            return resp.json()
        else:
            json_comp = resp.content
            json_text = zlib.decompress(json_comp, 15).decode("utf-8")
            if DEBUG_JSON:
                print(json_text)
            return json.loads(json_text)

    def get_json(self, *args, **kwargs):
        """
        Wrapper around GogApi.get with JSON parsing
        """
        return self.request_json("GET", *args, **kwargs)

    def get_gogdata(self, url, *args, **kwargs):
        """
        Downloads a page and returns the embedded JavaScript gogData
        variable.
        """
        resp = self.get(url, *args, **kwargs)
        gogdata = {}
        for script in find_scripts(resp.text):
            matches = GOGDATA_RE.finditer(resp.text)
            for match in matches:
                subkey = match.group(1)
                value = match.group(2)
                value_parsed = json.loads(value)
                if subkey:
                    data = {subkey: value_parsed}
                else:
                    data = value_parsed
                gogdata.update(data)
        return gogdata

    def set_locale(self, country, currency, locale):
        """
        country: ISO 3166 Alpha-2
        currency: ISO 4217
        locale: ISO 639 + ISO 3166 like language[_territory]
        """
        if len(country) != 2:
            return AttributeError("Invalid country code {}".format(country))
        elif currency not in CURRENCY_CODES:
            return AttributeError("Invalid currency code {}".format(locale))
        elif locale not in LOCALE_CODES:
            return AttributeError("Invalid locale code {}".format(locale))

        self.locale = (country, currency, locale)
        self.session.cookies["gog_lc"] = "_".join(self.locale)

    # Web APIs

    def web_game_gogdata(self, slug):
        return self.get_gogdata(urls.web("game", slug), authorized=False)

    def web_games_gogdata(self):
        return self.get_gogdata(urls.web("account.games"))

    def web_movies_gogdata(self):
        return self.get_gogdata(urls.web("account.movies"))

    def web_wishlist_gogdata(self):
        return self.get_gogdata(urls.web("account.wishlist"))

    def web_friends_gogdata(self):
        return self.get_gogdata(urls.web("account.friends"))

    def web_chat_gogdata(self):
        return self.get_gogdata(urls.web("account.chat"))

    def web_wallet_gogdata(self):
        return self.get_gogdata(urls.web("wallet"))

    def web_orders_gogdata(self):
        return self.get_gogdata(urls.web("settings.orders"))

    def web_account_gamedetails(self, game_id):
        return self.get_json(urls.web("account.gamedetails", game_id))

    def web_account_search(self, **query):
        """
        Allowed query keys:
        category: Genre
        feature: Feature
        hiddenFlag: Show hidden games
        language: Language
        mediaType: Game or movie
        page: Page number
        search: Search string
        sortBy: Sort order
        system: OS
        tags: Tags
        totalPages: Total Pages
        """
        return self.get_json(urls.web("account.get_filtered"), params=query)

    def web_search(self, **query):
        """
        Allowed query keys:
        category: Genre
        devpub: Developer or Published
        feature: Features
        language: Language
        mediaType: Game or movie
        page: Page number
        price: Price range
        release: Release timeframe
        search: Search string
        sort: Sort order
        system: OS
        limit: Max results
        """
        return self.get_json(
            urls.web("search.filtering"), params=query, authorized=False)

    def web_user_data(self):
        return self.get_json(urls.web("user.data"))

    def web_user_games(self):
        return self.get_json(urls.web("user.games"))

    def web_user_wishlist(self):
        return self.get_json(urls.web("user.wishlist"))

    def web_user_wishlist_add(self, game_id):
        """Returns new wishlist"""
        return self.get_json(urls.web("user.wishlist.add", game_id))

    def web_user_wishlist_remove(self, game_id):
        """Returns new wishlist"""
        return self.get_json(urls.web("user.wishlist.remove", game_id))

    def web_user_ratings(self):
        return self.get_json(urls.web("user.ratings"))

    def web_user_review_votes(self):
        return self.get_json(urls.web("user.review_votes"))

    def web_user_change_currency(self, currency):
        return self.get_json(urls.web("user.change_currency", currency))

    def web_user_change_language(self, lang):
        return self.get_json(urls.web("user.change_language", lang))

    def web_user_set_redirect_url(self, url):
        """Set redirect url after login. Only know valid url: checkout"""
        return self.get(urls.web("user.set_redirect_url", params={"url": url}))

    def web_user_review_guidelines(self):
        return self.get_json(urls.web("user.review_guidelines"))

    def web_user_public_info(self, user_id, expand=None):
        if not expand:
            params = None
        elif expand == True:
            params = {"expand": ",".join(USER_EXPANDABLE)}
        else:
            params = {"expand": ",".join(expand)}
        return self.get_json(
            urls.web("user.public.info", user_id, params=params))

    def web_user_public_block(self, user_id):
        return self.get_json(urls.web("user.public.block", user_id))

    def web_user_public_unblock(self, user_id):
        return self.get_json(urls.web("user.public.unblock", user_id))

    def web_friends_remove(self, user_id):
        return self.get_json(urls.web("friends.remove", user_id))

    def web_friends_invite(self, user_id):
        return self.get_json(urls.web("friends.invite", user_id))

    def web_friends_accept(self, user_id):
        return self.get_json(urls.web("friends.accept", user_id))

    def web_friends_decline(self, user_id):
        return self.get_json(urls.web("friends.decline", user_id))

    def web_cart_get(self):
        return self.get_json(urls.web("cart.get"))

    def web_cart_add(self, game_id):
        return self.get_json(urls.web("cart.add", game_id))

    def web_cart_add_series(self, series_id):
        return self.get_json(urls.web("cart.add_series", series_id))

    def web_cart_remove(self, game_id):
        return self.get_json(urls.web("cart.remove", game_id))

    def web_reviews_search(self, game_id):
        return self.get_json(urls.web("reviews.search", game_id))

    def web_reviews_vote(self, game_id):
        return self.get_json(urls.web("reviews.vote", game_id))

    def web_reviews_report(self, game_id):
        return self.get_json(urls.web("reviews.report", game_id))

    def web_reviews_rate(self, game_id):
        return self.get_json(urls.web("reviews.rate", game_id))

    def web_reviews_add(self, game_id):
        return self.get_json(urls.web("reviews.add", game_id))

    def web_order_change_currency(self, order_id, currency):
        return self.get_json(
            urls.web("order.change_currency", order_id, currency))

    def web_order_add(self, order_id, game_id):
        return self.get_json(urls.web("order.add", order_id, game_id))

    def web_order_remove(self, order_id, game_id):
        return self.get_json(urls.web("order.remove", order_id, game_id))

    def web_order_enable_store_credit(self, order_id):
        return self.get_json(urls.web("order.enable_store_credit", order_id))

    def web_order_disable_store_credit(self, order_id):
        return self.get_json(urls.web("order.disable_store_credit", order_id))

    def web_order_set_as_gift(self, order_id):
        return self.get_json(urls.web("order.set_as_gift", order_id))

    def web_order_set_as_not_gift(self, order_id):
        return self.get_json(urls.web("order.set_as_non_gift", order_id))

    def web_order_process_order(self, order_id):
        return self.get_json(urls.web("order.process_order", order_id))

    def web_order_payment_status(self, order_id):
        return self.get_json(urls.web("order.payment_status", order_id))

    def web_order_check_status(self, order_id):
        return self.get_json(urls.web("order.check_status", order_id))

    def web_checkout(self, order_id=None):
        if order_id is None:
            return self.get_json(urls.web("checkout"))
        else:
            return self.get_json(urls.web("checkout_id", order_id))

    def web_checkout_manual(self, order_id):
        return self.get_json(urls.web("checkout_manual", order_id))

    # Galaxy APIs

    def galaxy_file(self, game_id, dl_url):
        dl_url = dl_url.lstrip("/")
        return self.get_json(urls.galaxy("file", game_id, dl_url))

    def galaxy_user(self, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("user", user_id))

    def galaxy_friends(self, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("friends", user_id))

    def galaxy_invitations(self, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("invitations", user_id))

    def galaxy_status(self, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        reqdata = {"version": CLIENT_VERSION}
        self.post(urls.galaxy("status", user_id), data=reqdata)

    def galaxy_statuses(self, user_ids):
        user_ids_str = ",".join(user_ids)
        params = {"user_id": user_ids_str}
        #self.request("OPTIONS", urls.galaxy("statuses"), params=params)
        return self.get_json(urls.galaxy("statuses"), params=params)

    def galaxy_achievements(self, game_id, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("achievements", game_id, user_id))

    def galaxy_sessions(self, game_id, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("sessions", game_id, user_id))

    def galaxy_friends_achievements(self, game_id, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(
            urls.galaxy("friends.achievements", game_id, user_id))

    def galaxy_friends_sessions(self, game_id, user_id=None):
        if user_id is None:
            user_id = self.token.user_id
        return self.get_json(urls.galaxy("friends.sessions", game_id, user_id))

    def galaxy_product(self, game_id, expand=None):
        if not expand:
            params = {}
        elif expand is True:
            params = {"expand": ",".join(PRODUCT_EXPANDABLE)}
        else:
            params = {"expand": ",".join(expand)}
        if self.locale[2]:
            params["locale"] = self.locale[2]
        return self.get_json(
            urls.galaxy("product", game_id), params=params,
            authorized=False)

    def galaxy_products(self, game_ids, expand=None):
        if not expand:
            params = {}
        elif expand is True:
            params = {"expand": ",".join(PRODUCT_EXPANDABLE)}
        else:
            params = {"expand": ",".join(expand)}
        if self.locale[2]:
            params["locale"] = self.locale[2]
        ids_string = ",".join(str(game_id) for game_id in game_ids)
        params["ids"] = ids_string
        return self.get_json(
            urls.galaxy("products"), params=params, authorized=False)

    def galaxy_secure_link(self, game_id, path, generation):
        return self.get_json(
            urls.galaxy("cs.securelink", game_id),
            params={"path": path, "generation": generation})

    def galaxy_builds(self, game_id, system):
        return self.get_json(
            urls.galaxy("cs.builds", game_id, system), authorized=False)

    def galaxy_cs_meta(self, meta_id):
        return self.get_json(
            urls.galaxy("cs.meta", meta_id[0:2], meta_id[2:4], meta_id),
            compressed=True,
            authorized=False)

    def galaxy_client_config(self):
        return self.get_json(urls.galaxy("client-config"), authorized=False)

    def product(self, product_id, slug=None):
        return Product(self, product_id, slug)

    def search(self, **query):
        search_data = self.web_search(**query)
        return SearchResult(self, query, search_data)
```
avg_line_length: 32.892116 | max_line_length: 80 | alphanum_fraction: 0.628233
quality signals (unlisted = 0): num_words 2065, num_chars 15854, mean_word_length 4.60678, frac_words_unique 0.155448, frac_chars_top_2grams 0.072532, frac_chars_top_3grams 0.090192, frac_chars_top_4grams 0.101861, frac_chars_dupe_5grams 0.469358, frac_chars_dupe_6grams 0.411017, frac_chars_dupe_7grams 0.37002, frac_chars_dupe_8grams 0.309156, frac_chars_dupe_9grams 0.266898, frac_chars_dupe_10grams 0.213708, frac_chars_digital 0.004235, frac_chars_whitespace 0.255393, size_file_byte 15854, num_lines 481, num_chars_line_max 81, num_chars_line_mean 32.960499, frac_chars_alphabet 0.801609, frac_chars_comments 0.082503, frac_lines_dupe_lines 0.173203, frac_chars_string_length 0.09924, frac_chars_long_word_length 0.009661, frac_lines_prompt_comments 0.002079, cate_ast 1, frac_lines_func_ratio 0.261438, cate_var_zero false, frac_lines_import 0.03268, frac_lines_simplefunc 0.156863, score_lines_no_logic 0.558824, frac_lines_print 0.006536
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_lines_func_ratio, frac_lines_simplefunc, score_lines_no_logic
effective: 0 | hits: 3
hexsha: e076824b715f780b36bdb8e03020e256a3cf8b8d | size: 156 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | csv_experiment.py | komax/spanningtree-crossingnumber | 444c8809a543905000a63c9d2ff1dcfb31835766 | ["MIT"] | 2 | 2019-01-07T22:12:09.000Z | 2020-05-08T06:44:19.000Z |
| max_issues | csv_experiment.py | komax/spanningtree-crossingnumber | 444c8809a543905000a63c9d2ff1dcfb31835766 | ["MIT"] | null | null | null |
| max_forks | csv_experiment.py | komax/spanningtree-crossingnumber | 444c8809a543905000a63c9d2ff1dcfb31835766 | ["MIT"] | null | null | null |

content:
```python
#! /usr/bin/env python
import os
import sys

args = sys.argv[1:]
os.system('python -O -m spanningtree.csv_experiment_statistics ' +
          ' '.join(args))
```
avg_line_length: 19.5 | max_line_length: 66 | alphanum_fraction: 0.673077
quality signals (unlisted = 0): num_words 23, num_chars 156, mean_word_length 4.478261, frac_words_unique 0.782609, frac_chars_digital 0.007752, frac_chars_whitespace 0.173077, size_file_byte 156, num_lines 7, num_chars_line_max 67, num_chars_line_mean 22.285714, frac_chars_alphabet 0.790698, frac_chars_comments 0.134615, frac_chars_string_length 0.395522, frac_chars_long_word_length 0.283582, cate_ast 1, cate_var_zero false, frac_lines_import 0.4, score_lines_no_logic 0.4
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): num_words, num_lines, frac_lines_import
effective: 0 | hits: 3
hexsha: e077be2cbaa5c0711f376c7e5a696aa0b37ee960 | size: 1526 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | qubiter/device_specific/chip_couplings_ibm.py | yourball/qubiter | 5ef0ea064fa8c9f125f7951a01fbb88504a054a5 | ["Apache-2.0"] | 3 | 2019-10-03T04:27:36.000Z | 2021-02-13T17:49:34.000Z |
| max_issues | qubiter/device_specific/chip_couplings_ibm.py | yourball/qubiter | 5ef0ea064fa8c9f125f7951a01fbb88504a054a5 | ["Apache-2.0"] | null | null | null |
| max_forks | qubiter/device_specific/chip_couplings_ibm.py | yourball/qubiter | 5ef0ea064fa8c9f125f7951a01fbb88504a054a5 | ["Apache-2.0"] | 2 | 2020-10-07T15:22:19.000Z | 2021-06-07T04:59:58.000Z |

content:
```python
def aaa():
    # trick sphinx to build link in doc
    pass


# retired
ibmqx2_c_to_tars = \
    {
        0: [1, 2],
        1: [2],
        2: [],
        3: [2, 4],
        4: [2]
    }  # 6 edges

# retired
ibmqx4_c_to_tars = \
    {
        0: [],
        1: [0],
        2: [0, 1, 4],
        3: [2, 4],
        4: []
    }  # 6 edges

# retired
ibmq16Rus_c_to_tars = \
    {
        0: [],
        1: [0, 2],
        2: [3],
        3: [4, 14],
        4: [],
        5: [4],
        6: [5, 7, 11],
        7: [10],
        8: [7],
        9: [8, 10],
        10: [],
        11: [10],
        12: [5, 11, 13],
        13: [4, 14],
        14: [],
        15: [0, 2, 14]
    }  # 22 edges

ibm20AustinTokyo_c_to_tars = \
    {
        0: [1, 5],
        1: [0, 2, 6, 7],
        2: [1, 3, 6, 7],
        3: [2, 4, 8, 9],
        4: [3, 8, 9],
        5: [0, 6, 10, 11],
        6: [1, 2, 5, 7, 10, 11],
        7: [1, 2, 6, 8, 12, 13],
        8: [3, 4, 7, 9, 12, 13],
        9: [3, 4, 8, 14],
        10: [5, 6, 11, 15],
        11: [5, 6, 10, 12, 16, 17],
        12: [7, 8, 11, 13, 16, 17],
        13: [7, 8, 12, 14, 18, 19],
        14: [9, 13, 18, 19],
        15: [10, 16],
        16: [11, 12, 15, 17],
        17: [11, 12, 16, 18],
        18: [13, 14, 17, 19],
        19: [13, 14, 18]
    }  # 86 edges

ibmq5YorktownTenerife_c_to_tars = \
    {
        0: [1, 2],
        1: [0, 2],
        2: [0, 1, 3, 4],
        3: [2, 4],
        4: [2, 3]
    }  # 12 edges

ibmq14Melb_c_to_tars = \
    {
        0: [1],
        1: [0, 2, 13],
        2: [1, 3, 12],
        3: [2, 4, 11],
        4: [3, 5, 10],
        5: [4, 6, 9],
        6: [5, 8],
        7: [8],
        8: [6, 7, 9],
        9: [5, 8, 10],
        10: [4, 9, 11],
        11: [3, 10, 12],
        12: [2, 11, 13],
        13: [1, 12]
    }  # 36 edges
```
avg_line_length: 15.895833 | max_line_length: 39 | alphanum_fraction: 0.355177
quality signals (unlisted = 0): num_words 282, num_chars 1526, mean_word_length 1.858156, frac_words_unique 0.152482, frac_chars_top_2grams 0.030534, frac_chars_top_3grams 0.080153, frac_chars_top_4grams 0.091603, frac_chars_dupe_5grams 0.148855, frac_chars_dupe_6grams 0.083969, frac_chars_dupe_7grams 0.083969, frac_chars_digital 0.355249, frac_chars_whitespace 0.382045, size_file_byte 1526, num_lines 95, num_chars_line_max 40, num_chars_line_mean 16.063158, frac_chars_alphabet 0.200424, frac_chars_comments 0.071429, frac_lines_dupe_lines 0.105882, cate_ast 1, frac_lines_func_ratio 0.011765, cate_var_zero false, frac_lines_pass 0.011765, score_lines_no_logic 0.011765
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): mean_word_length, frac_chars_digital, frac_chars_alphabet
effective: 0 | hits: 3
hexsha: 0eb9c920aa1f94bcf5b75523167a5791a71d6de8 | size: 1150 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | modle/__init__.py | Rex0519/NessusToReport | 047dd4a2f749addab3991b0ebc8ab609140c32a7 | ["Apache-2.0"] | 244 | 2020-06-27T12:07:52.000Z | 2022-03-30T02:36:27.000Z |
| max_issues | modle/__init__.py | Rex0519/NessusToReport | 047dd4a2f749addab3991b0ebc8ab609140c32a7 | ["Apache-2.0"] | 23 | 2021-05-20T07:38:55.000Z | 2022-03-13T14:13:01.000Z |
| max_forks | modle/__init__.py | Rex0519/NessusToReport | 047dd4a2f749addab3991b0ebc8ab609140c32a7 | ["Apache-2.0"] | 74 | 2020-06-27T12:07:53.000Z | 2022-03-11T19:07:45.000Z |

content:
```python
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# ------------------------------------------------------------
# File: __init__.py.py
# Created Date: 2020/6/24
# Created Time: 0:12
# Author: Hypdncy
# Author Mail: [email protected]
# Copyright (c) 2020 Hypdncy
# ------------------------------------------------------------
#                     .::::.
#                   .::::::::.
#                  :::::::::::
#               ..:::::::::::'
#            '::::::::::::'
#              .::::::::::
#         '::::::::::::::..
#              ..::::::::::::.
#            ``::::::::::::::::
#             ::::``:::::::::'        .:::.
#            ::::'   ':::::'       .::::::::.
#          .::::'      ::::     .:::::::'::::.
#         .:::'       :::::  .:::::::::' ':::::.
#        .::'        :::::.:::::::::'      ':::::.
#       .::'         ::::::::::::::'         ``::::.
#   ...:::           ::::::::::::'              ``::.
#  ````':.          ':::::::::'                  ::::..
#                    '.:::::'                    ':'````..
# ------------------------------------------------------------
```
avg_line_length: 39.655172 | max_line_length: 62 | alphanum_fraction: 0.117391
quality signals (unlisted = 0): num_words 31, num_chars 1150, mean_word_length 4.225806, frac_words_unique 0.806452, frac_chars_digital 0.02289, frac_chars_whitespace 0.392174, size_file_byte 1150, num_lines 29, num_chars_line_max 63, num_chars_line_mean 39.655172, frac_chars_alphabet 0.164521, frac_chars_comments 0.950435, cate_ast 1, cate_var_zero true; null for this row: frac_lines_dupe_lines, frac_lines_long_string, frac_lines_string_concat, frac_lines_assert, frac_lines_func_ratio, frac_lines_simplefunc, score_lines_no_logic, frac_lines_print
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_chars_alphabet, frac_chars_comments, cate_var_zero
effective: 0 | hits: 3
hexsha: 0ede1b7c3e14b744474c60d6f5f4a702ad5ce8ca | size: 281 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | common/__init__.py | whyh/FavourDemo | 1b19882fb2e79dee9c3332594bf45c91e7476eaa | ["Unlicense"] | 1 | 2020-09-14T12:10:22.000Z | 2020-09-14T12:10:22.000Z |
| max_issues | common/__init__.py | whyh/FavourDemo | 1b19882fb2e79dee9c3332594bf45c91e7476eaa | ["Unlicense"] | 4 | 2021-04-30T20:54:31.000Z | 2021-06-02T00:28:04.000Z |
| max_forks | common/__init__.py | whyh/FavourDemo | 1b19882fb2e79dee9c3332594bf45c91e7476eaa | ["Unlicense"] | null | null | null |

content:
```python
from . import (emoji as emj,
               keyboards as kb,
               telegram as tg,
               phrases as phr,
               finance as fin,
               utils,
               glossary,
               bots,
               gcp,
               sed,
               db)
```
avg_line_length: 23.416667 | max_line_length: 31 | alphanum_fraction: 0.33452
quality signals (unlisted = 0): num_words 23, num_chars 281, mean_word_length 4.086957, frac_words_unique 0.826087, frac_chars_whitespace 0.619217, size_file_byte 281, num_lines 11, num_chars_line_max 32, num_chars_line_mean 25.545455, frac_chars_alphabet 0.878505, cate_ast 1, cate_var_zero true, frac_lines_import 0.090909, score_lines_no_logic 0.090909
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): num_words, frac_chars_whitespace, cate_var_zero
effective: 0 | hits: 3
hexsha: 0eeb15222dc7d564fdca952a76722513fa52548a | size: 144 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | tlp/django_app/app/urls.py | munisisazade/create-django-app | f62395af2adaacacc4d3a3857c6570c9647d13a1 | ["MIT"] | 14 | 2018-01-08T12:50:10.000Z | 2021-12-26T18:38:14.000Z |
| max_issues | tlp/django_app/app/urls.py | munisisazade/create-django-app | f62395af2adaacacc4d3a3857c6570c9647d13a1 | ["MIT"] | 10 | 2018-03-01T14:17:05.000Z | 2022-03-11T23:26:11.000Z |
| max_forks | tlp/django_app/app/urls.py | munisisazade/create-django-app | f62395af2adaacacc4d3a3857c6570c9647d13a1 | ["MIT"] | 4 | 2019-04-09T17:29:34.000Z | 2020-06-07T14:46:23.000Z |

content:
```python
from django.conf.urls import url
# from .views import BaseIndexView

urlpatterns = [
    # url(r'^$', BaseIndexView.as_view(), name="index"),
]
```
avg_line_length: 20.571429 | max_line_length: 56 | alphanum_fraction: 0.6875
quality signals (unlisted = 0): num_words 18, num_chars 144, mean_word_length 5.444444, frac_words_unique 0.777778, frac_chars_whitespace 0.152778, size_file_byte 144, num_lines 7, num_chars_line_max 57, num_chars_line_mean 20.571429, frac_chars_alphabet 0.803279, frac_chars_comments 0.576389, cate_ast 1, cate_var_zero false, frac_lines_import 0.333333, score_lines_no_logic 0.333333
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): num_words, num_lines, frac_lines_import
effective: 0 | hits: 3
hexsha: 0ef0b2cd3d796c7cb5a1c89e97057d03988cd5cd | size: 1436 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | release/stubs.min/System/Windows/Forms/__init___parts/PaintEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | ["MIT"] | null | null | null |
| max_issues | release/stubs.min/System/Windows/Forms/__init___parts/PaintEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | ["MIT"] | null | null | null |
| max_forks | release/stubs.min/System/Windows/Forms/__init___parts/PaintEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | ["MIT"] | null | null | null |

content:
```python
class PaintEventArgs(EventArgs, IDisposable):
    """
    Provides data for the System.Windows.Forms.Control.Paint event.

    PaintEventArgs(graphics: Graphics, clipRect: Rectangle)
    """
    def Instance(self):
        """ This function has been arbitrarily put into the stubs """
        return PaintEventArgs()

    def Dispose(self):
        """
        Dispose(self: PaintEventArgs)
        Releases all resources used by the System.Windows.Forms.PaintEventArgs.
        """
        pass

    def __enter__(self, *args):
        """ __enter__(self: IDisposable) -> object """
        pass

    def __exit__(self, *args):
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass

    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass

    @staticmethod
    def __new__(self, graphics, clipRect):
        """ __new__(cls: type, graphics: Graphics, clipRect: Rectangle) """
        pass

    ClipRectangle = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the rectangle in which to paint.

    Get: ClipRectangle(self: PaintEventArgs) -> Rectangle
    """

    Graphics = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """Gets the graphics used to paint.

    Get: Graphics(self: PaintEventArgs) -> Graphics
    """
```
avg_line_length: 31.911111 | max_line_length: 215 | alphanum_fraction: 0.699164
quality signals (unlisted = 0): num_words 170, num_chars 1436, mean_word_length 5.511765, frac_words_unique 0.376471, frac_chars_top_2grams 0.064034, frac_chars_top_3grams 0.051227, frac_chars_top_4grams 0.060832, frac_chars_dupe_5grams through frac_chars_dupe_10grams 0.248666 each, frac_chars_whitespace 0.169916, size_file_byte 1436, num_lines 44, num_chars_line_max 216, num_chars_line_mean 32.636364, frac_chars_alphabet 0.786074, frac_chars_comments 0.462396, frac_lines_dupe_lines 0.3125, cate_ast 1, frac_lines_func_ratio 0.375, cate_var_zero false, frac_lines_pass 0.3125, score_lines_no_logic 0.625
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_lines_func_ratio, frac_lines_pass, score_lines_no_logic
effective: 0 | hits: 3
hexsha: 162335a5b07a8e07ba6397644e3e4ed7a9f459e2 | size: 8442 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | uproot_methods/common/TVector.py | marinang/uproot-methods | 1d16d51ab7da19b4f31070d24e8fbfed3ae3ec8f | ["BSD-3-Clause"] | null | null | null |
| max_issues | uproot_methods/common/TVector.py | marinang/uproot-methods | 1d16d51ab7da19b4f31070d24e8fbfed3ae3ec8f | ["BSD-3-Clause"] | null | null | null |
| max_forks | uproot_methods/common/TVector.py | marinang/uproot-methods | 1d16d51ab7da19b4f31070d24e8fbfed3ae3ec8f | ["BSD-3-Clause"] | null | null | null |

content:
```python
#!/usr/bin/env python

# Copyright (c) 2018, DIANA-HEP
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import math
import numbers
import operator

import awkward
import awkward.util


class Common(object):
    @property
    def mag2(self):
        return self.dot(self)

    @property
    def mag(self):
        return awkward.util.numpy.sqrt(self.mag2)

    @property
    def rho2(self):
        out = self.x*self.x
        out = out + self.y*self.y
        return out

    def delta_phi(self, other):
        return (self.phi - other.phi + math.pi) % (2*math.pi) - math.pi

    def isparallel(self, other, tolerance=1e-10):
        return 1 - self.cosdelta(other) < tolerance

    def isantiparallel(self, other, tolerance=1e-10):
        return self.cosdelta(other) - (-1) < tolerance

    def iscollinear(self, other, tolerance=1e-10):
        return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance

    def __lt__(self, other):
        raise TypeError("spatial vectors have no natural ordering")

    def __gt__(self, other):
        raise TypeError("spatial vectors have no natural ordering")

    def __le__(self, other):
        raise TypeError("spatial vectors have no natural ordering")

    def __ge__(self, other):
        raise TypeError("spatial vectors have no natural ordering")


class ArrayMethods(Common):
    @property
    def unit(self):
        return self / self.mag

    @property
    def rho(self):
        out = self.rho2
        return awkward.util.numpy.sqrt(out)

    @property
    def phi(self):
        return awkward.util.numpy.arctan2(self.y, self.x)

    def cosdelta(self, other):
        denom = self.mag2 * other.mag2
        mask = (denom > 0)
        denom = denom[mask]
        denom[:] = awkward.util.numpy.sqrt(denom)

        out = self.dot(other)
        out[mask] /= denom

        mask = awkward.util.numpy.logical_not(mask)
        out[mask] = 1

        return awkward.util.numpy.clip(out, -1, 1)

    def angle(self, other, normal=None, degrees=False):
        out = awkward.util.numpy.arccos(self.cosdelta(other))
        if normal is not None:
            a = self.unit
            b = other.unit
            out = out * awkward.util.numpy.sign(normal.dot(a.cross(b)))
        if degrees:
            out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi)
        return out

    def isopposite(self, other, tolerance=1e-10):
        tmp = self + other
        tmp.x = awkward.util.numpy.absolute(tmp.x)
        tmp.y = awkward.util.numpy.absolute(tmp.y)
        tmp.z = awkward.util.numpy.absolute(tmp.z)

        out = (tmp.x < tolerance)
        out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance)
        out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance)
        return out

    def isperpendicular(self, other, tolerance=1e-10):
        tmp = self.dot(other)
        tmp.x = awkward.util.numpy.absolute(tmp.x)
        tmp.y = awkward.util.numpy.absolute(tmp.y)
        tmp.z = awkward.util.numpy.absolute(tmp.z)

        out = (tmp.x < tolerance)
        out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance)
        out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance)
        return out


class Methods(Common):
    @property
    def unit(self):
        return self / self.mag

    @property
    def rho(self):
        return math.sqrt(self.rho2)

    @property
    def phi(self):
        return math.atan2(self.y, self.x)

    def cosdelta(self, other):
        m1 = self.mag2
        m2 = other.mag2
        if m1 == 0 or m2 == 0:
            return 1.0
        r = self.dot(other) / math.sqrt(m1 * m2)
        return max(-1.0, min(1.0, r))

    def angle(self, other, degrees=False):
        out = math.acos(self.cosdelta(other))
        if degrees:
            out = out * 180.0/math.pi
        return out

    def isopposite(self, other, tolerance=1e-10):
        tmp = self + other
        return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance

    def isperpendicular(self, other, tolerance=1e-10):
        tmp = self.dot(other)
        return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance

    def __add__(self, other):
        return self._vector(operator.add, other)

    def __radd__(self, other):
        return self._vector(operator.add, other, True)

    def __sub__(self, other):
        return self._vector(operator.sub, other)

    def __rsub__(self, other):
        return self._vector(operator.sub, other, True)

    def __mul__(self, other):
        return self._scalar(operator.mul, other)

    def __rmul__(self, other):
        return self._scalar(operator.mul, other, True)

    def __div__(self, other):
        return self._scalar(operator.div, other)

    def __rdiv__(self, other):
        return self._scalar(operator.div, other, True)

    def __truediv__(self, other):
        return self._scalar(operator.truediv, other)

    def __rtruediv__(self, other):
        return self._scalar(operator.truediv, other, True)

    def __floordiv__(self, other):
        return self._scalar(operator.floordiv, other)

    def __rfloordiv__(self, other):
        return self._scalar(operator.floordiv, other, True)

    def __mod__(self, other):
        return self._scalar(operator.mod, other)

    def __rmod__(self, other):
        return self._scalar(operator.mod, other, True)

    def __divmod__(self, other):
        return self._scalar(operator.divmod, other)

    def __rdivmod__(self, other):
        return self._scalar(operator.divmod, other, True)

    def __pow__(self, other):
        if isinstance(other, (numbers.Number, awkward.util.numpy.number)):
            if other == 2:
                return self.mag2
            else:
                return self.mag2**(0.5*other)
        else:
            self._scalar(operator.pow, other)

    # no __rpow__

    def __lshift__(self, other):
        return self._scalar(operator.lshift, other)

    def __rlshift__(self, other):
        return self._scalar(operator.lshift, other, True)

    def __rshift__(self, other):
        return self._scalar(operator.rshift, other)

    def __rrshift__(self, other):
        return self._scalar(operator.rshift, other, True)

    def __and__(self, other):
        return self._scalar(operator.and_, other)

    def __rand__(self, other):
        return self._scalar(operator.and_, other, True)

    def __or__(self, other):
        return self._scalar(operator.or_, other)

    def __ror__(self, other):
        return self._scalar(operator.or_, other, True)

    def __xor__(self, other):
        return self._scalar(operator.xor, other)

    def __rxor__(self, other):
        return self._scalar(operator.xor, other, True)

    def __neg__(self):
        return self._unary(operator.neg)

    def __pos__(self):
        return self._unary(operator.pos)

    def __abs__(self):
        return self.mag

    def __invert__(self):
        return self._unary(operator.invert)
```
avg_line_length: 31.036765 | max_line_length: 91 | alphanum_fraction: 0.650912
quality signals (unlisted = 0): num_words 1118, num_chars 8442, mean_word_length 4.750447, frac_words_unique 0.217352, frac_chars_top_2grams 0.076257, frac_chars_top_3grams 0.079081, frac_chars_top_4grams 0.096592, frac_chars_dupe_5grams 0.531162, frac_chars_dupe_6grams 0.473357, frac_chars_dupe_7grams 0.468085, frac_chars_dupe_8grams 0.457164, frac_chars_dupe_9grams 0.23875, frac_chars_dupe_10grams 0.23875, frac_chars_digital 0.011148, frac_chars_whitespace 0.245558, size_file_byte 8442, num_lines 271, num_chars_line_max 92, num_chars_line_mean 31.151292, frac_chars_alphabet 0.822735, frac_chars_comments 0.179934, frac_lines_dupe_lines 0.305085, frac_chars_string_length 0.023209, cate_ast 1, frac_lines_func_ratio 0.316384, cate_var_zero false, frac_lines_import 0.028249, frac_lines_simplefunc 0.231638, score_lines_no_logic 0.666667
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_lines_func_ratio, frac_lines_simplefunc, score_lines_no_logic
effective: 0 | hits: 3
hexsha: 16286e428c3bbec3fb9fbe61340a4121c6311a63 | size: 274 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | tests/attacks/class_test.py | henrik997/privacy-evaluator | f1d0e6c10ff58e582a44243788ab66c1d453bfa0 | ["MIT"] | null | null | null |
| max_issues | tests/attacks/class_test.py | henrik997/privacy-evaluator | f1d0e6c10ff58e582a44243788ab66c1d453bfa0 | ["MIT"] | null | null | null |
| max_forks | tests/attacks/class_test.py | henrik997/privacy-evaluator | f1d0e6c10ff58e582a44243788ab66c1d453bfa0 | ["MIT"] | null | null | null |

content:
```python
import pytest
from privacy_evaluator.attacks.sample_attack import Sample_Attack

"""
This test only test if no error is thrown when calling the function, can be removed in the future
"""


def test_sample_attack():
    test = Sample_Attack(0, 0, 0)
    test.perform_attack()
```
avg_line_length: 24.909091 | max_line_length: 97 | alphanum_fraction: 0.762774
quality signals (unlisted = 0): num_words 43, num_chars 274, mean_word_length 4.697674, frac_words_unique 0.651163, frac_chars_top_2grams 0.237624, frac_chars_top_3grams 0.158416, frac_chars_digital 0.0131, frac_chars_whitespace 0.164234, size_file_byte 274, num_lines 10, num_chars_line_max 98, num_chars_line_mean 27.4, frac_chars_alphabet 0.868996, cate_ast 1, frac_lines_func_ratio 0.2, cate_var_zero false, frac_lines_import 0.4, score_lines_no_logic 0.6
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_chars_top_2grams, frac_lines_import, score_lines_no_logic
effective: 0 | hits: 3
hexsha: 162cf5942b39cb55c7afb1cde65c73f78fbc4d55 | size: 8182 | ext: py | lang: Python

|  | repo_path | repo_name | head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | test/spec/test_spec.py | raghu1121/SLM-Lab | 58e98b6521f581515d04ebacff5226105237ed9b | ["MIT"] | 1 | 2021-09-01T11:57:04.000Z | 2021-09-01T11:57:04.000Z |
| max_issues | test/spec/test_spec.py | ragtz/SLM-Lab | 42c48af308dfe36401990aca3795bc481cf28c17 | ["MIT"] | null | null | null |
| max_forks | test/spec/test_spec.py | ragtz/SLM-Lab | 42c48af308dfe36401990aca3795bc481cf28c17 | ["MIT"] | null | null | null |

content:
```python
from flaky import flaky
from slm_lab.experiment.control import Trial
from slm_lab.experiment.monitor import InfoSpace
from slm_lab.lib import util
from slm_lab.spec import spec_util
import os
import pandas as pd
import pytest
import sys


# helper method to run all tests in test_spec
def run_trial_test(spec_file, spec_name=False):
    spec = spec_util.get(spec_file, spec_name)
    spec = spec_util.override_test_spec(spec)
    info_space = InfoSpace()
    info_space.tick('trial')
    trial = Trial(spec, info_space)
    trial_data = trial.run()
    assert isinstance(trial_data, pd.DataFrame)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/reinforce.json', 'reinforce_mlp_cartpole'),
    ('experimental/reinforce.json', 'reinforce_rnn_cartpole'),
    # ('experimental/reinforce.json', 'reinforce_conv_breakout'),
])
def test_reinforce(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/reinforce.json', 'reinforce_mlp_pendulum'),
    ('experimental/reinforce.json', 'reinforce_rnn_pendulum'),
])
def test_reinforce_cont(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/a2c.json', 'a2c_mlp_shared_cartpole'),
    ('experimental/a2c.json', 'a2c_mlp_separate_cartpole'),
    ('experimental/a2c.json', 'a2c_rnn_shared_cartpole'),
    ('experimental/a2c.json', 'a2c_rnn_separate_cartpole'),
    # ('experimental/a2c.json', 'a2c_conv_shared_breakout'),
    # ('experimental/a2c.json', 'a2c_conv_separate_breakout'),
    ('experimental/a2c.json', 'a2c_mlp_concat_cartpole'),
])
def test_a2c(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/a2c.json', 'a2c_mlp_shared_pendulum'),
    ('experimental/a2c.json', 'a2c_mlp_separate_pendulum'),
    ('experimental/a2c.json', 'a2c_rnn_shared_pendulum'),
    ('experimental/a2c.json', 'a2c_rnn_separate_pendulum'),
])
def test_a2c_cont(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/ppo.json', 'ppo_mlp_shared_cartpole'),
    ('experimental/ppo.json', 'ppo_mlp_separate_cartpole'),
    ('experimental/ppo.json', 'ppo_rnn_shared_cartpole'),
    ('experimental/ppo.json', 'ppo_rnn_separate_cartpole'),
    # ('experimental/ppo.json', 'ppo_conv_shared_breakout'),
    # ('experimental/ppo.json', 'ppo_conv_separate_breakout'),
])
def test_ppo(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/ppo.json', 'ppo_mlp_shared_pendulum'),
    ('experimental/ppo.json', 'ppo_mlp_separate_pendulum'),
    ('experimental/ppo.json', 'ppo_rnn_shared_pendulum'),
    ('experimental/ppo.json', 'ppo_rnn_separate_pendulum'),
])
def test_ppo_cont(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@flaky
@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/ppo_sil.json', 'ppo_sil_mlp_shared_cartpole'),
    ('experimental/ppo_sil.json', 'ppo_sil_mlp_separate_cartpole'),
    ('experimental/ppo_sil.json', 'ppo_sil_rnn_shared_cartpole'),
    ('experimental/ppo_sil.json', 'ppo_sil_rnn_separate_cartpole'),
])
def test_ppo_sil(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@flaky
@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/ppo_sil.json', 'ppo_sil_mlp_shared_pendulum'),
    ('experimental/ppo_sil.json', 'ppo_sil_mlp_separate_pendulum'),
    ('experimental/ppo_sil.json', 'ppo_sil_rnn_shared_pendulum'),
    ('experimental/ppo_sil.json', 'ppo_sil_rnn_separate_pendulum'),
])
def test_ppo_sil_cont(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@flaky
@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/sil.json', 'sil_mlp_shared_cartpole'),
    ('experimental/sil.json', 'sil_mlp_separate_cartpole'),
    ('experimental/sil.json', 'sil_rnn_shared_cartpole'),
    ('experimental/sil.json', 'sil_rnn_separate_cartpole'),
    # ('experimental/sil.json', 'sil_conv_shared_breakout'),
    # ('experimental/sil.json', 'sil_conv_separate_breakout'),
])
def test_sil(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@flaky
@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/sil.json', 'sil_mlp_shared_pendulum'),
    ('experimental/sil.json', 'sil_mlp_separate_pendulum'),
    ('experimental/sil.json', 'sil_rnn_shared_pendulum'),
    ('experimental/sil.json', 'sil_rnn_separate_pendulum'),
])
def test_sil_cont(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/sarsa.json', 'sarsa_mlp_boltzmann_cartpole'),
    ('experimental/sarsa.json', 'sarsa_mlp_epsilon_greedy_cartpole'),
    ('experimental/sarsa.json', 'sarsa_rnn_boltzmann_cartpole'),
    ('experimental/sarsa.json', 'sarsa_rnn_epsilon_greedy_cartpole'),
    # ('experimental/sarsa.json', 'sarsa_conv_boltzmann_breakout'),
    # ('experimental/sarsa.json', 'sarsa_conv_epsilon_greedy_breakout'),
])
def test_sarsa(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/dqn.json', 'vanilla_dqn_cartpole'),
    ('experimental/dqn.json', 'dqn_boltzmann_cartpole'),
    ('experimental/dqn.json', 'dqn_epsilon_greedy_cartpole'),
    ('experimental/dqn.json', 'drqn_boltzmann_cartpole'),
    ('experimental/dqn.json', 'drqn_epsilon_greedy_cartpole'),
    # ('experimental/dqn.json', 'dqn_boltzmann_breakout'),
    # ('experimental/dqn.json', 'dqn_epsilon_greedy_breakout'),
    ('experimental/dqn.json', 'dqn_stack_epsilon_greedy_lunar'),
])
def test_dqn(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/ddqn.json', 'ddqn_boltzmann_cartpole'),
    ('experimental/ddqn.json', 'ddqn_epsilon_greedy_cartpole'),
    ('experimental/ddqn.json', 'ddrqn_boltzmann_cartpole'),
    ('experimental/ddqn.json', 'ddrqn_epsilon_greedy_cartpole'),
    # ('experimental/ddqn.json', 'ddqn_boltzmann_breakout'),
    # ('experimental/ddqn.json', 'ddqn_epsilon_greedy_breakout'),
])
def test_ddqn(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/dueling_dqn.json', 'dueling_dqn_boltzmann_cartpole'),
    ('experimental/dueling_dqn.json', 'dueling_dqn_epsilon_greedy_cartpole'),
    # ('experimental/dueling_dqn.json', 'dueling_dqn_boltzmann_breakout'),
    # ('experimental/dueling_dqn.json', 'dueling_dqn_epsilon_greedy_breakout'),
])
def test_dueling_dqn(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/hydra_dqn.json', 'hydra_dqn_boltzmann_cartpole'),
    ('experimental/hydra_dqn.json', 'hydra_dqn_epsilon_greedy_cartpole'),
    # ('experimental/hydra_dqn.json', 'hydra_dqn_epsilon_greedy_cartpole_2dball'),
])
def test_hydra_dqn(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@flaky
@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/dqn.json', 'dqn_pong'),
    # ('experimental/a2c.json', 'a2c_pong'),
])
def test_atari(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/reinforce.json', 'reinforce_conv_vizdoom'),
])
def test_reinforce_vizdoom(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('base.json', 'base_case_unity'),
    ('base.json', 'base_case_openai'),
    ('random.json', 'random_cartpole'),
    ('random.json', 'random_pendulum'),
    # ('base.json', 'multi_agent'),
    # ('base.json', 'multi_agent_multi_env'),
])
def test_base(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)


@pytest.mark.parametrize('spec_file,spec_name', [
    ('base.json', 'multi_body'),
    ('base.json', 'multi_env'),
])
def test_base_multi(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)
```
avg_line_length: 36.855856 | max_line_length: 82 | alphanum_fraction: 0.744072
quality signals (unlisted = 0): num_words 1080, num_chars 8182, mean_word_length 5.218519, frac_words_unique 0.077778, frac_chars_top_2grams 0.083747, frac_chars_top_3grams 0.125621, frac_chars_top_4grams 0.167495, frac_chars_dupe_5grams 0.841732, frac_chars_dupe_6grams 0.725869, frac_chars_dupe_7grams 0.493435, frac_chars_dupe_8grams 0.45511, frac_chars_dupe_9grams 0.388396, frac_chars_dupe_10grams 0.388396, frac_chars_digital 0.003695, frac_chars_whitespace 0.106942, size_file_byte 8182, num_lines 221, num_chars_line_max 83, num_chars_line_mean 37.022624, frac_chars_alphabet 0.76762, frac_chars_comments 0.137253, frac_lines_dupe_lines 0.385093, frac_chars_string_length 0.461211, frac_chars_long_word_length 0.385621, frac_lines_assert 0.006211, cate_ast 1, frac_lines_func_ratio 0.124224, cate_var_zero false, frac_lines_import 0.055901, score_lines_no_logic 0.180124
filter flags set (unlisted = 0; frac_words_unique and frac_lines_string_concat null): frac_chars_top_4grams, frac_chars_dupe_5grams, frac_chars_dupe_6grams
effective: 0 | hits: 3
1635645909c86684dc1d01665725f73b3baa25cb
| 348
|
py
|
Python
|
tests/utils/test_clean_accounting_column.py
|
richardqiu/pyjanitor
|
aa3150e7b8e2adc4733ea206ea9c3093e21d4025
|
[
"MIT"
] | 2
|
2020-09-06T22:11:01.000Z
|
2022-03-19T23:57:24.000Z
|
tests/utils/test_clean_accounting_column.py
|
richardqiu/pyjanitor
|
aa3150e7b8e2adc4733ea206ea9c3093e21d4025
|
[
"MIT"
] | 1
|
2021-05-17T15:30:04.000Z
|
2021-07-29T09:39:56.000Z
|
tests/utils/test_clean_accounting_column.py
|
richardqiu/pyjanitor
|
aa3150e7b8e2adc4733ea206ea9c3093e21d4025
|
[
"MIT"
] | 1
|
2020-08-10T20:30:20.000Z
|
2020-08-10T20:30:20.000Z
|
import pytest
from janitor.utils import _clean_accounting_column
@pytest.mark.utils
def test_clean_accounting_column():
test_str = "(1,000)"
assert _clean_accounting_column(test_str) == float(-1000)
@pytest.mark.utils
def test_clean_accounting_column_zeroes():
test_str = "()"
assert _clean_accounting_column(test_str) == 0.00
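The two cases above can also be collapsed into a single parametrized test; a minimal sketch, assuming the same _clean_accounting_column helper and marker:
@pytest.mark.utils
@pytest.mark.parametrize("test_str,expected", [("(1,000)", -1000.0), ("()", 0.0)])
def test_clean_accounting_column_cases(test_str, expected):
    # each tuple is (raw accounting string, expected float)
    assert _clean_accounting_column(test_str) == expected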
163995382115c67384ddb8a508342f8bf7650216 | 1,164 | py | Python | cyberbrain/frame_tree.py | testinggg-art/Cyberbrain @ e38c74c174e23aa386d005b03f09b30aa1b3a0ae | ["MIT"] | stars/issues/forks: null
from __future__ import annotations
from .frame import Frame
from .generated.communication_pb2 import CursorPosition
class FrameTree:
"""A tree to store all frames. For now it's a fake implementation.
Each node in the tree represents a frame that ever exists during program execution.
Caller and callee frames are connected. Call order is preserved among callee frames
of the same caller frame.
Nodes are also indexed by frames' physical location (file name, line range).
TODO:
- Add indexes.
- Implement frame search.
"""
# Keyed by frame ID.
frames: dict[str, Frame] = dict()
@classmethod
def add_frame(cls, frame_id, frame: Frame):
cls.frames[frame_id] = frame
print(frame_id, frame)
@classmethod
def find_frames(cls, position: CursorPosition) -> list[Frame]:
"""
Right now it's a fake implementation, where we return the only existing frame.
"""
assert cls.frames
return [next(iter(cls.frames.values()))]
@classmethod
def get_frame(cls, frame_id) -> Frame:
assert cls.frames
return cls.frames[frame_id]
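A minimal usage sketch of the fake implementation above (the frame_id string and frame object are hypothetical; a real Frame comes from the .frame module):
# FrameTree.add_frame("frame_0", frame)   # registers (and prints) the frame
# FrameTree.get_frame("frame_0")          # returns that same frame
# FrameTree.find_frames(position)         # returns [frame]: the fake lookup ignores position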
166f10041a007d09adb3797f8fd4bf54942b5eeb | 1,513 | py | Python | prelude/monads.py | michel-slm/python-prelude @ b3ca89ff2bf150f772764f59d2796d2fcce1013d | ["MIT"] | stars: 2 (2015-05-12T16:12:56.000Z to 2020-08-26T20:52:47.000Z) | issues: null | forks: null
from abc import ABCMeta, abstractmethod
from prelude.typeclasses import Monad
from prelude.decorators import monad_eq, singleton
@monad_eq
class Either(Monad):
__metaclass__ = ABCMeta
@classmethod
def mreturn(cls, val):
return Right(val)
@abstractmethod
def __iter__(self):
pass
class Left(Either):
def __init__(self, val):
self.__val = val
def __rshift__(self, f):
return self
def __iter__(self):
return iter([])
def __eq__(self, other):
return type(self) == type(other)
def __repr__(self):
return "Left({})".format(self.__val)
class Right(Either):
def __init__(self, val):
self.__val = val
def __rshift__(self, f):
return f(self.__val)
def __iter__(self):
yield self.__val
def __repr__(self):
return "Right({})".format(self.__val)
class Maybe(Monad):
__metaclass__ = ABCMeta
@classmethod
def mreturn(cls, val):
return Just(val)
@abstractmethod
def __iter__(self):
pass
@monad_eq
class Just(Maybe):
def __init__(self, val):
self.__val = val
def __rshift__(self, f):
return f(self.__val)
def __iter__(self):
yield self.__val
def __repr__(self):
return "Just({})".format(self.__val)
@singleton
class Nothing(Maybe):
def __rshift__(self, f):
return self
def __iter__(self):
return iter([])
def __repr__(self):
return "Nothing()"
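A short usage sketch of the combinators above, assuming the prelude imports resolve and that @singleton exposes Nothing as an instance:
double = lambda x: Right(x * 2)
print(Right(21) >> double)            # Right(42): >> binds the wrapped value
print(Left("boom") >> double)         # Left(boom): >> short-circuits on Left
print(list(Just(5)), list(Nothing))   # [5] []: iteration yields the value, or nothing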
16718d7813439bbbc33bc80e98b6e4741d2b5b6c | 261 | py | Python | foodx_devops_tools/azure/__init__.py | Food-X-Technologies/foodx_devops_tools @ 57d1bf1304d9c9a386eaffa427f9eb36c410c350 | ["MIT"] | stars: 3 (2021-06-23T20:53:43.000Z to 2022-01-26T14:19:43.000Z) | issues: 33 (2021-08-09T15:44:51.000Z to 2022-03-03T18:28:02.000Z) | forks: 1 (2021-06-23T20:53:52.000Z)
# Copyright (c) 2021 Food-X Technologies
#
# This file is part of foodx_devops_tools.
#
# You should have received a copy of the MIT License along with
# foodx_devops_tools. If not, see <https://opensource.org/licenses/MIT>.
"""Azure related utilities."""
1680693e61459262ca19480a0c2453b04b05a5a0 | 992 | py | Python | StorageSystem.py | aaronFritz2302/ZoomAuto @ 41af90dc35104bfea970b6b61694e105a625535c | ["MIT"] | stars/issues/forks: null
import sqlite3
from pandas import DataFrame
conn = sqlite3.connect('./data.db',check_same_thread=False)
class DataBase():
cursor = conn.cursor()
def __init__(self):
self.createTable()
def createTable(self):
'''
        Creates a table if it doesn't exist.
'''
conn.execute("""CREATE TABLE IF NOT EXISTS MeetingData (Name text,ID text,Password text, DateTime text,Audio text,Video Text)""")
def enterData(self,meetingData):
'''
Enters Data From The UI Table To The DataBase
'''
meetingData.to_sql('MeetingData', con = conn, if_exists='replace', index = False)
def readData(self):
'''
Reads Data From The SQL DataBase
'''
self.cursor.execute('''SELECT * FROM MeetingData''')
retVal = DataFrame(self.cursor.fetchall(),columns=['Name','ID','Password','DateTime','Audio','Video'])
return retVal
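A quick usage sketch (column names match the schema created above; the sample row is illustrative):
db = DataBase()
meetings = DataFrame([["Standup", "123", "pw", "2021-01-01 09:00", "on", "off"]],
                     columns=["Name", "ID", "Password", "DateTime", "Audio", "Video"])
db.enterData(meetings)   # writes the DataFrame into the MeetingData table
print(db.readData())     # reads it back as a DataFrame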
16a2ce4183cf617439f69c8fd39f2dded2cf7d88 | 180 | py | Python | analisador_sintatico/blueprints/api/parsers.py | viniciusandd/uri-analisador-sintatico @ b347f4293e4c60bd3b2c838c8cef0d75db2c0bec | ["MIT"] | stars/issues/forks: null
from flask_restful import reqparse
def retornar_parser():
parser = reqparse.RequestParser()
parser.add_argument('sentenca', type=str, required=True)
return parser
16a7758cb5092239aa048ae598f5849367159b11 | 647 | py | Python | src/controllers/__init__.py | TonghanWang/NDQ @ 575f2e243bac1a567c072dbea8e093aaa4959511 | ["Apache-2.0"] | stars: 63 (2020-02-23T09:37:15.000Z to 2022-01-17T01:30:50.000Z) | issues (fringsoo/NDQ @ e243ba917e331065e82c6634cb1d756873747be5): 14 (2020-04-20T02:20:11.000Z to 2022-03-12T00:16:33.000Z) | forks (mig-zh/NDQ @ 5720e3e8b529724e8d96a9a24c73bca24a11e7f9): 16 (2020-03-12T02:57:52.000Z to 2021-11-27T13:07:08.000Z)
from .basic_controller import BasicMAC
from .cate_broadcast_comm_controller import CateBCommMAC
from .cate_broadcast_comm_controller_full import CateBCommFMAC
from .cate_broadcast_comm_controller_not_IB import CateBCommNIBMAC
from .tar_comm_controller import TarCommMAC
from .cate_pruned_broadcast_comm_controller import CatePBCommMAC
REGISTRY = {"basic_mac": BasicMAC,
"cate_broadcast_comm_mac": CateBCommMAC,
"cate_broadcast_comm_mac_full": CateBCommFMAC,
"cate_broadcast_comm_mac_not_IB": CateBCommNIBMAC,
"tar_comm_mac": TarCommMAC,
"cate_pruned_broadcast_comm_mac": CatePBCommMAC}
16abab9c314c051765ffd991fb6c764e6cf24cb5 | 235 | py | Python | solutions/pic_search/webserver/src/service/theardpool.py | naetimus/bootcamp @ 0182992df7c54012944b51fe9b70532ab6a0059b | ["Apache-2.0"] | stars: 1 (2020-03-10T07:43:08.000Z) | issues: null | forks: 1 (2020-04-03T05:24:47.000Z)
import threading
from concurrent.futures import ThreadPoolExecutor
from service.train import do_train
def thread_runner(thread_num, func, *args):
executor = ThreadPoolExecutor(thread_num)
    return executor.submit(do_train, *args)  # return the Future so callers can wait on or inspect the task
16bf36b1dcc9b129dcd361097fbc1ea1ea920674 | 1,654 | py | Python | pytudes/_2021/educative/grokking_the_coding_interview/fast_and_slow_pointers/_1__linked_list_cycle__easy.py | TeoZosa/pytudes @ 4f01ab20f936bb4b3f42d1946180d4a20fd95fbf | ["Apache-2.0"] | stars: 1 (2022-02-08T09:47:35.000Z) | issues: 62 (2021-04-02T23:41:16.000Z to 2022-03-25T13:16:10.000Z) | forks: null
"""https://www.educative.io/courses/grokking-the-coding-interview/N7rwVyAZl6D
Categories:
- Binary
- Bit Manipulation
- Blind 75
See Also:
- pytudes/_2021/leetcode/blind_75/linked_list/_141__linked_list_cycle__easy.py
"""
from pytudes._2021.utils.linked_list import (
ListNode,
NodeType,
convert_list_to_linked_list,
)
def has_cycle(head: NodeType) -> bool:
"""
Args:
head: head of a singly-linked list of nodes
Returns:
whether or not the linked list has a cycle
Examples:
>>> has_cycle(None)
False
>>> head = ListNode("self-edge")
>>> head.next = head
>>> has_cycle(head)
True
>>> head = convert_list_to_linked_list([1,2,3,4,5,6])
>>> has_cycle(head)
False
>>> head.next.next.next.next.next.next = head.next.next
>>> has_cycle(head)
True
>>> head.next.next.next.next.next.next = head.next.next.next
>>> has_cycle(head)
True
"""
slow = fast = head
while fast is not None and fast.next is not None: # since fast ≥ slow
slow = slow.next
fast = fast.next.next
if slow == fast:
return True # found the cycle
else:
return False
def main():
head = convert_list_to_linked_list([1, 2, 3, 4, 5, 6])
print("LinkedList has cycle: " + str(has_cycle(head)))
head.next.next.next.next.next.next = head.next.next
print("LinkedList has cycle: " + str(has_cycle(head)))
head.next.next.next.next.next.next = head.next.next.next
print("LinkedList has cycle: " + str(has_cycle(head)))
if __name__ == "__main__":
    main()
16c04b9a7be2241d66d21a6886d268026e2fdc89 | 258 | py | Python | metric/metric.py | riven314/ENetDepth_TimeAnlysis_Tmp @ 29bd864adf91700799d87b449d0c4e389f7028bc | ["MIT"] | stars/issues/forks: null
class Metric(object):
"""Base class for all metrics.
From: https://github.com/pytorch/tnt/blob/master/torchnet/meter/meter.py
"""
def reset(self):
pass
def add(self):
pass
def value(self):
pass
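A hypothetical subclass sketch showing how the no-op hooks above are meant to be filled in (the add() signature is extended here for illustration):
class RunningMean(Metric):
    def reset(self):
        self.total, self.count = 0.0, 0
    def add(self, value):
        # accumulate one observation
        self.total += value
        self.count += 1
    def value(self):
        return self.total / self.count if self.count else 0.0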
16df196ac8b1d19487d9f38ab432516956acf44f | 13,440 | py | Python | test.py | UnKafkaesque/Sentiment-Analysis @ bd8517420534bcfe76f2f60a4f178d1dac540075 | ["MIT"] | stars/issues/forks: null
import os
import sys
import time
import traceback
import project1_Copy as p1
import numpy as np
verbose = False
def green(s):
return '\033[1;32m%s\033[m' % s
def yellow(s):
return '\033[1;33m%s\033[m' % s
def red(s):
return '\033[1;31m%s\033[m' % s
def log(*m):
print(" ".join(map(str, m)))
def log_exit(*m):
log(red("ERROR:"), *m)
exit(1)
def check_real(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not np.isreal(res):
log(red("FAIL"), ex_name, ": does not return a real number, type: ", type(res))
return True
if res != exp_res:
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def equals(x, y):
if type(y) == np.ndarray:
return (x == y).all()
return x == y
def check_tuple(ex_name, f, exp_res, *args, **kwargs):
try:
res = f(*args, **kwargs)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == tuple:
log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected a tuple of size ", len(exp_res), " but got tuple of size", len(res))
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_array(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == np.ndarray:
log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_list(ex_name, f, exp_res, *args):
try:
res = f(*args)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return True
if not type(res) == list:
log(red("FAIL"), ex_name, ": does not return a list, type: ", type(res))
return True
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected a list of size ", len(exp_res), " but got list of size", len(res))
return True
if not all(equals(x, y) for x, y in zip(res, exp_res)):
log(red("FAIL"), ex_name, ": incorrect answer. Expected", exp_res, ", got: ", res)
return True
def check_get_order():
ex_name = "Get order"
if check_list(
ex_name, p1.get_order,
[0], 1):
log("You should revert `get_order` to its original implementation for this test to pass")
return
if check_list(
ex_name, p1.get_order,
[1, 0], 2):
log("You should revert `get_order` to its original implementation for this test to pass")
return
log(green("PASS"), ex_name, "")
def check_hinge_loss_single():
ex_name = "Hinge loss single"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -0.2
exp_res = 1 - 0.8
if check_real(
ex_name, p1.hinge_loss_single,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_hinge_loss_full():
ex_name = "Hinge loss full"
feature_vector = np.array([[1, 2], [1, 2]])
label, theta, theta_0 = np.array([1, 1]), np.array([-1, 1]), -0.2
exp_res = 1 - 0.8
if check_real(
ex_name, p1.hinge_loss_full,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_perceptron_single_update():
ex_name = "Perceptron single update"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
exp_res = (np.array([0, 3]), -0.5)
if check_tuple(
ex_name, p1.perceptron_single_step_update,
exp_res, feature_vector, label, theta, theta_0):
return
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1
exp_res = (np.array([0, 3]), 0)
if check_tuple(
ex_name + " (boundary case)", p1.perceptron_single_step_update,
exp_res, feature_vector, label, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_perceptron():
ex_name = "Perceptron"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 1
exp_res = (np.array([0, 2]), 2)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 2
exp_res = (np.array([0, 2]), 2)
if check_tuple(
ex_name, p1.perceptron,
exp_res, feature_matrix, labels, T):
return
log(green("PASS"), ex_name, "")
def check_average_perceptron():
ex_name = "Average perceptron"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 1
exp_res = (np.array([-0.5, 1]), 1.5)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
feature_matrix = np.array([[1, 2], [-1, 0]])
labels = np.array([1, 1])
T = 2
exp_res = (np.array([-0.25, 1.5]), 1.75)
if check_tuple(
ex_name, p1.average_perceptron,
exp_res, feature_matrix, labels, T):
return
log(green("PASS"), ex_name, "")
def check_pegasos_single_update():
ex_name = "Pegasos single update"
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -1.5
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.18]), -1.4)
if check_tuple(
ex_name, p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
feature_vector = np.array([1, 1])
label, theta, theta_0 = 1, np.array([-1, 1]), 1
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.08]), 1.1)
if check_tuple(
ex_name + " (boundary case)", p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
feature_vector = np.array([1, 2])
label, theta, theta_0 = 1, np.array([-1, 1]), -2
L = 0.2
eta = 0.1
exp_res = (np.array([-0.88, 1.18]), -1.9)
if check_tuple(
ex_name, p1.pegasos_single_step_update,
exp_res,
feature_vector, label, L, eta, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_pegasos():
ex_name = "Pegasos"
feature_matrix = np.array([[1, 2]])
labels = np.array([1])
T = 1
L = 0.2
exp_res = (np.array([1, 2]), 1)
if check_tuple(
ex_name, p1.pegasos,
exp_res, feature_matrix, labels, T, L):
return
feature_matrix = np.array([[1, 1], [1, 1]])
labels = np.array([1, 1])
T = 1
L = 1
exp_res = (np.array([1-1/np.sqrt(2), 1-1/np.sqrt(2)]), 1)
if check_tuple(
ex_name, p1.pegasos,
exp_res, feature_matrix, labels, T, L):
return
log(green("PASS"), ex_name, "")
def check_classify():
ex_name = "Classify"
feature_matrix = np.array([[1, 1], [1, 1], [1, 1]])
theta = np.array([1, 1])
theta_0 = 0
exp_res = np.array([1, 1, 1])
if check_array(
ex_name, p1.classify,
exp_res, feature_matrix, theta, theta_0):
return
feature_matrix = np.array([[-1, 1]])
theta = np.array([1, 1])
theta_0 = 0
exp_res = np.array([-1])
if check_array(
ex_name + " (boundary case)", p1.classify,
exp_res, feature_matrix, theta, theta_0):
return
log(green("PASS"), ex_name, "")
def check_classifier_accuracy():
ex_name = "Classifier accuracy"
train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
val_feature_matrix = np.array([[1, 1], [2, -1]])
train_labels = np.array([1, -1, 1])
val_labels = np.array([-1, 1])
exp_res = 1, 0
T=1
if check_tuple(
ex_name, p1.classifier_accuracy,
exp_res,
p1.perceptron,
train_feature_matrix, val_feature_matrix,
train_labels, val_labels,
T=T):
return
train_feature_matrix = np.array([[1, 0], [1, -1], [2, 3]])
val_feature_matrix = np.array([[1, 1], [2, -1]])
train_labels = np.array([1, -1, 1])
val_labels = np.array([-1, 1])
exp_res = 1, 0
T=1
L=0.2
if check_tuple(
ex_name, p1.classifier_accuracy,
exp_res,
p1.pegasos,
train_feature_matrix, val_feature_matrix,
train_labels, val_labels,
T=T, L=L):
return
log(green("PASS"), ex_name, "")
def check_bag_of_words():
ex_name = "Bag of words"
texts = [
"He loves to walk on the beach",
"There is nothing better"]
try:
res = p1.bag_of_words(texts)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return
if not type(res) == dict:
log(red("FAIL"), ex_name, ": does not return a tuple, type: ", type(res))
return
vals = sorted(res.values())
exp_vals = list(range(len(res.keys())))
if not vals == exp_vals:
log(red("FAIL"), ex_name, ": wrong set of indices. Expected: ", exp_vals, " got ", vals)
return
log(green("PASS"), ex_name, "")
keys = sorted(res.keys())
exp_keys = ['beach', 'better', 'he', 'is', 'loves', 'nothing', 'on', 'the', 'there', 'to', 'walk']
stop_keys = ['beach', 'better', 'loves', 'nothing', 'walk']
if keys == exp_keys:
log(yellow("WARN"), ex_name, ": does not remove stopwords:", [k for k in keys if k not in stop_keys])
elif keys == stop_keys:
log(green("PASS"), ex_name, " stopwords removed")
else:
        log(red("FAIL"), ex_name, ": keys are missing:", [k for k in stop_keys if k not in keys], " or are unexpected:", [k for k in keys if k not in stop_keys])
def check_extract_bow_feature_vectors():
ex_name = "Extract bow feature vectors"
texts = [
"He loves her ",
"He really really loves her"]
keys = ["he", "loves", "her", "really"]
dictionary = {k:i for i, k in enumerate(keys)}
exp_res = np.array(
[[1, 1, 1, 0],
[1, 1, 1, 1]])
non_bin_res = np.array(
[[1, 1, 1, 0],
[1, 1, 1, 2]])
try:
res = p1.extract_bow_feature_vectors(texts, dictionary)
except NotImplementedError:
log(red("FAIL"), ex_name, ": not implemented")
return
if not type(res) == np.ndarray:
log(red("FAIL"), ex_name, ": does not return a numpy array, type: ", type(res))
return
if not len(res) == len(exp_res):
log(red("FAIL"), ex_name, ": expected an array of shape ", exp_res.shape, " but got array of shape", res.shape)
return
log(green("PASS"), ex_name)
if (res == exp_res).all():
log(yellow("WARN"), ex_name, ": uses binary indicators as features")
elif (res == non_bin_res).all():
log(green("PASS"), ex_name, ": correct non binary features")
else:
log(red("FAIL"), ex_name, ": unexpected feature matrix")
return
def main():
log(green("PASS"), "Import project1")
try:
check_get_order()
check_hinge_loss_single()
check_hinge_loss_full()
check_perceptron_single_update()
check_perceptron()
check_average_perceptron()
check_pegasos_single_update()
check_pegasos()
check_classify()
check_classifier_accuracy()
check_bag_of_words()
check_extract_bow_feature_vectors()
except Exception:
log_exit(traceback.format_exc())
if __name__ == "__main__":
main()
16ebc077aad6a4dd684131dc7271bbdbd5696af9 | 743 | py | Python | test.py | sbcshop/PiRelay-8 @ 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | ["MIT"] | stars: 2 (2021-09-07T03:25:00.000Z to 2021-09-07T17:28:46.000Z) | issues: null | forks: null
from PiRelay8 import Relay
import time
r1 = Relay("RELAY1")
r2 = Relay("RELAY2")
r3 = Relay("RELAY3")
r4 = Relay("RELAY4")
r5 = Relay("RELAY5")
r6 = Relay("RELAY6")
r7 = Relay("RELAY7")
r8 = Relay("RELAY8")
r1.off()
r2.off()
r3.off()
r4.off()
r5.off()
r6.off()
r7.off()
r8.off()
r1.on()
time.sleep(0.5)
r1.off()
time.sleep(0.5)
r2.on()
time.sleep(0.5)
r2.off()
time.sleep(0.5)
r3.on()
time.sleep(0.5)
r3.off()
time.sleep(0.5)
r4.on()
time.sleep(0.5)
r4.off()
time.sleep(0.5)
r5.on()
time.sleep(0.5)
r5.off()
time.sleep(0.5)
r6.on()
time.sleep(0.5)
r6.off()
time.sleep(0.5)
r7.on()
time.sleep(0.5)
r7.off()
time.sleep(0.5)
r8.on()
time.sleep(0.5)
r8.off()
time.sleep(0.5)
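The same off/on sweep can be written as a loop; a minimal sketch using only the Relay API exercised above:
relays = [Relay("RELAY%d" % i) for i in range(1, 9)]
for r in relays:
    r.off()          # start with every channel off
for r in relays:
    r.on()           # pulse each relay for half a second in turn
    time.sleep(0.5)
    r.off()
    time.sleep(0.5)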
bc4dbd4536189e5c83cb95261570c971ee7df77f | 336 | py | Python | ghostwriter/rolodex/apps.py | bbhunter/Ghostwriter @ 1b684ddd119feed9891e83b39c9b314b41d086ca | ["BSD-3-Clause"] | stars: 601 (2019-07-30T17:06:37.000Z to 2022-03-31T00:55:31.000Z) | issues (chrismaddalena/Ghostwriter @ 5a938358450cd0e69a42883b1b18e067644744a8): 150 (2019-08-01T07:20:22.000Z to 2022-03-29T19:18:02.000Z) | forks (chrismaddalena/Ghostwriter @ 5a938358450cd0e69a42883b1b18e067644744a8): 126 (2019-07-30T17:42:49.000Z to 2022-03-21T20:43:35.000Z)
"""This contains the configuration of the Rolodex application."""
# Django Imports
from django.apps import AppConfig
class RolodexConfig(AppConfig):
name = "ghostwriter.rolodex"
def ready(self):
try:
import ghostwriter.rolodex.signals # noqa F401 isort:skip
except ImportError:
pass
bc5478846dead2384e17349d8f75968c543992de | 407 | py | Python | pkg/maths/maths.py | prateekdegaons1991/experiment-loadtest @ b53c70fac5b2f7d37df77844b26f79741c74c1b6 | ["Apache-2.0"] | stars: 8 (2020-04-17T06:34:30.000Z to 2021-12-18T10:54:50.000Z) | issues (oumkale/test-python @ 1f3d3e42ffbe1bf5ed9df8a0c6038e50129b2c4d): 15 (2020-04-18T06:01:53.000Z to 2022-02-15T08:56:25.000Z) | forks (oumkale/test-python @ 1f3d3e42ffbe1bf5ed9df8a0c6038e50129b2c4d): 12 (2020-04-17T05:14:27.000Z to 2022-03-29T19:24:20.000Z)
#Atoi stands for ASCII to Integer Conversion
def atoi(string):
res = 0
# Iterate through all characters of
# input and update result
for i in range(len(string)):
res = res * 10 + (ord(string[i]) - ord('0'))
return res
#Adjustment contains rule of three for calculating an integer given another integer representing a percentage
def Adjustment(a, b):
return (a * b) / 100
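A quick check of both helpers:
print(atoi("256"))          # 256
print(Adjustment(200, 50))  # 100.0, i.e. 50% of 200 via the rule of three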
bc5b677ed37d940fc02b036d43d53f7c6322c3f1 | 599 | py | Python | losses/all_lost.py | Liudzz/loss-chapter @ 22359b92ca5e155d5af32ef2f22eeddf0483b947 | ["Apache-2.0"] | stars: 2 (2020-07-07T00:03:31.000Z to 2020-07-08T09:58:48.000Z) | issues: null | forks: 2 (2020-07-08T09:58:56.000Z to 2020-07-11T13:43:53.000Z)
"""
easy way to use losses
"""
from center_loss import Centerloss
import torch.nn as nn
from FocalLoss import FocalLoss
def center_loss(pred, label, num_class, feature):
    loss = Centerloss(num_class, feature)
    return loss(pred, label)
def Focal_loss(pred, label, num_class, gamma, alpha=None):
    # gamma must come before the defaulted alpha, and the focal loss should use FocalLoss
    loss = FocalLoss(num_class, gamma)
    return loss(pred, label)
def L1_loss(pred,label):
loss = nn.L1Loss(pred,label)
return loss
def L2_loss(pred,label):
loss = nn.MSELoss(pred,label)
return loss
def SmoothL1_loss(pred,label):
loss = nn.SmoothL1Loss(pred,label)
return loss
bc6e8fa55969e186c06ce2946db2244dfbf09a10 | 7,334 | py | Python | statsmodels/discrete/tests/test_conditional.py | porcpine1967/statsmodels @ db4900056d80732ffff2733454fac88781ced8d2 | ["BSD-3-Clause"] | stars/issues/forks: null
import numpy as np
from statsmodels.discrete.conditional_models import (
ConditionalLogit, ConditionalPoisson)
from statsmodels.tools.numdiff import approx_fprime
from numpy.testing import assert_allclose
import pandas as pd
def test_logit_1d():
y = np.r_[0, 1, 0, 1, 0, 1, 0, 1, 1, 1]
g = np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2, 2]
x = np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1, 0]
x = x[:, None]
model = ConditionalLogit(y, x, groups=g)
# Check the gradient for the denominator of the partial likelihood
for x in -1, 0, 1, 2:
params = np.r_[x, ]
_, grad = model._denom_grad(0, params)
ngrad = approx_fprime(params, lambda x: model._denom(0, x))
assert_allclose(grad, ngrad)
# Check the gradient for the loglikelihood
for x in -1, 0, 1, 2:
grad = approx_fprime(np.r_[x, ], model.loglike)
score = model.score(np.r_[x, ])
assert_allclose(grad, score, rtol=1e-4)
result = model.fit()
# From Stata
assert_allclose(result.params, np.r_[0.9272407], rtol=1e-5)
assert_allclose(result.bse, np.r_[1.295155], rtol=1e-5)
def test_logit_2d():
y = np.r_[0, 1, 0, 1, 0, 1, 0, 1, 1, 1]
g = np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2, 2]
x1 = np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1, 0]
x2 = np.r_[0, 0, 1, 0, 0, 1, 0, 1, 1, 1]
x = np.empty((10, 2))
x[:, 0] = x1
x[:, 1] = x2
model = ConditionalLogit(y, x, groups=g)
# Check the gradient for the denominator of the partial likelihood
for x in -1, 0, 1, 2:
params = np.r_[x, -1.5*x]
_, grad = model._denom_grad(0, params)
ngrad = approx_fprime(params, lambda x: model._denom(0, x))
assert_allclose(grad, ngrad, rtol=1e-5)
# Check the gradient for the loglikelihood
for x in -1, 0, 1, 2:
params = np.r_[-0.5*x, 0.5*x]
grad = approx_fprime(params, model.loglike)
score = model.score(params)
assert_allclose(grad, score, rtol=1e-4)
result = model.fit()
# From Stata
assert_allclose(result.params, np.r_[1.011074, 1.236758], rtol=1e-3)
assert_allclose(result.bse, np.r_[1.420784, 1.361738], rtol=1e-5)
result.summary()
def test_formula():
for j in 0, 1:
np.random.seed(34234)
n = 200
y = np.random.randint(0, 2, size=n)
x1 = np.random.normal(size=n)
x2 = np.random.normal(size=n)
g = np.random.randint(0, 25, size=n)
x = np.hstack((x1[:, None], x2[:, None]))
if j == 0:
model1 = ConditionalLogit(y, x, groups=g)
else:
model1 = ConditionalPoisson(y, x, groups=g)
result1 = model1.fit()
df = pd.DataFrame({"y": y, "x1": x1, "x2": x2, "g": g})
if j == 0:
model2 = ConditionalLogit.from_formula(
"y ~ 0 + x1 + x2", groups="g", data=df)
else:
model2 = ConditionalPoisson.from_formula(
"y ~ 0 + x1 + x2", groups="g", data=df)
result2 = model2.fit()
assert_allclose(result1.params, result2.params, rtol=1e-5)
assert_allclose(result1.bse, result2.bse, rtol=1e-5)
assert_allclose(result1.cov_params(), result2.cov_params(), rtol=1e-5)
assert_allclose(result1.tvalues, result2.tvalues, rtol=1e-5)
def test_poisson_1d():
y = np.r_[3, 1, 1, 4, 5, 2, 0, 1, 6, 2]
g = np.r_[0, 0, 0, 0, 1, 1, 1, 1, 1, 1]
x = np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1, 0]
x = x[:, None]
model = ConditionalPoisson(y, x, groups=g)
# Check the gradient for the loglikelihood
for x in -1, 0, 1, 2:
grad = approx_fprime(np.r_[x, ], model.loglike)
score = model.score(np.r_[x, ])
assert_allclose(grad, score, rtol=1e-4)
result = model.fit()
# From Stata
assert_allclose(result.params, np.r_[0.6466272], rtol=1e-4)
assert_allclose(result.bse, np.r_[0.4170918], rtol=1e-5)
def test_poisson_2d():
y = np.r_[3, 1, 4, 8, 2, 5, 4, 7, 2, 6]
g = np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2, 2]
x1 = np.r_[0, 1, 0, 0, 1, 1, 0, 0, 1, 0]
x2 = np.r_[2, 1, 0, 0, 1, 2, 3, 2, 0, 1]
x = np.empty((10, 2))
x[:, 0] = x1
x[:, 1] = x2
model = ConditionalPoisson(y, x, groups=g)
# Check the gradient for the loglikelihood
for x in -1, 0, 1, 2:
params = np.r_[-0.5*x, 0.5*x]
grad = approx_fprime(params, model.loglike)
score = model.score(params)
assert_allclose(grad, score, rtol=1e-4)
result = model.fit()
# From Stata
assert_allclose(result.params, np.r_[-.9478957, -.0134279], rtol=1e-3)
assert_allclose(result.bse, np.r_[.3874942, .1686712], rtol=1e-5)
result.summary()
def test_lasso_logistic():
np.random.seed(3423948)
n = 200
groups = np.arange(10)
groups = np.kron(groups, np.ones(n // 10))
group_effects = np.random.normal(size=10)
group_effects = np.kron(group_effects, np.ones(n // 10))
x = np.random.normal(size=(n, 4))
params = np.r_[0, 0, 1, 0]
lin_pred = np.dot(x, params) + group_effects
mean = 1 / (1 + np.exp(-lin_pred))
    y = (np.random.uniform(size=n) < mean).astype(int)  # np.int is deprecated; the builtin int behaves the same here
model0 = ConditionalLogit(y, x, groups=groups)
result0 = model0.fit()
# Should be the same as model0
model1 = ConditionalLogit(y, x, groups=groups)
result1 = model1.fit_regularized(L1_wt=0, alpha=0)
assert_allclose(result0.params, result1.params, rtol=1e-3)
model2 = ConditionalLogit(y, x, groups=groups)
result2 = model2.fit_regularized(L1_wt=1, alpha=0.05)
    # Regression test
assert_allclose(result2.params, np.r_[0, 0, 0.55235152, 0], rtol=1e-4)
# Test with formula
df = pd.DataFrame({"y": y, "x1": x[:, 0], "x2": x[:, 1], "x3": x[:, 2],
"x4": x[:, 3], "groups": groups})
fml = "y ~ 0 + x1 + x2 + x3 + x4"
model3 = ConditionalLogit.from_formula(fml, groups="groups", data=df)
result3 = model3.fit_regularized(L1_wt=1, alpha=0.05)
assert_allclose(result2.params, result3.params)
def test_lasso_poisson():
np.random.seed(342394)
n = 200
groups = np.arange(10)
groups = np.kron(groups, np.ones(n // 10))
group_effects = np.random.normal(size=10)
group_effects = np.kron(group_effects, np.ones(n // 10))
x = np.random.normal(size=(n, 4))
params = np.r_[0, 0, 1, 0]
lin_pred = np.dot(x, params) + group_effects
mean = np.exp(lin_pred)
y = np.random.poisson(mean)
model0 = ConditionalPoisson(y, x, groups=groups)
result0 = model0.fit()
# Should be the same as model0
model1 = ConditionalPoisson(y, x, groups=groups)
result1 = model1.fit_regularized(L1_wt=0, alpha=0)
assert_allclose(result0.params, result1.params, rtol=1e-3)
model2 = ConditionalPoisson(y, x, groups=groups)
result2 = model2.fit_regularized(L1_wt=1, alpha=0.2)
# Regression test
assert_allclose(result2.params, np.r_[0, 0, 0.91697508, 0], rtol=1e-4)
# Test with formula
df = pd.DataFrame({"y": y, "x1": x[:, 0], "x2": x[:, 1], "x3": x[:, 2],
"x4": x[:, 3], "groups": groups})
fml = "y ~ 0 + x1 + x2 + x3 + x4"
model3 = ConditionalPoisson.from_formula(fml, groups="groups", data=df)
result3 = model3.fit_regularized(L1_wt=1, alpha=0.2)
assert_allclose(result2.params, result3.params)
bc889aea13c53b5ac47e25b4727f37433f19b834 | 322 | py | Python | src/pymortests/benchmarks.py | TiKeil/pymor @ 5c6b3b6e1714b5ede11ce7cf03399780ab29d252 | ["Unlicense"] | stars: 1 (2021-08-17T15:55:12.000Z) | issues (TreeerT/pymor @ e8b18d2d4c4b5998f0bd84f6728e365e0693b753): 4 (2022-03-17T10:07:38.000Z to 2022-03-30T12:41:06.000Z) | forks: null
# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright 2013-2020 pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from pymortests.base import runmodule
if __name__ == "__main__":
runmodule(filename=__file__)
bcc54625026e4e77ba54fe67d05a342fde131c90 | 185 | py | Python | cluster/density/test.py | michealowen/MachingLearning @ 9dcc908f2d3e468390e5abb7f051b449b0ecb455 | ["Apache-2.0"] | stars: 2 (2019-09-11T07:02:25.000Z to 2020-12-17T10:40:02.000Z) | issues: null | forks: null
class a:
def __init__(self,da):
self.da = da
return
def go(self):
dd()
return None
def dd():
print('ok')
return None
aa = a(1)
aa.go()
bcf55731b896385e43f5d67d9f858074f1791555 | 38 | py | Python | for1.py | satyamraj123/set-of-python-programs @ c9a20b37cddc555704799e5ff277488b7eff58a9 | ["Apache-2.0"] | stars/issues/forks: null
fruit='banana'
x=len(fruit)
print(x)
bcf8d925c5a3910be4a945e6cce5d1278db5fcb4 | 179 | py | Python | json2yaml.py | cristicalin/tools @ b8fe4efb1143a575d102d3a8e368052a4ecdceae | ["MIT"] | stars: null | issues: 1 (2016-01-31T12:54:06.000Z to 2016-02-29T13:45:46.000Z) | forks: null
#!/usr/bin/python
import sys
import yaml
import json
if __name__ == '__main__':
content = json.load(sys.stdin)
    print(yaml.dump(content, indent=2, default_flow_style=False))
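Typical invocation, piping JSON through stdin (the sample input is illustrative):
# echo '{"a": [1, 2]}' | python json2yaml.py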
bcfdc746f65446c3a274947a1481597ff88c7469 | 5,406 | py | Python | util/submission/templates.py | jeanlucf22/mgmol @ 4e79bc32c14c8a47ae18ad0659ea740719c8b77f | ["BSD-3-Clause-LBNL", "FSFAP"] | stars: 25 (2018-12-29T03:33:01.000Z to 2021-05-08T12:52:27.000Z) | issues: 121 (2018-12-19T02:38:21.000Z to 2021-12-20T16:29:24.000Z) | forks: 15 (2019-02-17T05:28:43.000Z to 2022-02-28T05:24:11.000Z)
md_template_d144 = """verbosity=0
xcFunctional=PBE
FDtype=4th
[Mesh]
nx=160
ny=80
nz=80
[Domain]
ox=0.
oy=0.
oz=0.
lx=42.4813
ly=21.2406
lz=21.2406
[Potentials]
pseudopotential=pseudo.D_tm_pbe
[Poisson]
solver=@
max_steps_initial=@50
max_steps=@50
reset=@
bcx=periodic
bcy=periodic
bcz=periodic
[Run]
type=MD
[MD]
type=@
num_steps=@
dt=@15.
[XLBOMD]
dissipation=@5
align=@
[Quench]
max_steps=@5
max_steps_tight=@
atol=1.e-@10
num_lin_iterations=3
ortho_freq=100
[SpreadPenalty]
type=@energy
damping=@
[email protected]
[email protected]
[Orbitals]
initial_type=Gaussian
initial_width=1.5
overallocate_factor=@2.
[ProjectedMatrices]
solver=@short_sighted
[LocalizationRegions]
radius=@8.
auxiliary_radius=@
[email protected]
[Restart]
input_filename=wave.out
input_level=3
interval=@
"""
md_template_H2O_64 = """verbosity=1
xcFunctional=PBE
FDtype=4th
[Mesh]
nx=128
ny=128
nz=128
[Domain]
ox=0.
oy=0.
oz=0.
lx=23.4884
ly=23.4884
lz=23.4884
[Potentials]
pseudopotential=pseudo.O_ONCV_PBE_SG15
pseudopotential=pseudo.D_ONCV_PBE_SG15
[Poisson]
solver=@
max_steps=@
[Run]
type=MD
[Quench]
max_steps=1000
atol=1.e-@
[MD]
type=@
num_steps=@
dt=10.
print_interval=5
[XLBOMD]
dissipation=@
align=@
[Restart]
input_filename=wave.out
input_level=4
output_level=4
interval=@
"""
quench_template_H2O_64 = """verbosity=1
xcFunctional=PBE
FDtype=4th
[Mesh]
nx=128
ny=128
nz=128
[Domain]
ox=0.
oy=0.
oz=0.
lx=23.4884
ly=23.4884
lz=23.4884
[Potentials]
pseudopotential=pseudo.O_ONCV_PBE_SG15
pseudopotential=pseudo.D_ONCV_PBE_SG15
[Run]
type=QUENCH
[Quench]
max_steps=1000
atol=1.e-8
[Orbitals]
initial_type=Fourier
[Restart]
output_level=4
"""
quench_template_d144 = """verbosity=1
xcFunctional=PBE
FDtype=4th
[Mesh]
nx=160
ny=80
nz=80
[Domain]
ox=0.
oy=0.
oz=0.
lx=42.4813
ly=21.2406
lz=21.2406
[Potentials]
pseudopotential=pseudo.D_tm_pbe
[Poisson]
solver=@
max_steps_initial=@50
max_steps=@50
bcx=periodic
bcy=periodic
bcz=periodic
[Run]
type=QUENCH
[Quench]
max_steps=200
atol=1.e-7
num_lin_iterations=3
ortho_freq=100
[SpreadPenalty]
type=@energy
damping=@
[email protected]
[email protected]
[Orbitals]
initial_type=Gaussian
initial_width=1.5
[ProjectedMatrices]
solver=@short_sighted
[LocalizationRegions]
radius=@8.
[Restart]
output_type=distributed
"""
H2O_64_params={
'nodes': '32',
'ntasks': '256',
'omp_num_threads': 8 if omp_num_threads == 4 else omp_num_threads,
'cores_per_task': '2',
'potentials': 'ln -s $maindir/potentials/pseudo.O_ONCV_PBE_SG15\nln -s $maindir/potentials/pseudo.D_ONCV_PBE_SG15',
'lrs': '',
'jobname': 'H2O_64',
}
d144_params={
'nodes': '8',
'walltime': '01:30:00',
'ntasks': '125',
'omp_num_threads': omp_num_threads,
'cores_per_task': '1',
'potentials': 'ln -s $maindir/potentials/pseudo.D_tm_pbe',
'lrs': '-l lrs.in',
'jobname': 'd144',
}
vulcan_params={
'queue': 'psmall',
'scratch_path': '/p/lscratchv/mgmolu/dunn27/mgmol/',
'gres': 'lscratchv',
'exe': 'mgmol-bgq',
}
cab_params={
'queue': 'pbatch',
'scratch_path': '/p/lscratchd/dunn27/mgmol/',
'gres': 'lscratchd',
'omp_num_threads': '1',
'exe': 'mgmol-pel',
'walltime': '01:30:00',
}
runfile_quench_template="""#!/bin/tcsh
#MSUB -l nodes={nodes},walltime={walltime}
#MSUB -o mgmol.out
#MSUB -q {queue}
#MSUB -A comp
#MSUB -l gres={gres}
#MSUB -N {jobname}
rm -f queued
echo ' ' > running
use boost-nompi-1.55.0
export BOOST_ROOT=/usr/local/tools/boost-nompi-1.55.0
export Boost_NO_SYSTEM_PATHS=ON
setenv OMP_NUM_THREADS {omp_num_threads}
set ntasks = {ntasks}
set maindir = $home/mgmol
set exe = $maindir/bin/{exe}
set datadir = `pwd`
set scratchdir = {scratch_path}`basename $datadir`
mkdir $scratchdir
cd $scratchdir
echo ' ' > running
set cfg_quench = mgmol_quench.cfg
cp $datadir/$cfg_quench .
cp $datadir/coords.in .
cp $datadir/lrs.in .
{potentials}
#1st run
srun -n $ntasks -c {cores_per_task} $exe -c $cfg_quench -i coords.in {lrs}
#restart
rm -f wave.out
set restart_file=`ls -ld * | awk '/snapshot0/ {{ print $9 }}' | tail -n1`
ln -s -f $restart_file wave.out
rm -f running
echo ' ' > queued
"""
runfile_md_template="""#!/bin/tcsh
#MSUB -l nodes={nodes},walltime={walltime}
#MSUB -o mgmol.out
#MSUB -q {queue}
#MSUB -A comp
#MSUB -l gres={gres}
#MSUB -N {jobname}
rm -f queued
echo ' ' > running
use boost-nompi-1.55.0
export BOOST_ROOT=/usr/local/tools/boost-nompi-1.55.0
export Boost_NO_SYSTEM_PATHS=ON
setenv OMP_NUM_THREADS {omp_num_threads}
set ntasks = {ntasks}
set maindir = $home/mgmol
set exe = $maindir/bin/{exe}
set datadir = `pwd`
set scratchdir = {scratch_path}`basename $datadir`
mkdir $scratchdir
cd $scratchdir
echo ' ' > running
set cfg_md = mgmol_md.cfg
cp $datadir/$cfg_md .
#restart
rm -f wave.out
set restart_file=`ls -ld * | awk '/snapshot0/ {{ print $9 }}' | tail -n1`
ln -s -f $restart_file wave.out
#MD run
srun -n $ntasks -c {cores_per_task} $exe -c $cfg_md
#restart
rm -f wave.out
set restart_file=`ls -ld * | awk '/snapshot0/ {{ print $9 }}' | tail -n1`
ln -s -f $restart_file wave.out
rm -f running
echo ' ' > queued
"""
4c0d5b44bfd54d1398052b98c3fc9dbc04be5c4f | 601 | py | Python | app/mod_ecomm/controllers.py | VikrantReddy/Instagram2Shop @ 8d9c3f39d277fafb56d10a87a1b62a6df8a74237 | ["MIT"] | stars/issues/forks: null
from flask import Blueprint, Flask, send_from_directory
from werkzeug.security import check_password_hash, generate_password_hash
from app import db
from app.mod_auth.forms import LoginForm
from app.mod_auth.models import User
mod_ecomm = Blueprint('products', __name__, url_prefix='/products',
static_folder='../../frontend/build')
@mod_ecomm.route("/", defaults={'path': ''})
def serve(path):
if path:
return send_from_directory(mod_ecomm.static_folder, path)
else:
return send_from_directory(mod_ecomm.static_folder, 'index.html')
4c0f174360fe29201e22d16e102aa2c61bad20f2 | 262 | py | Python | production/pygsl-0.9.5/testing/__init__.py | juhnowski/FishingRod @ 457e7afb5cab424296dff95e1acf10ebf70d32a9 | ["MIT"] | stars: 1 (2019-07-29T02:53:51.000Z) | issues: 1 (2021-09-11T14:30:32.000Z) | forks (poojavade/Genomics_Docker @ 829b5094bba18bbe03ae97daf925fee40a8476e8, Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/testing/__init__.py, ["Apache-2.0"]): 2 (2016-12-19T02:27:46.000Z to 2019-07-29T02:53:54.000Z)
"""
Here you find either newly implemented modules or alternate implementations
of modules that already exist. This directory is intended to hold a second
implementation beside the main one, so that there can be a discussion about
which implementation to favor in the long run.
"""
4c6a6e28161a83ca0b9ef2212d453c1bc1cfcfd6 | 232 | py | Python | weather/apps.py | chrisjen83/rfb_weather_obs @ 8eab16358c5059655d208ef41aa38692fa21776f | ["Apache-2.0"] | stars: 1 (2020-12-05T05:23:26.000Z) | issues: null | forks: null
from django.apps import AppConfig
import logging
logger = logging.getLogger(__name__)
class WeatherConfig(AppConfig):
name = 'weather'
def ready(self):
from forecastUpdater import updater
updater.start()
4c6e61959c8414eed50a9b983937c8b1f9cf4b26 | 3,711 | py | Python | flax/core/frozen_dict.py | juliuskunze/flax @ 929395cf5c7391bca3e33ef6760ff9591401d19e | ["Apache-2.0"] | stars/issues/forks: null
# Copyright 2020 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Frozen Dictionary."""
from typing import TypeVar, Mapping, Dict, Tuple
from flax import serialization
import jax
K = TypeVar('K')
V = TypeVar('V')
@jax.tree_util.register_pytree_node_class
class FrozenDict(Mapping[K, V]):
"""An immutable variant of the Python dict."""
__slots__ = ('_dict', '_hash')
def __init__(self, *args, **kwargs):
self._dict = dict(*args, **kwargs)
self._hash = None
def __getitem__(self, key):
v = self._dict[key]
if isinstance(v, dict):
return FrozenDict(v)
return v
def __setitem__(self, key, value):
raise ValueError('FrozenDict is immutable.')
def __contains__(self, key):
return key in self._dict
def __iter__(self):
return iter(self._dict)
def __len__(self):
return len(self._dict)
def __repr__(self):
return 'FrozenDict(%r)' % self._dict
def __hash__(self):
if self._hash is None:
h = 0
for key, value in self.items():
h ^= hash((key, value))
self._hash = h
return self._hash
def copy(self, add_or_replace: Mapping[K, V]) -> 'FrozenDict[K, V]':
"""Create a new FrozenDict with additional or replaced entries."""
return type(self)(self, **unfreeze(add_or_replace))
def items(self):
for key in self._dict:
yield (key, self[key])
def pop(self, key: K) -> Tuple['FrozenDict[K, V]', V]:
"""Create a new FrozenDict where one entry is removed.
Example::
state, params = variables.pop('params')
Args:
key: the key to remove from the dict
Returns:
A pair with the new FrozenDict and the removed value.
"""
value = self[key]
new_dict = dict(self._dict)
new_dict.pop(key)
new_self = type(self)(new_dict)
return new_self, value
def unfreeze(self) -> Dict[K, V]:
return unfreeze(self)
def tree_flatten(self):
return (self._dict,), ()
@classmethod
def tree_unflatten(cls, _, data):
return cls(*data)
def freeze(xs: Dict[K, V]) -> FrozenDict[K, V]:
"""Freeze a nested dict.
Makes a nested `dict` immutable by transforming it into `FrozenDict`.
"""
# Turn the nested FrozenDict into a dict. This way the internal data structure
# of FrozenDict does not contain any FrozenDicts.
  # Instead, we create those lazily in `__getitem__`.
# As a result tree_flatten/unflatten will be fast
# because it operates on native dicts.
xs = unfreeze(xs)
return FrozenDict(xs)
def unfreeze(x: FrozenDict[K, V]) -> Dict[K, V]:
"""Unfreeze a FrozenDict.
Makes a mutable copy of a `FrozenDict` mutable by transforming
it into (nested) dict.
"""
if not isinstance(x, (FrozenDict, dict)):
return x
ys = {}
for key, value in x.items():
ys[key] = unfreeze(value)
return ys
def _frozen_dict_state_dict(xs):
return {key: serialization.to_state_dict(value) for key, value in xs.items()}
def _restore_frozen_dict(xs, states):
return freeze(
{key: serialization.from_state_dict(value, states[key])
for key, value in xs.items()})
serialization.register_serialization_state(
FrozenDict,
_frozen_dict_state_dict,
_restore_frozen_dict)
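# A short usage sketch, assuming the definitions above are in scope; it
# exercises the copy/pop/freeze/unfreeze round trip that the docstrings
# describe.
variables = freeze({'params': {'w': 1}, 'state': {'step': 0}})
assert isinstance(variables['params'], FrozenDict)    # nested dicts come back frozen
state, params = variables.pop('params')               # pop returns (new FrozenDict, removed value)
assert 'params' not in state                          # the returned copy no longer has the key
updated = variables.copy({'extra': {'flag': True}})   # copy adds or replaces entries
assert unfreeze(updated)['extra'] == {'flag': True}   # unfreeze yields plain nested dicts
try:
    variables['params'] = {}                          # mutation is rejected
except ValueError:
    pass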
| 25.770833
| 80
| 0.68041
| 535
| 3,711
| 4.547664
| 0.314019
| 0.00822
| 0.018085
| 0.021373
| 0.044801
| 0.016441
| 0
| 0
| 0
| 0
| 0
| 0.003061
| 0.207761
| 3,711
| 143
| 81
| 25.951049
| 0.82449
| 0.365131
| 0
| 0
| 0
| 0
| 0.036171
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.253521
| false
| 0
| 0.042254
| 0.126761
| 0.56338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
4c752c3e0e33ba7c7da469ab66cb6adfa9bb958a
| 669
|
py
|
Python
|
python/johnstarich/interval.py
|
JohnStarich/dotfiles
|
eaa07b09aa02fc2fa2516cebdd3628b4daf506e4
|
[
"Apache-2.0"
] | 3
|
2018-02-28T14:22:53.000Z
|
2022-01-24T02:38:22.000Z
|
python/johnstarich/interval.py
|
JohnStarich/dotfiles
|
eaa07b09aa02fc2fa2516cebdd3628b4daf506e4
|
[
"Apache-2.0"
] | null | null | null |
python/johnstarich/interval.py
|
JohnStarich/dotfiles
|
eaa07b09aa02fc2fa2516cebdd3628b4daf506e4
|
[
"Apache-2.0"
] | null | null | null |
import time
class Interval(object):
def __init__(self, delay_time: int):
self.delay_time = delay_time
self.current_time = 0
@staticmethod
def now():
return time.gmtime().tm_sec
def should_run(self) -> bool:
if self.current_time == 0:
self.current_time = Interval.now()
return True
return self.is_done()
def is_done(self) -> bool:
timestamp = Interval.now()
return self.current_time + self.delay_time < timestamp or \
self.current_time > timestamp
def start(self) -> int:
self.current_time = Interval.now()
return self.current_time
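# A small usage sketch, assuming Interval is meant as a coarse "run at most
# once per N seconds" gate. Note that now() returns time.gmtime().tm_sec,
# which wraps from 59 back to 0; the `self.current_time > timestamp` clause
# in is_done() is what catches that wrap-around, so delay_time is only
# meaningful for values under a minute.
interval = Interval(delay_time=5)
if interval.should_run():
    interval.start()  # record when the work actually ran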
| 24.777778
| 67
| 0.606876
| 83
| 669
| 4.662651
| 0.337349
| 0.198966
| 0.271318
| 0.082687
| 0.286822
| 0.286822
| 0
| 0
| 0
| 0
| 0
| 0.004255
| 0.297459
| 669
| 26
| 68
| 25.730769
| 0.819149
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.05
| 0.05
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
d5bb9bbb0fed4afc892e132a8963124e532f19f2
| 845
|
py
|
Python
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 2
|
2020-12-30T07:46:12.000Z
|
2022-02-01T16:37:34.000Z
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 7
|
2021-04-14T16:17:29.000Z
|
2022-01-25T11:48:18.000Z
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 1
|
2021-06-06T09:46:47.000Z
|
2021-06-06T09:46:47.000Z
|
from zenslackchat.zendesk_base_webhook import BaseWebHook
from zenslackchat.zendesk_email_to_slack import email_from_zendesk
from zenslackchat.zendesk_comments_to_slack import comments_from_zendesk
class CommentsWebHook(BaseWebHook):
"""Handle Zendesk Comment Events.
"""
def handle_event(self, event, slack_client, zendesk_client):
"""Handle the comment trigger event we have been POSTed.
Recover and update the comments with lastest from Zendesk.
"""
comments_from_zendesk(event, slack_client, zendesk_client)
class EmailWebHook(BaseWebHook):
"""Handle Zendesk Email Events.
"""
def handle_event(self, event, slack_client, zendesk_client):
"""Handle an email created issue and create it on slack.
"""
email_from_zendesk(event, slack_client, zendesk_client)
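# Hedged sketch of the contract implied above: BaseWebHook (imported from
# zenslackchat.zendesk_base_webhook, not shown here) presumably validates the
# POSTed payload and then hands the parsed event plus ready-made Slack and
# Zendesk clients to handle_event(). A hypothetical minimal base class
# consistent with that usage might look like this; it is not the real one.
class SketchBaseWebHook:
    """Hypothetical stand-in for BaseWebHook, for illustration only."""
    def handle_event(self, event, slack_client, zendesk_client):
        raise NotImplementedError

    def dispatch(self, event, slack_client, zendesk_client):
        # The real base class presumably authenticates and parses the request
        # first; only the hand-off that the subclasses rely on is shown here.
        return self.handle_event(event, slack_client, zendesk_client)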
| 33.8
| 72
| 0.744379
| 103
| 845
| 5.854369
| 0.368932
| 0.091211
| 0.106136
| 0.15257
| 0.328358
| 0.328358
| 0.328358
| 0.195688
| 0.195688
| 0.195688
| 0
| 0
| 0.186982
| 845
| 24
| 73
| 35.208333
| 0.877729
| 0.280473
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
d5c5f4f5c79da67180264a12457b76158e4ccc4b
| 4,814
|
py
|
Python
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 4
|
2017-11-03T14:48:52.000Z
|
2019-03-07T03:48:11.000Z
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 2
|
2017-11-27T15:40:01.000Z
|
2021-01-30T08:40:51.000Z
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 3
|
2017-11-27T15:25:07.000Z
|
2021-03-02T10:31:30.000Z
|
import struct
import numpy as np
import pytest
import miniglm
def test_add_vec_vec():
res = miniglm.add((1.0, 2.0, 3.0), (1.5, 1.8, 1.2))
np.testing.assert_almost_equal(res, (2.5, 3.8, 4.2))
assert type(res) is tuple
def test_add_vec_scalar():
res = miniglm.add((1.0, 2.0, 3.0), 0.5)
np.testing.assert_almost_equal(res, (1.5, 2.5, 3.5))
assert type(res) is tuple
def test_sub_vec_vec():
res = miniglm.sub((5.0, 6.0, 7.0), (1.5, 1.8, 1.2))
np.testing.assert_almost_equal(res, (3.5, 4.2, 5.8))
assert type(res) is tuple
def test_sub_vec_scalar():
res = miniglm.sub((5.0, 6.0, 7.0), 1.5)
np.testing.assert_almost_equal(res, (3.5, 4.5, 5.5))
assert type(res) is tuple
def test_mul_vec_vec():
res = miniglm.mul((5.0, 6.0, 7.0), (1.5, 1.8, 1.2))
np.testing.assert_almost_equal(res, (7.5, 10.8, 8.4))
assert type(res) is tuple
def test_mul_vec_scalar():
res = miniglm.mul((1.0, 2.0, 3.0), 2.0)
np.testing.assert_almost_equal(res, (2.0, 4.0, 6.0))
assert type(res) is tuple
def test_cross():
res = miniglm.cross((2.0, 3.5, 7.1), (0.2, 10.0, 3.3))
np.testing.assert_almost_equal(res, (-59.45, -5.18, 19.3))
assert type(res) is tuple
def test_dot_vec():
res = miniglm.dot((2.0, 3.5, 7.1), (0.2, 10.0, 3.3))
np.testing.assert_almost_equal(res, 58.83)
def test_dot_quat():
res = miniglm.dot((2.0, 3.5, 7.1), (0.2, 10.0, 3.3))
np.testing.assert_almost_equal(res, 58.83)
def test_mix_vec():
res = miniglm.mix((2.5, 3.4, 4.6), (7.2, 1.1, 3.2), 0.2)
np.testing.assert_almost_equal(res, (3.44, 2.94, 4.32))
assert type(res) is tuple
def test_mix_scalar():
res = miniglm.mix(1.0, 3.0, 0.5)
np.testing.assert_almost_equal(res, 2.0)
def test_rotate():
res = miniglm.rotate(miniglm.pi / 3.0, miniglm.norm((0.48, 0.60, 0.64)))
expected = (0.24, 0.3, 0.32, 0.8660254037844387)
np.testing.assert_almost_equal(res, expected)
assert type(res) is tuple
def test_split_quat():
quat = (0.24, 0.3, 0.32, 0.8660254037844387)
angle, axis = miniglm.split(quat)
np.testing.assert_almost_equal(angle, miniglm.pi / 3.0)
np.testing.assert_almost_equal(axis, (0.48, 0.60, 0.64))
assert type(axis) is tuple
def test_rotate_x_90_deg():
res = miniglm.rotate(miniglm.pi / 2.0, (1.0, 0.0, 0.0))
np.testing.assert_almost_equal(res, (np.sqrt(2.0) / 2.0, 0.0, 0.0, np.sqrt(2.0) / 2.0))
def test_rotate_y_90_deg():
res = miniglm.rotate(miniglm.pi / 2.0, (0.0, 1.0, 0.0))
np.testing.assert_almost_equal(res, (0.0, np.sqrt(2.0) / 2.0, 0.0, np.sqrt(2.0) / 2.0))
def test_rotate_z_90_deg():
res = miniglm.rotate(miniglm.pi / 2.0, (0.0, 0.0, 1.0))
np.testing.assert_almost_equal(res, (0.0, 0.0, np.sqrt(2.0) / 2.0, np.sqrt(2.0) / 2.0))
def test_norm_vec():
res = miniglm.norm((48.0, 60.0, 64.0))
expected = (0.48, 0.60, 0.64)
np.testing.assert_almost_equal(res, expected)
assert type(res) is tuple
def test_norm_quat():
res = miniglm.norm((2.0, 4.0, 8.0, 4.0))
expected = (0.2, 0.4, 0.8, 0.4)
np.testing.assert_almost_equal(res, expected)
assert type(res) is tuple
def test_norm_mat():
mat = (
0.074, 0.962, -0.259,
-0.518, 0.259, 0.814,
0.851, 0.074, 0.518,
)
res = miniglm.norm(mat)
np.testing.assert_almost_equal(miniglm.det(res), 1.0)
np.testing.assert_almost_equal(miniglm.cross(res[0:3], res[3:6]), res[6:9])
np.testing.assert_almost_equal(miniglm.dot(res[0:3], res[3:6]), 0.0)
np.testing.assert_almost_equal(miniglm.dot(res[3:6], res[6:9]), 0.0)
np.testing.assert_almost_equal(miniglm.dot(res[0:3], res[6:9]), 0.0)
assert type(res) is tuple
def test_cast():
quat = (0.2, 0.4, 0.8, 0.4)
mat = (-0.6, 0.8, 0.0, -0.48, -0.36, 0.8, 0.64, 0.48, 0.6)
np.testing.assert_almost_equal(miniglm.cast(quat), mat)
np.testing.assert_almost_equal(miniglm.cast(mat), quat)
np.testing.assert_almost_equal(miniglm.cast(miniglm.cast(quat)), quat)
np.testing.assert_almost_equal(miniglm.cast(miniglm.cast(mat)), mat)
def test_swizzle_vec():
res = miniglm.swizzle((1.0, 2.0, 3.0), 'yxz')
np.testing.assert_almost_equal(res, (2.0, 1.0, 3.0))
def test_swizzle_quat():
res = miniglm.swizzle((0.1, 0.7, 0.5, 0.5), 'wxyz')
np.testing.assert_almost_equal(res, (0.5, 0.1, 0.7, 0.5))
def test_pack_scalar():
assert miniglm.pack(1.75) == struct.pack('f', 1.75)
def test_pack_vec():
vec = (1.0, 2.0, 3.0)
assert miniglm.pack(vec) == struct.pack('fff', *vec)
def test_pack_quat():
quat = (0.1, 0.7, 0.5, 0.5)
assert miniglm.pack(quat) == struct.pack('ffff', *quat)
def test_pack_mat():
mat = (1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)
assert miniglm.pack(mat) == struct.pack('fffffffff', *mat)
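# The rotation tests above pin down miniglm's quaternion layout: rotating by
# pi/2 about the x axis yields (sqrt(2)/2, 0.0, 0.0, sqrt(2)/2), i.e. the
# vector part comes first and the scalar part last -- an (x, y, z, w) tuple.
# (Incidentally, test_dot_quat above repeats test_dot_vec verbatim with
# 3-component arguments.) A standalone check of that arithmetic, using only
# numpy and the axis-angle-to-quaternion formula q = (sin(a/2)*axis, cos(a/2)):
import numpy as np

angle = np.pi / 2.0
axis = np.array([1.0, 0.0, 0.0])
q = (*(np.sin(angle / 2.0) * axis), np.cos(angle / 2.0))  # (x, y, z, w)
np.testing.assert_almost_equal(q, (np.sqrt(2.0) / 2.0, 0.0, 0.0, np.sqrt(2.0) / 2.0))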
| 28.826347
| 91
| 0.623598
| 958
| 4,814
| 3.007307
| 0.086639
| 0.022215
| 0.156196
| 0.218674
| 0.705311
| 0.687608
| 0.636237
| 0.542867
| 0.454703
| 0.334606
| 0
| 0.130116
| 0.177815
| 4,814
| 166
| 92
| 29
| 0.597777
| 0
| 0
| 0.169643
| 0
| 0
| 0.004985
| 0
| 0
| 0
| 0
| 0
| 0.419643
| 1
| 0.232143
| false
| 0
| 0.035714
| 0
| 0.267857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
d5d20f7a81be3ee7ffae45e074584da66ec78259
| 210
|
py
|
Python
|
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
__version__ = '0.1.0'
__all__ = ['MultiStreamSelect', 'hexify']
__author__ = 'Natnael Getahun ([email protected])'
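# Note: reassigning __name__ at module level (below) is unusual and can
# confuse tooling that expects the real dotted module name; it is preserved
# here as it appears in the original source.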
__name__ = 'multistream'
from .multistream import MultiStreamSelect
from .utils import hexify
| 26.25
| 52
| 0.766667
| 23
| 210
| 6.391304
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016129
| 0.114286
| 210
| 7
| 53
| 30
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0.361905
| 0.1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
d5eae8227c1380d3fce1267b4a1949ca968db82b
| 1,041
|
py
|
Python
|
Utils.py
|
MartinEngen/NaiveBayesianClassifier
|
a28813708a4d2adcdcd629e6d4d8b4f438a9c799
|
[
"MIT"
] | null | null | null |
Utils.py
|
MartinEngen/NaiveBayesianClassifier
|
a28813708a4d2adcdcd629e6d4d8b4f438a9c799
|
[
"MIT"
] | null | null | null |
Utils.py
|
MartinEngen/NaiveBayesianClassifier
|
a28813708a4d2adcdcd629e6d4d8b4f438a9c799
|
[
"MIT"
] | null | null | null |
import os
import re
def get_subfolder_paths(folder_relative_path: str) -> list:
"""
Gets all subfolders of a given path
:param folder_relative_path: Relative path of folder to find subfolders of
:return: list of relative paths to any subfolders
"""
return [f.path for f in os.scandir(folder_relative_path) if f.is_dir()]
def get_group_name(group_path: str) -> str:
return group_path.split("\\")[-1]
def replace_unwanted_characters(line: str) -> str:
return re.sub(
r'([^\s\w]|_)+',
u' ',
line.replace('\n', ' ').replace('\t', ' '),
flags=re.UNICODE
)
def clean_document(document_file) -> list:
document = document_file.read().lower().split("\n\n")
cleaned_lines = list(map(replace_unwanted_characters, document[1:]))
list_of_lines = map(lambda x: x.split(" "), cleaned_lines)
flattened_list_of_lines = [val for sublist in list_of_lines for val in sublist]
    return list(filter(lambda x: x != '', flattened_list_of_lines))  # materialize to honor the declared -> list return type
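# A short usage sketch, assuming the helpers above: clean_document takes a
# file-like object whose first "\n\n"-separated block is a header that gets
# dropped (document[1:]), and returns the remaining lowercase word tokens.
# The sample text is made up for illustration.
import io

sample = io.StringIO("Header: to be skipped\n\nHello, world!\tSecond_line\n\nmore text")
words = list(clean_document(sample))
# -> ['hello', 'world', 'second', 'line', 'more', 'text']
# (punctuation and underscores become spaces, empty tokens are filtered out)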
| 30.617647
| 83
| 0.659942
| 154
| 1,041
| 4.246753
| 0.409091
| 0.045872
| 0.067278
| 0.061162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002415
| 0.204611
| 1,041
| 33
| 84
| 31.545455
| 0.78744
| 0.178674
| 0
| 0
| 0
| 0
| 0.031288
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.105263
| 0.105263
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
9104cf33f9cb7c9a9e220cded851c4d2434c8d05
| 49,016
|
py
|
Python
|
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
import filecmp
import json
import pytest
import oci
import services.object_storage.src.oci_cli_object_storage as oci_cli_object_storage
import os
import random
import shutil
import six
import string
from tests import util
from tests import test_config_container
from mimetypes import guess_type
OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET = 100
OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT = 20
CONTENT_STRING_LENGTH = 5000
MID_SIZED_FILE_IN_MEBIBTYES = 20
LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES = 150 # Default multipart is 128MiB
# Holds the objects we create and their content so that we can verify results
bulk_get_object_to_content = {}
bulk_get_prefix_to_object = {
'a/b/c/d': [],
'a/b/c': [],
'a/b': [],
'/a': [],
'': []
}
bulk_get_bucket_name = None
bulk_put_large_files = set()
bulk_put_mid_sized_files = set()
root_bulk_put_folder = None
bulk_put_bucket_name = None
@pytest.fixture
def vcr_fixture(request):
with test_config_container.create_vcr(cassette_library_dir='services/object_storage/tests/cassettes').use_cassette('object_storage_bulk_operations_{name}.yml'.format(name=request.function.__name__)):
yield
# Generate test data for different operations:
#
# Bulk Get: create a new bucket and populate it with some objects, then tear it all down afterwards
# Bulk Put: create a folder structure containing small and large files, then tear it all down afterwards
# Bulk Delete: uses the folders and files generated for bulk put
@pytest.fixture(scope='module', autouse=True)
def generate_test_data(object_storage_client):
global bulk_get_object_to_content, bulk_get_bucket_name, root_bulk_put_folder, bulk_put_large_files, bulk_put_mid_sized_files, bulk_put_bucket_name
# Create a test bucket
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkGetTest_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
bulk_get_bucket_name = create_bucket_request.name
    # Create items at various hierarchy levels (to be surfaced as different directories on disk)
for i in range(OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET):
if i % 5 == 4:
object_name = 'a/b/c/d/Object_{}'.format(i)
bulk_get_prefix_to_object['a/b/c/d'].append(object_name)
elif i % 5 == 3:
object_name = 'a/b/c/Object_{}'.format(i)
bulk_get_prefix_to_object['a/b/c'].append(object_name)
elif i % 5 == 2:
object_name = 'a/b/Object_{}'.format(i)
bulk_get_prefix_to_object['a/b'].append(object_name)
elif i % 5 == 1:
# This is equivalent to a/ on the file system because we drop the leading slash (we drop path separators from the front to avoid unexpected results)
object_name = '/a/Object_{}'.format(i)
bulk_get_prefix_to_object['/a'].append(object_name)
else:
# At the root of the bucket
object_name = 'Object_{}'.format(i)
bulk_get_prefix_to_object[''].append(object_name)
object_content = generate_random_string(CONTENT_STRING_LENGTH)
object_storage_client.put_object(util.NAMESPACE, create_bucket_request.name, object_name, object_content)
bulk_get_object_to_content[object_name] = object_content
# makedirs creates all subfolders recursively
root_bulk_put_folder = 'tests/temp/bulk_put_{}'.format(util.random_number_string())
bulk_put_folder_leaf = '{}/subfolder1/subfolder2/subfolder3'.format(root_bulk_put_folder)
if not os.path.exists(bulk_put_folder_leaf):
os.makedirs(bulk_put_folder_leaf)
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkPutTest_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
bulk_put_bucket_name = create_bucket_request.name
subfolders = ['', 'subfolder1', 'subfolder1/subfolder2', 'subfolder1/subfolder2/subfolder3']
for subfolder in subfolders:
if subfolder == '':
full_folder = root_bulk_put_folder
else:
full_folder = os.path.join(root_bulk_put_folder, subfolder)
for i in range(OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT + 1):
file_path = '{}/object_{}'.format(full_folder, i)
if i != 0 and i % OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT == 0:
# Put in one big file per subfolder
util.create_large_file(file_path, LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
bulk_put_large_files.add(file_path)
elif i != 0 and i % 10 == 0:
# Put in the occasional file with a reasonable size so that we can force multipart
util.create_large_file(file_path, MID_SIZED_FILE_IN_MEBIBTYES)
bulk_put_mid_sized_files.add(file_path)
else:
with open(file_path, 'w') as f:
f.write(generate_random_string(CONTENT_STRING_LENGTH))
yield
    # Tear down by deleting all the objects and then deleting the buckets
delete_bucket_and_all_items(object_storage_client, bulk_get_bucket_name)
delete_bucket_and_all_items(object_storage_client, bulk_put_bucket_name)
# Remove all directories recursively
shutil.rmtree(root_bulk_put_folder)
@util.skip_while_rerecording
def test_normalize_object_name_path():
assert '/this/is/a/path' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('/this/is/a/path')
assert '/this/is/a/path' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('/this/is/a/path', '/')
assert '/this/is/a/path' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('\\this\\is\\a\\path', '\\')
assert '/this/is/a/path' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('\\this/is/a\\path', '\\')
assert 'thisisapath' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('thisisapath')
assert 'thisisapath' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('thisisapath', '/')
assert 'thisisapath' == oci_cli_object_storage.objectstorage_cli_extended.normalize_object_name_path_for_object_storage('thisisapath', '\\')
@util.skip_while_rerecording
def test_get_all_objects_in_bucket(vcr_fixture):
download_folder = 'tests/temp/get_all_{}'.format(bulk_get_bucket_name)
result = invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder])
print(result.output)
# Ensure that content matches
for object_name in bulk_get_object_to_content:
if object_name[0] == '/' or object_name[0] == '\\':
file_path = os.path.join(download_folder, object_name[1:])
else:
file_path = os.path.join(download_folder, object_name)
with open(file_path, 'r') as content_file:
content = content_file.read()
assert content == bulk_get_object_to_content[object_name]
assert len(bulk_get_object_to_content) == get_count_of_files_in_folder_and_subfolders(download_folder)
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_get_directory_and_subdirectories(vcr_fixture):
download_folder = 'tests/temp/get_directory_and_subdirectories_{}'.format(bulk_get_bucket_name)
# This should get us a/b/<object>, a/b/c/<object> and a/b/c/d/<object>
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder, '--prefix', 'a/b'])
for object_name in bulk_get_prefix_to_object['a/b']:
file_path = os.path.join(download_folder, object_name)
with open(file_path, 'r') as content_file:
content = content_file.read()
assert content == bulk_get_object_to_content[object_name]
for object_name in bulk_get_prefix_to_object['a/b/c']:
file_path = os.path.join(download_folder, object_name)
with open(file_path, 'r') as content_file:
content = content_file.read()
assert content == bulk_get_object_to_content[object_name]
for object_name in bulk_get_prefix_to_object['a/b/c/d']:
file_path = os.path.join(download_folder, object_name)
with open(file_path, 'r') as content_file:
content = content_file.read()
assert content == bulk_get_object_to_content[object_name]
assert len(bulk_get_prefix_to_object['a/b']) + len(bulk_get_prefix_to_object['a/b/c']) + len(bulk_get_prefix_to_object['a/b/c/d']) == get_count_of_files_in_folder_and_subfolders(download_folder)
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_get_directory_no_subdirectory(vcr_fixture):
download_folder = 'tests/temp/get_directory_only_{}'.format(bulk_get_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder, '--prefix', 'a/b/c/', '--delimiter', '/'])
for object_name in bulk_get_prefix_to_object['a/b/c']:
file_path = os.path.join(download_folder, object_name)
with open(file_path, 'r') as content_file:
content = content_file.read()
assert content == bulk_get_object_to_content[object_name]
assert len(bulk_get_prefix_to_object['a/b/c']) == get_count_of_files_in_folder_and_subfolders(download_folder)
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_get_files_skipped():
download_folder = 'tests/temp/skip_and_replace_{}'.format(bulk_get_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder])
# Sanity check
assert len(bulk_get_object_to_content) == get_count_of_files_in_folder_and_subfolders(download_folder)
# We should skip over all objects since there is no --overwrite. There should be prompts
result = invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert 'Are you sure you want to overwrite it?' in result.output
assert len(parsed_result['skipped-objects']) == len(bulk_get_object_to_content)
# We should skip over all objects since we say --no-overwrite. Additionally there should be no prompts
result = invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder, '--no-overwrite'])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert 'Are you sure you want to overwrite it?' not in result.output
assert len(parsed_result['skipped-objects']) == len(bulk_get_object_to_content)
# We should skip over no objects since we --overwrite
result = invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder, '--overwrite'])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert len(parsed_result['skipped-objects']) == 0
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_get_no_objects(vcr_fixture):
download_folder = 'tests/temp/no_objects_{}'.format(bulk_get_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--download-dir', download_folder, '--prefix', 'batman'])
assert 0 == get_count_of_files_in_folder_and_subfolders(download_folder)
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_get_multipart(object_storage_client):
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkGetMultipartsTest_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
large_file_root_dir = os.path.join('tests', 'temp', 'multipart_get_large_files')
if not os.path.exists(large_file_root_dir):
os.makedirs(large_file_root_dir)
util.create_large_file(os.path.join(large_file_root_dir, '1.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
util.create_large_file(os.path.join(large_file_root_dir, '2.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
util.create_large_file(os.path.join(large_file_root_dir, '3.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
util.create_large_file(os.path.join(large_file_root_dir, '4.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
util.create_large_file(os.path.join(large_file_root_dir, '5.bin'), LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES)
util.create_large_file(os.path.join(large_file_root_dir, '6.bin'), 1) # Creates a 1 MiB file for variety
invoke([
'os', 'object', 'bulk-upload',
'--namespace', util.NAMESPACE,
'--bucket-name', create_bucket_request.name,
'--src-dir', large_file_root_dir
])
large_file_verify_dir = os.path.join('tests', 'temp', 'multipart_get_large_files_verify')
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--download-dir', large_file_verify_dir, '--multipart-download-threshold', '128'])
assert get_count_of_files_in_folder_and_subfolders(large_file_verify_dir) == 6
assert filecmp.cmp(os.path.join(large_file_root_dir, '1.bin'), os.path.join(large_file_verify_dir, '1.bin'))
assert filecmp.cmp(os.path.join(large_file_root_dir, '2.bin'), os.path.join(large_file_verify_dir, '2.bin'))
assert filecmp.cmp(os.path.join(large_file_root_dir, '3.bin'), os.path.join(large_file_verify_dir, '3.bin'))
assert filecmp.cmp(os.path.join(large_file_root_dir, '4.bin'), os.path.join(large_file_verify_dir, '4.bin'))
assert filecmp.cmp(os.path.join(large_file_root_dir, '5.bin'), os.path.join(large_file_verify_dir, '5.bin'))
assert filecmp.cmp(os.path.join(large_file_root_dir, '6.bin'), os.path.join(large_file_verify_dir, '6.bin'))
shutil.rmtree(large_file_root_dir)
shutil.rmtree(large_file_verify_dir)
delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
# Since we've created a reasonable number of objects in this test suite, it's a good opportunity to test using the --all and --limit parameters
@util.skip_while_rerecording
def test_list_all_objects_operations(vcr_fixture):
result = invoke(['os', 'object', 'list', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--all'])
parsed_result = json.loads(result.output)
assert len(parsed_result['data']) == OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET
assert 'next-start-with' not in result.output
result = invoke(['os', 'object', 'list', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--all', '--page-size', '20'])
parsed_result = json.loads(result.output)
assert len(parsed_result['data']) == OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET
assert 'next-start-with' not in result.output
result = invoke(['os', 'object', 'list', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--limit', '47'])
parsed_result = json.loads(result.output)
assert len(parsed_result['data']) == 47
assert 'next-start-with' in result.output
result = invoke(['os', 'object', 'list', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--limit', '33', '--page-size', '3'])
parsed_result = json.loads(result.output)
assert len(parsed_result['data']) == 33
assert 'next-start-with' in result.output
# Bulk puts objects, uses multipart where appropriate (when we breach the default of 128MiB)
@util.skip_while_rerecording
def test_bulk_put_default_options():
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder])
# No failures or skips and we uploaded everything
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
# Pull everything down and verify that the files match (everything in source appears in destination and they are equal)
download_folder = 'tests/temp/verify_files_{}'.format(bulk_put_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--download-dir', download_folder])
object_name_set = set()
for dir_name, subdir_list, file_list in os.walk(root_bulk_put_folder):
for file in file_list:
source_file_path = os.path.join(dir_name, file)
downloaded_file_path = source_file_path.replace(root_bulk_put_folder, download_folder)
assert os.path.exists(downloaded_file_path)
assert filecmp.cmp(source_file_path, downloaded_file_path, shallow=False)
# Sanity check that we're reporting back that we uploaded the right files
assert get_object_name_from_path(root_bulk_put_folder, source_file_path) in parsed_result['uploaded-objects']
object_name_set.add(get_object_name_from_path(root_bulk_put_folder, source_file_path))
# If we try and put it in the same bucket without --overwrite then everything should be skipped. There should be prompts
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert 'Are you sure you want to overwrite it?' in result.output
assert set(parsed_result['skipped-objects']) == object_name_set
assert parsed_result['upload-failures'] == {}
assert parsed_result['uploaded-objects'] == {}
# If we say to --no-overwrite then everything should be skipped. There should be no prompts
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder, '--no-overwrite'])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert 'Are you sure you want to overwrite it?' not in result.output
assert set(parsed_result['skipped-objects']) == object_name_set
assert parsed_result['upload-failures'] == {}
assert parsed_result['uploaded-objects'] == {}
# Now we force it
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder, '--overwrite'])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == len(object_name_set)
for object_name in object_name_set:
assert object_name in parsed_result['uploaded-objects']
shutil.rmtree(download_folder)
# Bulk puts objects with --content-type as auto
@util.skip_while_rerecording
def test_bulk_put_auto_content_type():
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder, '--content-type', 'auto', '--overwrite'])
# No failures or skips and we uploaded everything
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
# Pull everything down and verify that the files match (everything in source appears in destination and they are equal)
download_folder = 'tests/temp/verify_files_{}'.format(bulk_put_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--download-dir', download_folder])
object_name_set = set()
for dir_name, subdir_list, file_list in os.walk(root_bulk_put_folder):
for file in file_list:
source_file_path = os.path.join(dir_name, file)
downloaded_file_path = source_file_path.replace(root_bulk_put_folder, download_folder)
assert os.path.exists(downloaded_file_path)
assert filecmp.cmp(source_file_path, downloaded_file_path, shallow=False)
assert guess_type(source_file_path) == guess_type(downloaded_file_path)
# Sanity check that we're reporting back that we uploaded the right files
assert get_object_name_from_path(root_bulk_put_folder, source_file_path) in parsed_result['uploaded-objects']
object_name_set.add(get_object_name_from_path(root_bulk_put_folder, source_file_path))
shutil.rmtree(download_folder)
# Tests that multipart params are applied:
#
# - Try to upload with a part size of 10MiB (this will force the large and mid-sized files to be multipart uploaded)
# - Try to upload with multipart disabled
@util.skip_while_rerecording
def test_bulk_put_with_multipart_params(object_storage_client):
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkPutMultipartsTest_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
result = invoke([
'os', 'object', 'bulk-upload',
'--namespace', util.NAMESPACE,
'--bucket-name', create_bucket_request.name,
'--src-dir', root_bulk_put_folder,
'--part-size', '10'
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
result = invoke([
'os', 'object', 'bulk-upload',
'--namespace', util.NAMESPACE,
'--bucket-name', create_bucket_request.name,
'--src-dir', root_bulk_put_folder,
'--no-multipart',
'--overwrite'
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
@util.skip_while_rerecording
def test_bulk_put_with_prefix():
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', root_bulk_put_folder, '--object-prefix', 'bulk_put_prefix_test/'])
# No failures or skips and we uploaded everything
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
assert len(parsed_result['uploaded-objects']) == get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
download_folder = 'tests/temp/verify_files_bulk_put_prefix_{}'.format(bulk_put_bucket_name)
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--download-dir', download_folder, '--prefix', 'bulk_put_prefix_test/'])
actual_download_folder = os.path.join(download_folder, 'bulk_put_prefix_test')
for dir_name, subdir_list, file_list in os.walk(root_bulk_put_folder):
for file in file_list:
source_file_path = os.path.join(dir_name, file)
downloaded_file_path = source_file_path.replace(root_bulk_put_folder, actual_download_folder)
assert os.path.exists(downloaded_file_path)
assert filecmp.cmp(source_file_path, downloaded_file_path, shallow=False)
# Sanity check that we're reporting back that we uploaded the right files
assert 'bulk_put_prefix_test/{}'.format(get_object_name_from_path(root_bulk_put_folder, source_file_path)) in parsed_result['uploaded-objects']
shutil.rmtree(download_folder)
@util.skip_while_rerecording
def test_bulk_put_with_non_existent_folder():
fake_directory = 'tests/folder/not/exist'
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--src-dir', fake_directory])
assert 'UsageError' in result.output
assert 'The specified --src-dir {} (expanded to: {}) does not exist'.format(fake_directory, fake_directory) in result.output
@util.skip_while_rerecording
def test_bulk_put_get_delete_with_inclusions(object_storage_client):
inclusion_test_folder = os.path.join('tests', 'temp', 'os_bulk_upload_inclusion_test')
if not os.path.exists(inclusion_test_folder):
os.makedirs(inclusion_test_folder)
# Make some files for include/exclude
folders_to_files = {
'': ['test_file1.txt', 'test_file2.png'],
'subfolder': ['blah.pdf', 'hello.txt', 'testfile3.png'],
'subfolder/subfolder2': ['xyz.jpg', 'blag.txt', 'byz.jpg', 'testfile4.png']
}
for folder, files in six.iteritems(folders_to_files):
folder_path = os.path.join(inclusion_test_folder, folder)
if not os.path.exists(folder_path):
os.makedirs(folder_path)
for file in files:
file_path = os.path.join(folder_path, file)
with open(file_path, 'w') as f:
                # For non-text extension types this won't create a valid file, but for testing it is probably OK
f.write(generate_random_string(CONTENT_STRING_LENGTH))
result = invoke([
'os',
'object',
'bulk-upload',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--src-dir', inclusion_test_folder,
'--object-prefix', 'inclusion_test/',
'--include', '*.txt', # Matches test_file1.txt, subfolder/hello.txt, subfolder/subfolder2/blag.txt
'--include', 'subfolder/*.png', # Matches subfolder/testfile3.png, subfolder/subfolder2/testfile4.png
'--include', 'subfolder/[b]lah.pdf', # Matches subfolder/blah.pdf
'--include', '*/[ax]yz.jpg' # Matches subfolder/subfolder2/xyz.jpg
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
expected_uploaded_files = [
'{}{}'.format('inclusion_test/', 'test_file1.txt'),
'{}{}'.format('inclusion_test/', 'subfolder/hello.txt'),
'{}{}'.format('inclusion_test/', 'subfolder/subfolder2/blag.txt'),
'{}{}'.format('inclusion_test/', 'subfolder/testfile3.png'),
'{}{}'.format('inclusion_test/', 'subfolder/subfolder2/testfile4.png'),
'{}{}'.format('inclusion_test/', 'subfolder/blah.pdf'),
'{}{}'.format('inclusion_test/', 'subfolder/subfolder2/xyz.jpg')
]
# Check that we uploaded what we said we did
assert len(parsed_result['uploaded-objects']) == len(expected_uploaded_files)
for f in expected_uploaded_files:
assert f in parsed_result['uploaded-objects']
download_folder_base = os.path.join('tests', 'temp', 'verify_os_bulk_upload_inclusion_test')
verify_downloaded_folders_for_inclusion_exclusion_tests(
expected_uploaded_files=expected_uploaded_files,
source_folder=inclusion_test_folder,
download_folder=download_folder_base,
download_prefix_no_slash='inclusion_test'
)
# Download objects with inclusions to make sure that works
target_download_folder = os.path.join(download_folder_base, 'get_with_include')
invoke([
'os', 'object', 'bulk-download',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--download-dir', target_download_folder,
'--prefix', 'inclusion_test/',
'--include', '*.txt',
'--include', 'subfolder/*.png',
'--include', 'subfolder/blah.pdf',
])
expected_uploaded_files.remove('{}{}'.format('inclusion_test/', 'subfolder/subfolder2/xyz.jpg')) # This is not in our --include switches
assert not os.path.exists(os.path.join(target_download_folder, 'inclusion_test', 'subfolder', 'subfolder2', 'xyz.jpg'))
for expected_file in expected_uploaded_files:
target_file = os.path.join(target_download_folder, expected_file)
original_file = target_file.replace(os.path.join(target_download_folder, 'inclusion_test'), inclusion_test_folder)
assert os.path.exists(target_file)
assert filecmp.cmp(original_file, target_file, shallow=False)
# Download a specific object with inclusions
invoke([
'os', 'object', 'bulk-download',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--download-dir', target_download_folder,
'--prefix', 'inclusion_test/',
'--include', 'subfolder/subfolder2/xyz.jpg'
])
assert os.path.exists(os.path.join(target_download_folder, 'inclusion_test', 'subfolder', 'subfolder2', 'xyz.jpg'))
# Delete objects with inclusions
result = invoke([
'os', 'object', 'bulk-delete',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--prefix', 'inclusion_test/',
'--include', '*.txt',
'--include', 'subfolder/blah.pdf',
'--dry-run'
])
parsed_dry_run_result = parse_json_response_from_mixed_output(result.output)
assert len(parsed_dry_run_result['deleted-objects']) == 4
result = invoke([
'os', 'object', 'bulk-delete',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--prefix', 'inclusion_test/',
'--include', '*.txt',
'--include', 'subfolder/blah.pdf',
'--force'
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['delete-failures'] == {}
assert set(parsed_result['deleted-objects']) == set(parsed_dry_run_result['deleted-objects'])
list_objects_responses = oci_cli_object_storage.objectstorage_cli_extended.retrying_list_objects(
client=object_storage_client,
request_id=None,
namespace=util.NAMESPACE,
bucket_name=bulk_put_bucket_name,
prefix='inclusion_test/',
start=None,
end=None,
limit=1000,
delimiter=None,
fields='name',
retrieve_all=True
)
remaining_objects = []
for response in list_objects_responses:
remaining_objects.extend(map(lambda obj: obj.name, response.data.objects))
assert len(remaining_objects) == 3
assert '{}{}'.format('inclusion_test/', 'subfolder/testfile3.png') in remaining_objects
assert '{}{}'.format('inclusion_test/', 'subfolder/subfolder2/testfile4.png') in remaining_objects
assert '{}{}'.format('inclusion_test/', 'subfolder/subfolder2/xyz.jpg') in remaining_objects
shutil.rmtree(target_download_folder)
shutil.rmtree(inclusion_test_folder)
@util.skip_while_rerecording
def test_bulk_put_get_delete_with_exclusions(object_storage_client):
exclusion_test_folder = os.path.join('tests', 'temp', 'os_bulk_upload_exclusion_test')
if not os.path.exists(exclusion_test_folder):
os.makedirs(exclusion_test_folder)
# Make some files for include/exclude
folders_to_files = {
'': ['test_file1.txt', 'test_file2.png'],
'subfolder': ['blah.pdf', 'hello.txt', 'testfile3.png'],
'subfolder/subfolder2': ['xyz.jpg', 'blag.txt', 'byz.jpg', 'testfile4.png']
}
for folder, files in six.iteritems(folders_to_files):
folder_path = os.path.join(exclusion_test_folder, folder)
if not os.path.exists(folder_path):
os.makedirs(folder_path)
for file in files:
file_path = os.path.join(folder_path, file)
with open(file_path, 'w') as f:
                # For non-text extension types this won't create a valid file, but for testing it is probably OK
f.write(generate_random_string(CONTENT_STRING_LENGTH))
result = invoke([
'os',
'object',
'bulk-upload',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--src-dir', exclusion_test_folder,
'--object-prefix', 'exclusion_test/',
'--exclude', '*.txt',
'--exclude', '*.ps1', # Shouldn't match anything
'--exclude', 'subfolder/subfolder2/xyz.jpg',
'--exclude', 'subfolder/[spqr]lah.pdf' # blah.pdf should still be included because it's not slah.pdf, plah.pdf, qlah.pdf or rlah.pdf
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['skipped-objects'] == []
assert parsed_result['upload-failures'] == {}
expected_uploaded_files = [
'{}{}'.format('exclusion_test/', 'test_file2.png'),
'{}{}'.format('exclusion_test/', 'subfolder/blah.pdf'),
'{}{}'.format('exclusion_test/', 'subfolder/testfile3.png'),
'{}{}'.format('exclusion_test/', 'subfolder/subfolder2/byz.jpg'),
'{}{}'.format('exclusion_test/', 'subfolder/subfolder2/testfile4.png')
]
# Check that we uploaded what we said we did
assert len(parsed_result['uploaded-objects']) == len(expected_uploaded_files)
for f in expected_uploaded_files:
assert f in parsed_result['uploaded-objects']
download_folder_base = os.path.join('tests', 'temp', 'verify_os_bulk_upload_exclusion_test')
verify_downloaded_folders_for_inclusion_exclusion_tests(
expected_uploaded_files=expected_uploaded_files,
source_folder=exclusion_test_folder,
download_folder=download_folder_base,
download_prefix_no_slash='exclusion_test'
)
# Download objects with exclusions to make sure that works
target_download_folder = os.path.join(download_folder_base, 'get_with_exclude')
invoke([
'os', 'object', 'bulk-download',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--download-dir', target_download_folder,
'--prefix', 'exclusion_test/',
'--exclude', '*.jpg',
'--exclude', 'subfolder/subfolder2/*.png',
'--exclude', 'subfolder/blah.pdf',
])
assert not os.path.exists(os.path.join(target_download_folder, 'exclusion_test', 'subfolder', 'blah.pdf'))
assert not os.path.exists(os.path.join(target_download_folder, 'exclusion_test', 'subfolder', 'subfolder2', 'byz.jpg'))
assert not os.path.exists(os.path.join(target_download_folder, 'exclusion_test', 'subfolder', 'subfolder2', 'testfile4.png'))
assert get_count_of_files_in_folder_and_subfolders(target_download_folder) == 2
assert os.path.exists(os.path.join(target_download_folder, 'exclusion_test', 'test_file2.png'))
assert os.path.exists(os.path.join(target_download_folder, 'exclusion_test', 'subfolder', 'testfile3.png'))
assert filecmp.cmp(
os.path.join(exclusion_test_folder, 'test_file2.png'),
os.path.join(target_download_folder, 'exclusion_test', 'test_file2.png')
)
assert filecmp.cmp(
os.path.join(exclusion_test_folder, 'subfolder', 'testfile3.png'),
os.path.join(target_download_folder, 'exclusion_test', 'subfolder', 'testfile3.png')
)
# Delete objects with exclusions
result = invoke([
'os', 'object', 'bulk-delete',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--prefix', 'exclusion_test/',
'--exclude', '*.jpg',
'--exclude', 'subfolder/blah.pdf',
'--dry-run'
])
parsed_dry_run_result = parse_json_response_from_mixed_output(result.output)
assert len(parsed_dry_run_result['deleted-objects']) == 3
result = invoke([
'os', 'object', 'bulk-delete',
'--namespace', util.NAMESPACE,
'--bucket-name', bulk_put_bucket_name,
'--prefix', 'exclusion_test/',
'--exclude', '*.jpg',
'--exclude', 'subfolder/blah.pdf',
'--force'
])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['delete-failures'] == {}
assert set(parsed_result['deleted-objects']) == set(parsed_dry_run_result['deleted-objects'])
list_objects_responses = oci_cli_object_storage.objectstorage_cli_extended.retrying_list_objects(
client=object_storage_client,
request_id=None,
namespace=util.NAMESPACE,
bucket_name=bulk_put_bucket_name,
prefix='exclusion_test/',
start=None,
end=None,
limit=1000,
delimiter=None,
fields='name',
retrieve_all=True
)
remaining_objects = []
for response in list_objects_responses:
remaining_objects.extend(map(lambda obj: obj.name, response.data.objects))
assert len(remaining_objects) == 2
assert '{}{}'.format('exclusion_test/', 'subfolder/blah.pdf') in remaining_objects
assert '{}{}'.format('exclusion_test/', 'subfolder/subfolder2/byz.jpg') in remaining_objects
shutil.rmtree(target_download_folder)
shutil.rmtree(exclusion_test_folder)
@util.skip_while_rerecording
def test_delete_when_no_objects_in_bucket(vcr_fixture, object_storage_client):
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkDelete_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name])
assert 'There are no objects to delete in {}'.format(create_bucket_request.name) in result.output
delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
@util.skip_while_rerecording
def test_delete_dry_run(vcr_fixture):
# Dry-run against entire bucket
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--dry-run'])
parsed_result = json.loads(result.output)
assert set(parsed_result['deleted-objects']) == set(bulk_get_object_to_content.keys())
# Dry-run against a folder and all subfolders
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--prefix', 'a/b/', '--dry-run'])
parsed_result = json.loads(result.output)
expected_objects = set().union(bulk_get_prefix_to_object['a/b'], bulk_get_prefix_to_object['a/b/c'], bulk_get_prefix_to_object['a/b/c/d'])
assert set(parsed_result['deleted-objects']) == expected_objects
# Dry-run against a folder and no subfolders
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--prefix', 'a/b/', '--delimiter', '/', '--dry-run'])
parsed_result = json.loads(result.output)
assert set(parsed_result['deleted-objects']) == set(bulk_get_prefix_to_object['a/b'])
@util.skip_while_rerecording
def test_delete(object_storage_client):
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageBulkDelete_{}'.format(random.randint(0, 1000000))
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--src-dir', root_bulk_put_folder])
num_objects_to_delete = get_count_of_files_in_folder_and_subfolders(root_bulk_put_folder)
# Sanity check that the bucket has things in it
assert get_number_of_objects_in_bucket(object_storage_client, create_bucket_request.name) > 0
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name])
if num_objects_to_delete >= 1000:
confirm_prompt = 'WARNING: This command will delete at least {} objects. Are you sure you wish to continue?'.format(num_objects_to_delete)
else:
confirm_prompt = 'WARNING: This command will delete {} objects. Are you sure you wish to continue?'.format(num_objects_to_delete)
assert confirm_prompt in result.output
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--force'])
parsed_result = parse_json_response_from_mixed_output(result.output)
assert parsed_result['delete-failures'] == {}
assert len(parsed_result['deleted-objects']) == num_objects_to_delete
# Check that the bucket is now empty
assert get_number_of_objects_in_bucket(object_storage_client, create_bucket_request.name) == 0
delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
@util.skip_while_rerecording
def test_bulk_operation_table_output_query(object_storage_client):
create_bucket_request = oci.object_storage.models.CreateBucketDetails()
create_bucket_request.name = 'ObjectStorageTableOutput_{}'.format(util.random_number_string())
create_bucket_request.compartment_id = util.COMPARTMENT_ID
util.clear_test_data(object_storage_client, util.NAMESPACE, util.COMPARTMENT_ID, create_bucket_request.name)
object_storage_client.create_bucket(util.NAMESPACE, create_bucket_request)
result = invoke(['os', 'object', 'bulk-upload', '--namespace', util.NAMESPACE, '--bucket-name', create_bucket_request.name, '--src-dir', root_bulk_put_folder, '--output', 'table', '--query', "[?action=='Uploaded'].{file: file, \"opc-content-md5\": \"opc-content-md5\"}"])
assert 'file' in result.output
assert 'opc-content-md5' in result.output
assert 'etag' not in result.output
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--dry-run', '--output', 'table'])
assert 'action' in result.output
assert 'object' in result.output
assert '/a/Object_1' in result.output
result = invoke(['os', 'object', 'bulk-delete', '--namespace', util.NAMESPACE, '--bucket-name', bulk_get_bucket_name, '--dry-run', '--output', 'table', '--query', "[?object=='Object_0'][object]"])
assert 'action' not in result.output
assert '/a/Object_1' not in result.output
assert 'Object_0' in result.output
target_download_folder = os.path.join('tests', 'temp', create_bucket_request.name)
result = invoke([
'os', 'object', 'bulk-download',
'--namespace', util.NAMESPACE,
'--bucket-name', create_bucket_request.name,
'--download-dir', target_download_folder,
'--output', 'table',
])
delete_bucket_and_all_items(object_storage_client, create_bucket_request.name)
shutil.rmtree(target_download_folder)
def invoke(commands, debug=False, **args):
    if debug:
        commands = ['--debug'] + commands
    return util.invoke_command(commands, **args)
def get_count_of_files_in_folder_and_subfolders(directory):
file_count = 0
for dir_name, subdir_list, file_list in os.walk(directory):
file_count = file_count + len(file_list)
return file_count
def generate_random_string(length):
if test_config_container.using_vcr_with_mock_responses():
return 'a' * length
else:
return ''.join(random.choice(string.ascii_lowercase) for i in range(length))
# Pull the JSON data out of output which may contain non-JSON text before it. Assumes that nothing
# comes after the JSON data
def parse_json_response_from_mixed_output(output):
lines = output.split('\n')
json_str = ''
object_begun = False
for line in lines:
if object_begun or line.startswith('{'):
object_begun = True
json_str += line
return json.loads(json_str)
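A minimal usage sketch for the helper above; the sample_output value is illustrative, not real CLI output:

sample_output = 'Uploading files...\nUploaded 3 files\n{"deleted-objects": [], "delete-failures": {}}'
parsed = parse_json_response_from_mixed_output(sample_output)
assert parsed['delete-failures'] == {}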
# For the bulk operations, object names are taken from the file path of the thing we uploaded. Normalize to
# / in the paths (Windows can go both ways) then chop the front bit off
def get_object_name_from_path(path_root, full_path):
return full_path.replace(os.sep, '/').replace(path_root + '/', '')
def delete_bucket_and_all_items(object_storage_client, bucket_name):
list_object_responses = oci_cli_object_storage.objectstorage_cli_extended.retrying_list_objects(
client=object_storage_client,
request_id=None,
namespace=util.NAMESPACE,
bucket_name=bucket_name,
prefix=None,
start=None,
end=None,
limit=1000,
delimiter=None,
fields='name',
retrieve_all=True
)
for response in list_object_responses:
for obj in response.data.objects:
object_storage_client.delete_object(util.NAMESPACE, bucket_name, obj.name)
object_storage_client.delete_bucket(util.NAMESPACE, bucket_name)
def get_number_of_objects_in_bucket(object_storage_client, bucket_name):
list_object_responses = oci_cli_object_storage.objectstorage_cli_extended.retrying_list_objects(
client=object_storage_client,
request_id=None,
namespace=util.NAMESPACE,
bucket_name=bucket_name,
prefix=None,
start=None,
end=None,
limit=1000,
delimiter=None,
fields='name',
retrieve_all=True
)
num_objects_in_bucket = 0
for response in list_object_responses:
num_objects_in_bucket = num_objects_in_bucket + len(response.data.objects)
return num_objects_in_bucket
def verify_downloaded_folders_for_inclusion_exclusion_tests(expected_uploaded_files, source_folder, download_folder, download_prefix_no_slash):
# Download uploaded files and check they are the same
invoke(['os', 'object', 'bulk-download', '--namespace', util.NAMESPACE, '--bucket-name', bulk_put_bucket_name, '--download-dir', download_folder, '--prefix', download_prefix_no_slash + '/'])
# The strings in the expected_uploaded_files array have a "/" in them, but this doesn't match paths on Windows. Using normpath converts these to
# "\" on Windows so our matching/comparison works. On Linux/Unix/macOS it has no effect.
normalized_expected_uploaded_files = []
for euf in expected_uploaded_files:
normalized_expected_uploaded_files.append(os.path.normpath(euf))
actual_download_folder = os.path.join(download_folder, download_prefix_no_slash)
files_compared = 0
for dir_name, subdir_list, file_list in os.walk(source_folder):
for file in file_list:
source_file_path = os.path.join(dir_name, file)
downloaded_file_path = source_file_path.replace(source_folder, actual_download_folder)
if downloaded_file_path.replace(actual_download_folder, download_prefix_no_slash) in normalized_expected_uploaded_files:
files_compared += 1
assert os.path.exists(downloaded_file_path)
assert filecmp.cmp(source_file_path, downloaded_file_path, shallow=False)
assert files_compared == len(expected_uploaded_files)
shutil.rmtree(actual_download_folder)
| 50.118609
| 275
| 0.718643
| 6,536
| 49,016
| 5.057222
| 0.071756
| 0.033884
| 0.017245
| 0.036879
| 0.783506
| 0.754281
| 0.722605
| 0.696587
| 0.667484
| 0.647849
| 0
| 0.004637
| 0.159723
| 49,016
| 977
| 276
| 50.169908
| 0.797897
| 0.091317
| 0
| 0.538881
| 0
| 0
| 0.181217
| 0.032609
| 0
| 0
| 0
| 0
| 0.177353
| 1
| 0.039563
| false
| 0
| 0.017735
| 0.001364
| 0.066849
| 0.001364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
9111781e785cdbf0e4af0d7fe8d43c637a7447e2
| 126
|
py
|
Python
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 2
|
2019-01-09T10:01:29.000Z
|
2019-01-09T10:01:31.000Z
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 6
|
2016-03-08T22:06:45.000Z
|
2020-06-02T15:22:19.000Z
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 2
|
2019-08-07T18:15:16.000Z
|
2021-08-04T12:33:05.000Z
|
import sys
from conans.client.command import main
def run():
main(sys.argv[1:])
if __name__ == '__main__':
run()
| 10.5
| 38
| 0.642857
| 18
| 126
| 4.055556
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.214286
| 126
| 11
| 39
| 11.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
91150271775e1bcf188908a5352023d285ea5e40
| 363
|
py
|
Python
|
src/python_package/__init__.py
|
microsoft/ai-python-package
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | 3
|
2021-12-11T17:02:56.000Z
|
2022-02-23T19:45:35.000Z
|
src/python_package/__init__.py
|
microsoft/ai-python-package
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | 5
|
2022-03-24T13:21:21.000Z
|
2022-03-31T13:21:39.000Z
|
src/python_package/__init__.py
|
microsoft/python-package-template
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in project root for information.
# -------------------------------------------------------------
"""Python Package Template"""
from __future__ import annotations
__version__ = "0.0.2"
| 40.333333
| 80
| 0.484848
| 30
| 363
| 5.6
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009404
| 0.121212
| 363
| 8
| 81
| 45.375
| 0.517241
| 0.801653
| 0
| 0
| 0
| 0
| 0.079365
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
9120f5bc8f814b4692efc7406b81c0fe8103d83e
| 7,225
|
py
|
Python
|
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | 1
|
2018-10-12T15:12:15.000Z
|
2018-10-12T15:12:15.000Z
|
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | null | null | null |
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | null | null | null |
import os
import logging
from collections import namedtuple
from Crypto.PublicKey import RSA
from tornado import gen
from tornado import concurrent
from cosmos.rbac.object import *
from cosmos.service import OBSERVER_PROCESSOR
DEBUG = True
DB_HOST = "127.0.0.1"
DB_NAME = "cosmos"
DB_PORT = 27017
DB_USER_NAME = None
DB_USER_PASSWORD = None
LOG_DB_HOST = "127.0.0.1"
LOG_DB_NAME = "cosmos"
LOG_COL_NAME = "log"
LOG_DB_PORT = 27017
LOG_LEVEL = logging.DEBUG
LOG_DB_USER_NAME = None
LOG_DB_USER_PASSWORD = None
STATIC_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "app")
TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
INDEX_HTML_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "app/index.html")
LOGIN_HTML_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates/login.html")
WEB_SERVER_LISTEN_PORT = 8080
DB_CHANGE_PROCESSOR_ENDPOINT_FORMAT = "http://localhost:{0}/handlechange"
#TODO: You MUST change the following values
COOKIE_SECRET = "+8/YqtEUQfiYLUdO2iJ2OyzHHFSADEuKvKYwFqemFas="
HMAC_KEY = "+8/YqtEUQfiYLUdO2iJ2OyzHIFSAKEuKvKYwFqemFas="
facebook_client_id='000000000000000'
facebook_client_secret='00000000000000000000000000000000'
facebook_scope = "email,public_profile,user_friends"
facebook_redirect_uri = None
DEFAULT_LOGIN_NEXT_URI = "/"
"""
# pip install pycrypto for Crypto
# then, from a Python console, generate private_pem and public_pem and assign them to SERVICE_PRIVATE_KEY and SERVICE_PUBLIC_KEY
import Crypto.PublicKey.RSA as RSA
key = RSA.generate(2048)
private_pem = key.exportKey()
public_pem = key.publickey().exportKey()
"""
# TODO: set both keys below. Private key backup must be kept in a secure place and should never be shared
# If private key is compromised, this service and all other services that trust this will be compromised
# Public key is to share publicly for verification
SERVICE_PRIVATE_KEY = None
SERVICE_PUBLIC_KEY = None
directory_listing_allowed = True
CONFIGURE_LOG = False
START_WEB_SERVER = True
START_OBJECT_CHANGE_MONITOR = False
GOOGLE_OAUTH2_CLIENT_ID = None
GOOGLE_OAUTH2_CLIENT_SECRET = None
GOOGLE_OAUTH2_REDIRECT_URI = None
GITHUB_CLIENT_ID = None
GITHUB_CLIENT_SECRET = None
GITHUB_OAUTH2_CALLBACK_URI = None
USERS_IDENTITY_COL_NAME = "cosmos.users.identity"
USERS_PROFILE_FB_COL_NAME = "cosmos.users.profile.facebook"
USERS_FB_FRIENDS_COL_NAME = "cosmos.users.facebook.friends"
login_url = "/login/"
OAUTH2_SERVICE_URL = r"/(?P<tenant_id>[^\/]+)/oauth2/(?P<function>[^\/]+)/"
OAUTH2_PRIVATE_KEY_PEM = b'-----BEGIN RSA PRIVATE KEY-----\nMIIEpAIBAAKCAQEAl0RIYISOe+9F8dRkm+XQrdaVsn/d3GjufnBnFARRgceu+E6q\nWLlptI5arhckFyXjDOAUEuMnOwmISfeXHrIIp4BU6RMjqRw6ciaIhI7e3LSn5fQ7\nOwCywUaHlUkyq+zQynfH77lUC95YumyUQzGVfdiwQw8XZZYDo2wAFMKJa8heo38Z\nQ0HT788VrcuSa1f4PY9i/wRHXF+xp/9NWUE7wER8eNJjqKxkm0EUKYuB23vUFLHh\n8PG7DiATUlCCpV5txhHcNXa2iEoOGecdWg8Yk5Qs2Gq9aqacJGcgfFK9DN+2/yLn\nFEj+xMVPhB2ynILoJ9N+lfA3TE6nWVKiuriXBQIDAQABAoIBAQCAX2CVGKnbH+ra\nGofvjg+VGCEexUlBvoN4Jmg0Ip4RZ6dj70690UyWAKGQUO89/dc8nAYtKT2n6qUR\nMN+9GxYhINXun2GKKPyo127QIHeeEmrSynxhzGvnfrWdyesI4QcobJLvLPbYw6/F\nNlR02eWmUXj00B/pBHC+Be/jrlz1bF5Gwbw/RINzEJPOxVfaN2D31lotetx5WnV7\nXrTxR5ONpCnwbK8phH4/vQL3rv+ZJgKVhRM8uqd+auW5Lp57y36JFXb+g5SmkFo3\nq+mB2CfMkyip8zpJGDyyVo8XiI1jKieqaiimZ4zpJZwkClBzYsFmio60f9smMGYB\n+nQCX5iZAoGBAL6WtY9BSL0hIxMIwDh4C87rORMmy8ZW5sl91wdFHmjnqlc2Q2yS\n3uVwK32BvxQCTq6FXNRoqYO0xHSrrupSRTJD5KT9EoxpaGlqi1MSB6U6o7r41bSb\nhNwcjKJ40OSABZ/YzATOwq9+AfgU+pMZD+WNlzesYL+7QIPHyKXdwrPLAoGBAMsu\ntcUadzsZEmaaSW5xtouyZF5tWPadB6VZ0Gney8x6uWQ2+ZGLv0QRIxJP0f4cBTkY\nsPx5pUZuo7oaDzCaRH9cV2VJFBahsGrFqcsexVsKh8CfZEMD1PBptodD1Cialr9M\nL0RdSu+1lmcfRqxOXSlaMSHml/cqfOjfHOj3RaZvAoGAEG2LLtLwwySlElHxx6xJ\nUEekPstcSzdYY0vOihjiGybE3wmVXDl4rwwxI3tYjg/42kAylTiETA771BasWBRJ\nVKDXh4Us4R+A2X1OjxWBxTM9w7MJMK0rEZIAaUzCrL+APJwCUfPEgj35S3n7c0x4\nu0+uFiVsnXo1gGZrHCj2TGsCgYEApm3Ccos1MvFcgzLKB2+ZqWAcmsRS5N7Hjoe9\nEZtvsDSuewoU70VbDDRFWBCN3+mv1Y8GGijCWqjx79S8sIEMro5DADIWBFu5GByE\n8l5oJiTAAeYNyF7xI2RUIQRMWl4WMOgEp6kLYsKJSjryNt2Rrfe02yH5RHpHCrEH\nC0TQhn0CgYB0iyjs20bdGYYWNTMlSYPtf8LVhUktvGYyytA/sepRXUe13T87vjCc\nvD3utXPsuaBVGhloE7Dk5YHJdar4n5UcLITNJnu1TyRM4binlzbU4rByxVjclaSX\nGB0O/DCgCsgNFK+LFKf/N1EhRxwJKy+BLVWCIshsAxNv26u296I9jA==\n-----END RSA PRIVATE KEY-----'
OAUTH2_PUBLIC_KEY_PEM = b'-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAl0RIYISOe+9F8dRkm+XQ\nrdaVsn/d3GjufnBnFARRgceu+E6qWLlptI5arhckFyXjDOAUEuMnOwmISfeXHrII\np4BU6RMjqRw6ciaIhI7e3LSn5fQ7OwCywUaHlUkyq+zQynfH77lUC95YumyUQzGV\nfdiwQw8XZZYDo2wAFMKJa8heo38ZQ0HT788VrcuSa1f4PY9i/wRHXF+xp/9NWUE7\nwER8eNJjqKxkm0EUKYuB23vUFLHh8PG7DiATUlCCpV5txhHcNXa2iEoOGecdWg8Y\nk5Qs2Gq9aqacJGcgfFK9DN+2/yLnFEj+xMVPhB2ynILoJ9N+lfA3TE6nWVKiuriX\nBQIDAQAB\n-----END PUBLIC KEY-----'
OAUTH2_TOKEN_EXPIRY_SECONDS = 600
TENANT_ID = 'cosmosframework.com'
OAUTH2_TRUSTED_REDIRECT_URLS = ['http://localhost:8080/oauth2client/authorize/']
AUTH_PUBLIC_KEY_PEM_URL = r"/(?P<tenant_id>[^\/]+)/auth/key/"
#TODO: You should remove this processor in production environments
def test_observer(user, object_service, object_name, data, access_type, columns = None, *args, **kwargs):
assert object_name == "test"
assert access_type == AccessType.READ or access_type == AccessType.INSERT or access_type == AccessType.UPDATE or access_type == AccessType.DELETE
logging.info("Test object observer is called with [{}, {}, {}, {}, {}, {}].".format(user, object_service, object_name, data, access_type, columns))
if AccessType.INSERT == access_type:
val = concurrent.Future()
val.set_result(data)
return (val)
if AccessType.UPDATE == access_type or AccessType.DELETE == access_type:
r = ({"error": None, "n": 1, "ok": 1, "updatedExisting": 1})
val = concurrent.Future()
val.set_result({"_id":r})
return (val)
find_one = kwargs.get("find_one", False)
if find_one:
val = concurrent.Future()
val.set_result({"_id":data})
return (val)
else:
Result = namedtuple("CosmosEmptyResultSet", "fetch_next")
val = concurrent.Future()
val.set_result(False)
return (Result(fetch_next=val))
observers = [
{
"object_name": "test",
"function": test_observer,
"access": [AccessType.READ, AccessType.INSERT, AccessType.UPDATE, AccessType.DELETE],
"type": OBSERVER_PROCESSOR
}
]
try:
from local_settings import *
except ImportError:
pass
if DB_USER_NAME and DB_USER_PASSWORD:
DATABASE_URI = "mongodb://"+ DB_USER_NAME + ":"+ DB_USER_PASSWORD +"@"+ DB_HOST+":"+str(DB_PORT)+"/"+DB_NAME
else:
DATABASE_URI = "mongodb://"+DB_HOST+":"+str(DB_PORT)
if LOG_DB_USER_NAME and LOG_DB_USER_PASSWORD:
LOG_DATABASE_URI = "mongodb://"+ LOG_DB_USER_NAME + ":"+ LOG_DB_USER_PASSWORD +"@"+ LOG_DB_HOST+":"+str(LOG_DB_PORT)+"/"+LOG_DB_NAME
else:
LOG_DATABASE_URI = "mongodb://"+ LOG_DB_HOST+":"+str(LOG_DB_PORT)
GOOGLE_OAUTH2_SETTINGS = {"key": GOOGLE_OAUTH2_CLIENT_ID, "secret": GOOGLE_OAUTH2_CLIENT_SECRET, "redirect_uri": GOOGLE_OAUTH2_REDIRECT_URI}
GITHUB_OAUTH_SETTINGS = {"client_id": GITHUB_CLIENT_ID, "secret": GITHUB_CLIENT_SECRET, "redirect_uri": GITHUB_OAUTH2_CALLBACK_URI}
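The Mongo URIs above are built by string concatenation, which is easy to get wrong; a hedged sketch of an equivalent helper (build_mongo_uri is hypothetical, not part of this settings file):

def build_mongo_uri(host, port, db_name=None, user=None, password=None):
    # Mirrors the branches above: credentials imply "user:password@host:port/db".
    if user and password:
        return "mongodb://{}:{}@{}:{}/{}".format(user, password, host, port, db_name)
    return "mongodb://{}:{}".format(host, port)

# e.g. build_mongo_uri("127.0.0.1", 27017) -> "mongodb://127.0.0.1:27017"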
| 47.847682
| 1,732
| 0.800969
| 809
| 7,225
| 6.87021
| 0.357231
| 0.012594
| 0.010795
| 0.010076
| 0.114969
| 0.098777
| 0.074847
| 0.055416
| 0.055416
| 0.038143
| 0
| 0.060485
| 0.098408
| 7,225
| 150
| 1,733
| 48.166667
| 0.792754
| 0.050104
| 0
| 0.098039
| 0
| 0.019608
| 0.447745
| 0.365478
| 0
| 0
| 0
| 0.013333
| 0.019608
| 1
| 0.009804
| false
| 0.068627
| 0.098039
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 3
|
912b4d4f0a7c8620ff9eef12211953cd7f872472
| 1,633
|
py
|
Python
|
oxe-api/test/resource/company/test_get_company_taxonomy.py
|
CybersecurityLuxembourg/openxeco
|
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
|
[
"BSD-2-Clause"
] | null | null | null |
oxe-api/test/resource/company/test_get_company_taxonomy.py
|
CybersecurityLuxembourg/openxeco
|
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
|
[
"BSD-2-Clause"
] | null | null | null |
oxe-api/test/resource/company/test_get_company_taxonomy.py
|
CybersecurityLuxembourg/openxeco
|
8d4e5578bde6a07f5d6d569b16b4de224abf7bf0
|
[
"BSD-2-Clause"
] | null | null | null |
from test.BaseCase import BaseCase
class TestGetCompanyTaxonomy(BaseCase):
@BaseCase.login
def test_ok(self, token):
self.db.insert({"id": 1, "name": "My Company"}, self.db.tables["Company"])
self.db.insert({"id": 2, "name": "My Company 2"}, self.db.tables["Company"])
self.db.insert({"name": "CAT1"}, self.db.tables["TaxonomyCategory"])
self.db.insert({"name": "CAT2"}, self.db.tables["TaxonomyCategory"])
self.db.insert({"id": 1, "name": "VAL1", "category": "CAT1"}, self.db.tables["TaxonomyValue"])
self.db.insert({"id": 2, "name": "VAL2", "category": "CAT2"}, self.db.tables["TaxonomyValue"])
self.db.insert({"company": 1, "taxonomy_value": 1}, self.db.tables["TaxonomyAssignment"])
self.db.insert({"company": 1, "taxonomy_value": 2}, self.db.tables["TaxonomyAssignment"])
self.db.insert({"company": 2, "taxonomy_value": 2}, self.db.tables["TaxonomyAssignment"])
response = self.application.get('/company/get_company_taxonomy/2',
headers=self.get_standard_header(token))
self.assertEqual([{'company': 2, 'taxonomy_value': 2}], response.json)
self.assertEqual(200, response.status_code)
@BaseCase.login
def test_ok_empty(self, token):
self.db.insert({"id": 2, "name": "My Company"}, self.db.tables["Company"])
response = self.application.get('/company/get_company_taxonomy/2',
headers=self.get_standard_header(token))
self.assertEqual(response.json, [])
self.assertEqual(200, response.status_code)
| 49.484848
| 102
| 0.62278
| 195
| 1,633
| 5.128205
| 0.215385
| 0.12
| 0.12
| 0.07
| 0.88
| 0.813
| 0.766
| 0.45
| 0.2
| 0.2
| 0
| 0.021309
| 0.195346
| 1,633
| 32
| 103
| 51.03125
| 0.739726
| 0
| 0
| 0.333333
| 0
| 0
| 0.238212
| 0.037967
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.083333
| false
| 0
| 0.041667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
9130c59838ca9f05494c451b3ac65479a741bec6
| 265
|
py
|
Python
|
pyctcdecode/__init__.py
|
kensho-technologies/pyctcdecode
|
c33f94bce283ea9af79d30e2b815e3bf34a137c9
|
[
"Apache-2.0"
] | 203
|
2021-06-08T22:49:56.000Z
|
2022-03-31T11:55:21.000Z
|
pyctcdecode/__init__.py
|
kensho-technologies/pyctcdecode
|
c33f94bce283ea9af79d30e2b815e3bf34a137c9
|
[
"Apache-2.0"
] | 40
|
2021-06-11T20:58:07.000Z
|
2022-03-23T10:58:27.000Z
|
pyctcdecode/__init__.py
|
kensho-technologies/pyctcdecode
|
c33f94bce283ea9af79d30e2b815e3bf34a137c9
|
[
"Apache-2.0"
] | 39
|
2021-06-09T21:03:35.000Z
|
2022-03-26T13:14:23.000Z
|
# Copyright 2021-present Kensho Technologies, LLC.
from .alphabet import Alphabet # noqa
from .decoder import BeamSearchDecoderCTC, build_ctcdecoder # noqa
from .language_model import LanguageModel # noqa
__package_name__ = "pyctcdecode"
__version__ = "0.3.0"
| 29.444444
| 67
| 0.792453
| 31
| 265
| 6.419355
| 0.741935
| 0.080402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030568
| 0.135849
| 265
| 8
| 68
| 33.125
| 0.838428
| 0.237736
| 0
| 0
| 0
| 0
| 0.081218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
9130dea9e896e245175b22a313c12b30eff43fdf
| 137
|
py
|
Python
|
wumpus/start_server.py
|
marky1991/Legend-of-Wumpus
|
b53f4a520cea274ddb4c40c6ab4f42a68008896f
|
[
"MIT"
] | null | null | null |
wumpus/start_server.py
|
marky1991/Legend-of-Wumpus
|
b53f4a520cea274ddb4c40c6ab4f42a68008896f
|
[
"MIT"
] | null | null | null |
wumpus/start_server.py
|
marky1991/Legend-of-Wumpus
|
b53f4a520cea274ddb4c40c6ab4f42a68008896f
|
[
"MIT"
] | null | null | null |
from wumpus.server import Server
from circuits import Debugger
s = Server("0.0.0.0", 50551) + Debugger()
s.run()
import sys
sys.exit(1)
| 17.125
| 41
| 0.722628
| 24
| 137
| 4.125
| 0.541667
| 0.060606
| 0.060606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 0.138686
| 137
| 7
| 42
| 19.571429
| 0.754237
| 0
| 0
| 0
| 0
| 0
| 0.051095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
913758ab43b30fce640a96f90c8472af68facfb1
| 117
|
py
|
Python
|
mmdet/core/ufp/__init__.py
|
PuAnysh/UFPMP-Det
|
6809b4f8de3aa1d013a3f86114bc3e8496d896a9
|
[
"Apache-2.0"
] | 9
|
2022-01-18T14:42:39.000Z
|
2022-02-14T02:57:02.000Z
|
mmdet/core/ufp/__init__.py
|
PuAnysh/UFPMP-Det
|
6809b4f8de3aa1d013a3f86114bc3e8496d896a9
|
[
"Apache-2.0"
] | 1
|
2022-03-28T11:51:49.000Z
|
2022-03-31T14:24:02.000Z
|
mmdet/core/ufp/__init__.py
|
PuAnysh/UFPMP-Det
|
6809b4f8de3aa1d013a3f86114bc3e8496d896a9
|
[
"Apache-2.0"
] | null | null | null |
from .spp import *
from .unified_foreground_packing import *
__all__ = [
'phsppog', 'UnifiedForegroundPacking'
]
| 16.714286
| 41
| 0.74359
| 11
| 117
| 7.363636
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 117
| 6
| 42
| 19.5
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.264957
| 0.205128
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
913c83f4f9cee2569debbb5a5301094fbb4ed18e
| 1,823
|
py
|
Python
|
ens/exceptions.py
|
pjryan93/web3.py
|
e066452a7b0e78d6cb8a9462532d169de901ef99
|
[
"MIT"
] | 326
|
2016-04-29T21:51:06.000Z
|
2022-03-31T03:20:54.000Z
|
ens/exceptions.py
|
pjryan93/web3.py
|
e066452a7b0e78d6cb8a9462532d169de901ef99
|
[
"MIT"
] | 283
|
2016-04-15T16:41:31.000Z
|
2017-11-28T16:41:36.000Z
|
ens/exceptions.py
|
pjryan93/web3.py
|
e066452a7b0e78d6cb8a9462532d169de901ef99
|
[
"MIT"
] | 146
|
2016-04-14T16:27:54.000Z
|
2021-10-03T13:31:07.000Z
|
import idna
class AddressMismatch(ValueError):
'''
In order to set up reverse resolution correctly, the ENS name should first
point to the address. This exception is raised if the name does
not currently point to the address.
'''
pass
class InvalidName(idna.IDNAError):
'''
This exception is raised if the provided name does not meet
the syntax standards specified in `EIP 137 name syntax
<https://github.com/ethereum/EIPs/blob/master/EIPS/eip-137.md#name-syntax>`_.
For example: names may not start with a dot, or include a space.
'''
pass
class UnauthorizedError(Exception):
'''
Raised if the sending account is not the owner of the name
you are trying to modify. Make sure to set ``from`` in the
``transact`` keyword argument to the owner of the name.
'''
pass
class UnownedName(Exception):
'''
Raised if you are trying to modify a name that no one owns.
If working on a subdomain, make sure the subdomain gets created
first with :meth:`~ens.main.ENS.setup_address`.
'''
pass
class BidTooLow(ValueError):
'''
Raised if you bid less than the minimum amount
'''
pass
class InvalidBidHash(ValueError):
'''
Raised if you supply incorrect data to generate the bid hash.
'''
pass
class InvalidLabel(ValueError):
'''
Raised if you supply an invalid label
'''
pass
class OversizeTransaction(ValueError):
'''
Raised if a transaction you are trying to create would cost so
much gas that it could not fit in a block.
For example: when you try to start too many auctions at once.
'''
pass
class UnderfundedBid(ValueError):
'''
Raised if you send less wei with your bid than you declared
as your intent to bid.
'''
pass
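A short, hedged usage sketch of these exception classes; require_owned is a hypothetical helper written only to show the raise/catch flow, not part of the ens module:

def require_owned(name, owner):
    # Hypothetical helper: refuse to modify a name that no one owns.
    if owner is None:
        raise UnownedName("{!r} is not owned by anyone; create it first".format(name))
    return owner

try:
    require_owned('sub.example.eth', None)
except UnownedName as exc:
    print('cannot modify:', exc)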
| 22.7875
| 81
| 0.673066
| 259
| 1,823
| 4.72973
| 0.490347
| 0.058776
| 0.044898
| 0.068571
| 0.146939
| 0.042449
| 0
| 0
| 0
| 0
| 0
| 0.004412
| 0.253977
| 1,823
| 79
| 82
| 23.075949
| 0.896324
| 0.646736
| 0
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.473684
| 0.052632
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
9143ca3c30c3da5376a215dd32db4d9aec05c6ba
| 282
|
py
|
Python
|
config.py
|
mhmddpkts/Get-Turkish-Words-with-Web-Scraping
|
6e344640f6dc512f03a9b59522876ce7b6339a86
|
[
"MIT"
] | null | null | null |
config.py
|
mhmddpkts/Get-Turkish-Words-with-Web-Scraping
|
6e344640f6dc512f03a9b59522876ce7b6339a86
|
[
"MIT"
] | null | null | null |
config.py
|
mhmddpkts/Get-Turkish-Words-with-Web-Scraping
|
6e344640f6dc512f03a9b59522876ce7b6339a86
|
[
"MIT"
] | null | null | null |
root_URL = "https://tr.wiktionary.org/wiki/Vikis%C3%B6zl%C3%BCk:S%C3%B6zc%C3%BCk_listesi_"
filepath = "words.csv"
#letters=["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O",
# "P","R","S","T","U","V","Y","Z"] ##İ,Ç,Ö,Ş,Ü harfleri not work correctly
letters=["C"]
| 35.25
| 90
| 0.539007
| 57
| 282
| 2.631579
| 0.859649
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023166
| 0.08156
| 282
| 8
| 91
| 35.25
| 0.552124
| 0.517731
| 0
| 0
| 0
| 0.333333
| 0.659091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e66eceebc9bb0cd90db3c066088340ee6f011e6e
| 545
|
py
|
Python
|
lang/py/cookbook/v2/source/cb2_20_9_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_20_9_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
lang/py/cookbook/v2/source/cb2_20_9_exm_1.py
|
ch1huizong/learning
|
632267634a9fd84a5f5116de09ff1e2681a6cc85
|
[
"MIT"
] | null | null | null |
class Skidoo(object):
''' a mapping which claims to contain all keys, each with a value
of 23; item setting and deletion are no-ops; you can also call
an instance with arbitrary positional args, result is 23. '''
__metaclass__ = MetaInterfaceChecker
__implements__ = IMinimalMapping, ICallable
def __getitem__(self, key): return 23
def __setitem__(self, key, value): pass
def __delitem__(self, key): pass
def __contains__(self, key): return True
def __call__(self, *args): return 23
sk = Skidoo()
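Assuming the cookbook's MetaInterfaceChecker and interface recipes are in scope (they are defined earlier in the book, not in this snippet), the instance behaves as the docstring promises; expected results in comments:

print(sk['anything'])    # 23 -- claims to contain every key
sk['x'] = 99             # silently ignored
del sk['x']              # silently ignored
print('whatever' in sk)  # True
print(sk(1, 2, 3))       # 23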
| 41.923077
| 70
| 0.702752
| 73
| 545
| 4.863014
| 0.684932
| 0.078873
| 0.073239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018779
| 0.218349
| 545
| 12
| 71
| 45.416667
| 0.814554
| 0.333945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.555556
| false
| 0.222222
| 0
| 0.333333
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 3
|
e681d9f0d0bbcd56a55111fcb8b7b0c2f584018e
| 142
|
py
|
Python
|
simulator/cc.py
|
mcfx/trivm
|
5b77ea157c562cfbfe87f7e7d256fb9702f8ceec
|
[
"MIT"
] | 6
|
2022-02-21T15:49:52.000Z
|
2022-02-23T07:16:02.000Z
|
simulator/cc.py
|
mcfx/trivm
|
5b77ea157c562cfbfe87f7e7d256fb9702f8ceec
|
[
"MIT"
] | null | null | null |
simulator/cc.py
|
mcfx/trivm
|
5b77ea157c562cfbfe87f7e7d256fb9702f8ceec
|
[
"MIT"
] | null | null | null |
import os, sys
fn = sys.argv[1]
if os.system('python compile.py %s __tmp.S' % fn) == 0:
os.system('python asm.py __tmp.S %s' % fn[:-2])
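The os.system pipeline above breaks on paths containing spaces; a hedged sketch of the same two-step compile/assemble flow using subprocess:

import subprocess
import sys

fn = sys.argv[1]
# Compile fn to a temporary assembly file, then assemble it in a second step.
if subprocess.run(['python', 'compile.py', fn, '__tmp.S']).returncode == 0:
    subprocess.run(['python', 'asm.py', '__tmp.S', fn[:-2]])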
| 20.285714
| 55
| 0.598592
| 28
| 142
| 2.892857
| 0.571429
| 0.197531
| 0.345679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0.190141
| 142
| 6
| 56
| 23.666667
| 0.678261
| 0
| 0
| 0
| 0
| 0
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e6a1e01053fb282362b9b417d81cb0cf76a2bbed
| 21,947
|
py
|
Python
|
tryhackme/http.py
|
GnarLito/tryhackme.py
|
20b4dd6a15c13c57e7a7be7f59913b937a992e4b
|
[
"MIT"
] | null | null | null |
tryhackme/http.py
|
GnarLito/tryhackme.py
|
20b4dd6a15c13c57e7a7be7f59913b937a992e4b
|
[
"MIT"
] | 16
|
2021-11-22T07:51:32.000Z
|
2021-12-14T00:07:48.000Z
|
tryhackme/http.py
|
GnarLito/tryhackme.py
|
20b4dd6a15c13c57e7a7be7f59913b937a992e4b
|
[
"MIT"
] | null | null | null |
import re
import sys
from urllib.parse import quote as _uriquote
import requests
from . import __version__, errors, utils
from .converters import _county_types, _leaderboard_types, _vpn_types, _not_none
from . import checks
from .cog import request_cog
GET='get'
POST='post'
class HTTPClient:
__CSRF_token_regex = re.compile("const csrfToken[ ]{0,1}=[ ]{0,1}[\"|'](.{36})[\"|']")
__Username_regex = re.compile("const username[ ]{0,1}=[ ]{0,1}[\"|'](.{1,16})[\"|']")
def __init__(self, session=None):
self._state = None
self.authenticated = False
self.__session = requests.Session()
self.static_session = requests.Session()
self.connect_sid = None
self._CSRF_token = None
self.username = None
self.user_agent = f'Tryhackme: (https://github.com/GnarLito/thm-api-py {__version__}) Python/{sys.version_info[0]}.{sys.version_info[1]} requests/{requests.__version__}'
if session is not None:
self.static_login(session)
def close(self):
if self.__session:
self.__session.close()
def static_login(self, session):
self.connect_sid = session
cookie = requests.cookies.create_cookie('connect.sid', session, domain='tryhackme.com')
self.__session.cookies.set_cookie(cookie)
try:
self.request(RouteList.get_unseen_notifications())
self.authenticated = True
self._CSRF_token = self.retrieve_CSRF_token()
self.username = self.retrieve_username()
except Exception as e:
print("session Issue:", e)
def retrieve_CSRF_token(self):
if not self.authenticated:
return None
try:
page = self.request(RouteList.get_profile_page())
return self._HTTPClient__CSRF_token_regex.search(page).group(1)
except AttributeError:
self.authenticated = False
return None
def retrieve_username(self):
if not self.authenticated:
return None
try:
page = self.request(RouteList.get_profile_page())
return self._HTTPClient__Username_regex.search(page).group(1)
except AttributeError:
self.authenticated = False
return None
def request(self, route, **kwargs):
session = self.__session
endpoint = route.url
method = route.method
settings = kwargs.pop('settings', {})
headers = {
'User-Agent': self.user_agent
}
if 'json' in kwargs:
headers['Content-Type'] = 'application/json'
kwargs['data'] = utils.to_json(kwargs.pop('json'))
if "static" in settings:
session = self.static_session
if "CSRF" in settings:
headers['CSRF-Token'] = self._CSRF_token
kwargs["data"]["_CSRF"] = self._CSRF_token
        # TODO: retries, paginator
try:
with session.request(method, endpoint, **kwargs) as r:
data = utils.response_to_json_or_text(r)
# * valid return
if 300 > r.status_code >= 200:
# $ if return url is login then no auth
if r.url.split('/')[-1] == "login":
raise errors.Unauthorized(request=r, route=route, data=data)
return data
# $ no auth
if r.status_code in {401, 403}:
raise errors.Unauthorized(request=r, route=route, data=data)
# $ endpoint not found
if 404 == r.status_code:
raise errors.NotFound(request=r, route=route, data=data)
# $ server side issue's
if r.status_code in {500, 502}:
raise errors.ServerError(request=r, route=route, data=data)
except Exception as e:
raise e
class Route:
# TODO: add post payload capabilities
BASE = "https://www.tryhackme.com"
def __init__(self, method=GET, path='', **parameters):
self.method = method
self._path = path
self.path = path
url = self.BASE + self.path
options = parameters.pop("options", None)
if parameters:
try:
self.path = self.path.format(**{k: _uriquote(v) if isinstance(v, str) else v for k, v in parameters.items()})
self.url = self.BASE + self.path
except Exception as e:
raise errors.NotValidUrlParameters(e)
else:
self.url = url
        if options:
            query = "&".join([f"{i}={options[i]}" for i in options.keys() if options[i] is not None])
            # The joined query string must be assigned back; the bare expression would discard it.
            self.url = self.url + ("&" if "?" in self.url else "?") + query
self.bucket = f"{method} {path}"
class RouteList:
def get_profile_page(**parameters): return Route(path="/profile", **parameters)
# * normal site calls
def get_server_time( **parameters): return Route(path="/api/server-time", **parameters)
def get_site_stats( **parameters): return Route(path="/api/site-stats", **parameters)
def get_practise_rooms( **parameters): return Route(path="/api/practice-rooms", **parameters)
def get_series( **parameters): return Route(path="/api/series?show={show}", **parameters)
def get_glossary_terms( **parameters): return Route(path="/glossary/all-terms", **parameters)
# * Leaderboards
def get_leaderboards( **parameters): return Route(path="/api/leaderboards", **parameters)
def get_koth_leaderboards(**parameters): return Route(path="/api/leaderboards/koth", **parameters)
# * networks
def get_networks( **parameters): return Route(path="/api/networks", **parameters)
def get_network( **parameters): return Route(path="/api/room/network?code={network_code}", **parameters)
def get_network_cost( **parameters): return Route(path="/api/room/cost?code={network_code}", **parameters)
# * account
def get_subscription_cost(**parameters): return Route(path="/account/subscription/cost", **parameters)
# * paths
def get_path( **parameters): return Route(path="/paths/single/{path_code}", **parameters)
def get_public_paths( **parameters): return Route(path="/paths/public", **parameters)
def get_path_summary( **parameters): return Route(path="/paths/summary", **parameters)
# * modules
def get_modules_summary(**parameters): return Route(path="/modules/summary", **parameters)
def get_module( **parameters): return Route(path="/modules/data/{module_code}",**parameters)
# * games
def get_machine_pool( **parameters): return Route(path="/games/koth/get/machine-pool", **parameters)
def get_game_detail( **parameters): return Route(path="/games/koth/data/{game_code}", **parameters)
def get_recent_games( **parameters): return Route(path="/games/koth/recent/games", **parameters)
def get_user_games( **parameters): return Route(path="/games/koth/user/games", **parameters)
def get_game_tickets_won(**parameters): return Route(path="/games/tickets/won?username={username}", **parameters)
def post_join_koth( **parameters): return Route(method=POST, path="/games/koth/new", **parameters)
def post_new_koth( **parameters): return Route(method=POST, path="/games/koth/join-public", **parameters) # ? might be different for premium users
# * VPN
def get_available_vpns(**parameters): return Route(path="/vpn/get-available-vpns", **parameters)
def get_vpn_info( **parameters): return Route(path="/vpn/my-data", **parameters)
# * VM
def get_machine_running( **parameters): return Route(path="/api/vm/running", **parameters)
def post_renew_machine( **parameters): return Route(method=POST, path="/api/vm/renew", **parameters)
def post_terminate_machine( **parameters): return Route(method=POST, path="/api/vm/terminate", **parameters)
# * user -badge
def get_own_badges( **parameters): return Route(path="/api/badges/mine", **parameters)
def get_user_badges(**parameters): return Route(path="/api/badges/get/{username}", **parameters)
def get_all_badges( **parameters): return Route(path="/api/badges/get", **parameters)
# * user -team
def get_team_info(**parameters): return Route(path="/api/team/is-member", **parameters)
# * user -notifications
def get_unseen_notifications(**parameters): return Route(path="/notifications/has-unseen", **parameters)
def get_all_notifications( **parameters): return Route(path="/notifications/get", **parameters)
# * user -messages
def get_unseen_messages( **parameters): return Route(path="/message/has-unseen", **parameters)
def get_all_group_messages(**parameters): return Route(path="/message/group/get-all", **parameters)
def get_group_messages( **parameters): return Route(path="/message/group/get/{group_id}", **parameters)
# * user -room
def get_user_completed_rooms_count( **parameters): return Route(path="/api/no-completed-rooms-public/{username}", **parameters)
def get_user_completed_rooms( **parameters): return Route(path="/api/all-completed-rooms?username={username}", **parameters)
def get_user_created_rooms( **parameters): return Route(path="/api/created-rooms/{username}", **parameters)
# * user
def get_user_rank( **parameters): return Route(path="/api/user/rank/{username}", **parameters)
def get_user_activty(**parameters): return Route(path="/api/user/activity-events?username={username}", **parameters)
def get_all_friends( **parameters): return Route(path="/api/friend/all", **parameters)
def get_discord_user(**parameters): return Route(path="/api/discord/user/{username}", **parameters) # ? rename to user profile
def get_user_exist( **parameters): return Route(path="/api/user/exist/{username}", **parameters)
def search_user( **parameters): return Route(path="/api/similar-users/{username}", **parameters)
# * room
def get_new_rooms( **parameters): return Route(path="/api/new-rooms", **parameters)
def get_recommended_rooms( **parameters): return Route(path="/recommend/last-room?type=json", **parameters)
def get_questions_answered( **parameters): return Route(path="/api/questions-answered", **parameters)
def get_joined_rooms( **parameters): return Route(path="/api/my-rooms", **parameters)
def get_room_percetages( **parameters): return Route(method=POST, path="/api/room-percentages", **parameters) # ? is a post but it gets stuff
def get_room_scoreboard( **parameters): return Route(path="/api/room/scoreboard?code={room_code}", **parameters)
def get_room_votes( **parameters): return Route(path="/api/room/votes?code={room_code}", **parameters)
def get_room_details( **parameters): return Route(path="/api/room/details?codes={room_code}", **parameters) # ? list posibility
def get_room_tasks( **parameters): return Route(path="/api/tasks/{room_code}", **parameters)
def post_room_answer( **parameters): return Route(method=POST, path="/api/{room_code}/answer", **parameters)
def post_deploy_machine( **parameters): return Route(method=POST, path="/material/deploy", **parameters)
def post_reset_room_progress(**parameters): return Route(method=POST, path="/api/reset-progress", **parameters)
def post_leave_room( **parameters): return Route(method=POST, path="/api/room/leave", **parameters)
class HTTP(request_cog, HTTPClient):
# * normal site calls
def get_server_time(self, **attrs):
return self.request(RouteList.get_server_time(), **attrs)
def get_site_stats(self, **attrs):
return self.request(RouteList.get_site_stats(), **attrs)
def get_practise_rooms(self, **attrs):
return self.request(RouteList.get_practise_rooms(), **attrs)
def get_serie(self, show, serie_code, **attrs):
return self.request(RouteList.get_series(show=show, options={"name": serie_code}), **attrs)
def get_series(self, show, **attrs):
return self.request(RouteList.get_series(show=show), **attrs)
def get_glossary_terms(self, **attrs):
return self.request(RouteList.get_glossary_terms(), **attrs)
# * Leaderboards
def get_leaderboards(self, country: _county_types, type:_leaderboard_types, **attrs):
return self.request(RouteList.get_leaderboards(country=country.to_lower_case(), type=type), **attrs)
def get_koth_leaderboards(self, country: _county_types, type:_leaderboard_types, **attrs):
return self.request(RouteList.get_koth_leaderboards(country=country.to_lower_case(), type=type), **attrs)
# * networks
def get_network(self, network_code, **attrs):
return self.request(RouteList.get_network(network_code=network_code), **attrs)
def get_networks(self, **attrs):
return self.request(RouteList.get_networks(),**attrs)
    def get_network_cost(self, network_code, **attrs):
        return self.request(RouteList.get_network_cost(network_code=network_code), **attrs)
# * account
@checks.is_authenticated()
def get_subscription_cost(self, **attrs):
return self.request(RouteList.get_subscription_cost(), **attrs)
# * paths
def get_path(self, path_code, **attrs):
return self.request(RouteList.get_path(path_code=path_code), **attrs)
def get_public_paths(self, **attrs):
return self.request(RouteList.get_public_paths(), **attrs)
def get_path_summary(self, **attrs):
return self.request(RouteList.get_path_summary(), **attrs)
# * modules
def get_modules_summary(self, **attrs):
return self.request(RouteList.get_modules_summary(), **attrs)
def get_module(self, module_code, **attrs):
return self.request(RouteList.get_module(module_code), **attrs)
# * games
def get_machine_pool(self, **attrs):
return self.request(RouteList.get_machine_pool(), **attrs)
def get_game_detail(self, game_code, **attrs):
return self.request(RouteList.get_game_detail(game_code=game_code), **attrs)
def get_recent_games(self, **attrs):
return self.request(RouteList.get_recent_games(), **attrs)
def get_user_games(self, **attrs):
return self.request(RouteList.get_user_games(), **attrs)
def get_game_tickets_won(self, username, **attrs):
return self.request(RouteList.get_game_tickets_won(username=username), **attrs)
@checks.set_header_CSRF()
def post_join_koth(self, **attrs):
return self.request(RouteList.post_join_koth(), **attrs)
@checks.set_header_CSRF()
def post_new_koth(self, **attrs):
return self.request(RouteList.post_new_koth(), **attrs)
# * VPN
@checks.is_authenticated()
def get_available_vpns(self, type : _vpn_types, **attrs):
return self.request(RouteList.get_available_vpns(options={"type": type}), **attrs)
@checks.is_authenticated()
def get_vpn_info(self, **attrs):
return self.request(RouteList.get_vpn_info(), **attrs)
# * VM
def get_machine_running(self, **attrs):
return self.request(RouteList.get_machine_running(), **attrs)
@checks.set_header_CSRF()
def post_renew_machine(self, room_code, **attrs):
return self.request(RouteList.post_renew_machine(), json={"code": room_code}, **attrs)
@checks.set_header_CSRF()
def post_terminate_machine(self, room_code, **attrs):
return self.request(RouteList.post_terminate_machine(), json={"code": room_code}, **attrs)
# * user -badge
@checks.is_authenticated()
def get_own_badges(self, **attrs):
return self.request(RouteList.get_own_badges(), **attrs)
def get_user_badges(self, username, **attrs):
return self.request(RouteList.get_user_badges(username=username), **attrs)
def get_all_badges(self, **attrs):
return self.request(RouteList.get_all_badges(), **attrs)
# * user -team
@checks.is_authenticated()
def get_team_info(self, **attrs):
return self.request(RouteList.get_team_info(), **attrs)
# * user -notifications
@checks.is_authenticated()
def get_unseen_notifications(self, **attrs):
return self.request(RouteList.get_unseen_notifications(), **attrs)
@checks.is_authenticated()
def get_all_notifications(self, **attrs):
return self.request(RouteList.get_all_notifications(), **attrs)
# * user -messages
@checks.is_authenticated()
def get_unseen_messages(self, **attrs):
return self.request(RouteList.get_unseen_messages(), **attrs)
@checks.is_authenticated()
def get_all_group_messages(self, **attrs):
return self.request(RouteList.get_all_group_messages(), **attrs)
@checks.is_authenticated()
def get_group_messages(self, group_id, **attrs):
return self.request(RouteList.get_group_messages(group_id), **attrs)
# * user -room
def get_user_completed_rooms_count(self, username, **attrs):
return self.request(RouteList.get_user_completed_rooms_count(username=username), **attrs)
def get_user_completed_rooms(self, username, limit:int=10, page:int=1, **attrs):
return self.request(RouteList.get_user_completed_rooms(username=username, options={"limit": limit, "page": page}), **attrs)
def get_user_created_rooms(self, username, limit:int=10, page:int=1, **attrs):
return self.request(RouteList.get_user_created_rooms(username=username, options={"limit": limit, "page": page}), **attrs)
# * user
def get_user_rank(self, username : _not_none, **attrs):
return self.request(RouteList.get_user_rank(username=username), **attrs)
def get_user_activty(self, username : _not_none, **attrs):
return self.request(RouteList.get_user_activty(username=username), **attrs)
@checks.is_authenticated()
def get_all_friends(self, **attrs):
return self.request(RouteList.get_all_friends(), **attrs)
def get_discord_user(self, username : _not_none, **attrs):
return self.request(RouteList.get_discord_user(username=username), **attrs)
def get_user_exist(self, username : _not_none, **attrs):
return self.request(RouteList.get_user_exist(username=username), **attrs)
def search_user(self, username : _not_none, **attrs):
return self.request(RouteList.search_user(username=username), **attrs)
# * room
def get_new_rooms(self, **attrs):
return self.request(RouteList.get_new_rooms(), **attrs)
@checks.is_authenticated()
def get_recommended_rooms(self, **attrs):
return self.request(RouteList.get_recommended_rooms(), **attrs)
def get_questions_answered(self, **attrs):
return self.request(RouteList.get_questions_answered(), **attrs)
@checks.is_authenticated()
def get_joined_rooms(self, **attrs):
return self.request(RouteList.get_joined_rooms(), **attrs)
@checks.is_authenticated()
def get_room_percentages(self, room_codes, **attrs):
return self.request(RouteList.get_room_percetages(), json={"rooms": room_codes}, **attrs)
@checks.is_authenticated()
def get_room_scoreboard(self, room_code, **attrs):
return self.request(RouteList.get_room_scoreboard(room_code=room_code), **attrs)
def get_room_votes(self, room_code, **attrs):
return self.request(RouteList.get_room_votes(room_code=room_code), **attrs)
def get_room_details(self, room_code, loadWriteUps: bool=True, loadCreators: bool=True, loadUser: bool=True, **attrs):
return self.request(RouteList.get_room_details(room_code=room_code, options={"loadWriteUps": loadWriteUps, "loadCreators": loadCreators, "loadUser": loadUser}), **attrs).get(room_code, {})
def get_room_tasks(self, room_code, **attrs):
return self.request(RouteList.get_room_tasks(room_code=room_code), **attrs)
@checks.set_header_CSRF()
@checks.is_authenticated()
def post_room_answer(self, room_code, taskNo: int, questionNo: int, answer: str, **attrs):
return self.request(RouteList.post_room_answer(room_code=room_code), json={"taskNo": taskNo, "questionNo": questionNo, "answer": answer}, **attrs)
@checks.set_header_CSRF()
@checks.is_authenticated()
def post_deploy_machine(self, room_code, uploadId, **attrs):
return self.request(RouteList.post_deploy_machine(), json={"roomCode": room_code, "id": uploadId}, **attrs)
@checks.set_header_CSRF()
@checks.is_authenticated()
def post_reset_room_progress(self, room_code, **attrs):
return self.request(RouteList.post_reset_room_progress(), json={"code": room_code}, **attrs)
@checks.set_header_CSRF()
@checks.is_authenticated()
def post_leave_room(self, room_code, **attrs):
return self.request(RouteList.post_leave_room(), json={"code": room_code}, **attrs)
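A hedged usage sketch, assuming request_cog forwards the session keyword to HTTPClient.__init__; the cookie value is a placeholder, not a real session:

client = HTTP(session="<connect.sid cookie value>")  # triggers static_login()
print(client.get_server_time())
print(client.get_user_rank(username="GnarLito"))
client.close()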
| 48.879733
| 196
| 0.645008
| 2,587
| 21,947
| 5.24971
| 0.103981
| 0.045063
| 0.092777
| 0.097195
| 0.581769
| 0.477947
| 0.388116
| 0.290185
| 0.191076
| 0.113688
| 0
| 0.002631
| 0.220668
| 21,947
| 448
| 197
| 48.988839
| 0.791394
| 0.029617
| 0
| 0.163522
| 0
| 0
| 0.091611
| 0.049546
| 0
| 0
| 0
| 0.002232
| 0
| 1
| 0.399371
| false
| 0
| 0.025157
| 0.377358
| 0.657233
| 0.003145
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
e6b9337bdda045910328d8c9cc38338bc1311810
| 406
|
py
|
Python
|
kairon/shared/sso/base.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 9
|
2020-04-22T12:49:29.000Z
|
2020-06-13T22:23:20.000Z
|
kairon/shared/sso/base.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 18
|
2020-04-20T12:39:20.000Z
|
2020-05-21T05:10:51.000Z
|
kairon/shared/sso/base.py
|
rit1200/kairon
|
674a491f6deeae4800825ca93e0726e4fb6e0866
|
[
"Apache-2.0"
] | 13
|
2020-04-21T12:12:40.000Z
|
2020-05-13T07:27:44.000Z
|
class BaseSSO:
async def get_redirect_url(self):
"""Returns redirect url for facebook."""
raise NotImplementedError("Provider not implemented")
async def verify(self, request):
"""
Fetches user details using code received in the request.
:param request: starlette request object
"""
raise NotImplementedError("Provider not implemented")
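A hedged sketch of a concrete subclass built on this base; the endpoint and field names are illustrative, not kairon's real providers:

class DummySSO(BaseSSO):
    async def get_redirect_url(self):
        # Illustrative URL only; a real provider builds this from client config.
        return "https://accounts.example.com/oauth/authorize?client_id=..."

    async def verify(self, request):
        # Starlette exposes query parameters on request.query_params.
        code = request.query_params.get("code")
        if not code:
            raise ValueError("authorization code missing from callback")
        return {"code": code}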
| 25.375
| 64
| 0.657635
| 42
| 406
| 6.309524
| 0.690476
| 0.060377
| 0.241509
| 0.264151
| 0.34717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.263547
| 406
| 15
| 65
| 27.066667
| 0.886288
| 0
| 0
| 0.4
| 0
| 0
| 0.20339
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e6c60d4fe212527f51e4cf099e6d8185c934aa4e
| 164
|
py
|
Python
|
tests/testsoma.py
|
gtmadureira/Python
|
38de6c56fec1d22662f30c1ff4d4f4f411678484
|
[
"MIT"
] | 4
|
2020-04-10T05:48:46.000Z
|
2021-07-14T10:56:19.000Z
|
tests/testsoma.py
|
gtmadureira/Python
|
38de6c56fec1d22662f30c1ff4d4f4f411678484
|
[
"MIT"
] | 1
|
2020-05-09T21:00:52.000Z
|
2020-05-09T21:00:52.000Z
|
tests/testsoma.py
|
gtmadureira/Python
|
38de6c56fec1d22662f30c1ff4d4f4f411678484
|
[
"MIT"
] | null | null | null |
import unittest
from hf_src.main import soma
class TestSoma(unittest.TestCase):
def test_retorno_soma_15_30(self):
self.assertEqual(soma(15, 30), 45)
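For this assertion to hold, hf_src.main.soma is presumably plain addition; a minimal sketch of the function under test:

def soma(a, b):
    # Matches the expectation soma(15, 30) == 45.
    return a + b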
| 20.5
| 42
| 0.743902
| 25
| 164
| 4.68
| 0.72
| 0.102564
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072993
| 0.164634
| 164
| 7
| 43
| 23.428571
| 0.781022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
e6dfe1b17aaabced195ba909adb862f6d72a3bd2
| 214
|
py
|
Python
|
src/main.py
|
mtnmunuklu/SigmaToExcel
|
7d11fda19c0075122928ff5f1dbaab7775d30fe9
|
[
"MIT"
] | 10
|
2021-05-26T11:24:27.000Z
|
2022-01-14T16:42:25.000Z
|
src/main.py
|
mtnmunuklu/SigmaToExcel
|
7d11fda19c0075122928ff5f1dbaab7775d30fe9
|
[
"MIT"
] | null | null | null |
src/main.py
|
mtnmunuklu/SigmaToExcel
|
7d11fda19c0075122928ff5f1dbaab7775d30fe9
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append("../")
from src.app.sigma import SigmaConverter
if __name__ == "__main__":
sigmaconverter = SigmaConverter()
sigmaconverter.read_from_file()
sigmaconverter.write_to_excel()
| 21.4
| 40
| 0.742991
| 24
| 214
| 6.125
| 0.708333
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140187
| 214
| 9
| 41
| 23.777778
| 0.798913
| 0
| 0
| 0
| 0
| 0
| 0.051402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e6f5ecde56dec14d70d3fec0b36dc822d497cba7
| 2,230
|
py
|
Python
|
nervous/utility/config.py
|
csxeba/nervous
|
f7aeb9b2ff875835c346c607722fab517ef6df61
|
[
"MIT"
] | 1
|
2018-09-24T11:29:19.000Z
|
2018-09-24T11:29:19.000Z
|
nervous/utility/config.py
|
csxeba/nervous
|
f7aeb9b2ff875835c346c607722fab517ef6df61
|
[
"MIT"
] | null | null | null |
nervous/utility/config.py
|
csxeba/nervous
|
f7aeb9b2ff875835c346c607722fab517ef6df61
|
[
"MIT"
] | null | null | null |
import os
class StressedNetConfig:
def __init__(self,
synaptic_environmental_constraint=0.8,
group_environmental_constraint=0.6,
stress_factor=0.8,
save_folder=os.path.expanduser("~/.nervous/models/")):
self._synaptic_environmental_constraint = synaptic_environmental_constraint
self._group_environmental_constraint = group_environmental_constraint
self._stress_factor = stress_factor
self._save_folder = save_folder
self._sanitize()
def _sanitize(self):
        if not (0. <= self._group_environmental_constraint < 1.):
            raise ValueError("Group environmental constraint has to be in the range [0. - 1.)")
        if not (0. <= self._synaptic_environmental_constraint < 1.):
            raise ValueError("Synaptic environmental constraint has to be in the range [0. - 1.)")
        if not (0. <= self._stress_factor < 1.):
            raise ValueError("Stress factor has to be in the range [0. - 1.)")
if not os.path.exists(self._save_folder):
os.makedirs(self._save_folder)
@property
def synaptic_environmental_constraint(self):
return self._synaptic_environmental_constraint
@synaptic_environmental_constraint.setter
def synaptic_environmental_constraint(self, value):
self._synaptic_environmental_constraint = value
self._sanitize()
@property
def group_environmental_constraint(self):
return self._group_environmental_constraint
@group_environmental_constraint.setter
def group_environmental_constraint(self, value):
self._group_environmental_constraint = value
self._sanitize()
@property
def stress_factor(self):
return self._stress_factor
@stress_factor.setter
def stress_factor(self, value):
self._stress_factor = value
self._sanitize()
@property
def save_folder(self):
return self._save_folder
@save_folder.setter
def save_folder(self, value):
self._save_folder = value
self._sanitize()
def __getitem__(self, item):
if item == "self":
raise ValueError("Hahaha")
return self.__dict__[item]
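A short usage sketch of the config object; the values are illustrative, and note that construction creates save_folder on disk:

cfg = StressedNetConfig(stress_factor=0.5)
print(cfg.stress_factor)                   # 0.5
cfg.group_environmental_constraint = 0.3   # setter re-runs _sanitize()
print(cfg["_stress_factor"])               # __getitem__ reads from __dict__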
| 33.283582
| 98
| 0.673991
| 244
| 2,230
| 5.790984
| 0.180328
| 0.325548
| 0.219391
| 0.12385
| 0.636943
| 0.334749
| 0.334749
| 0.084218
| 0.084218
| 0.069356
| 0
| 0.010708
| 0.246188
| 2,230
| 66
| 99
| 33.787879
| 0.829863
| 0
| 0
| 0.169811
| 0
| 0
| 0.091031
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.207547
| false
| 0
| 0.018868
| 0.075472
| 0.339623
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
fc084ddbb4a5b92a2c3c4c62cd1d09d582bd5892
| 689
|
py
|
Python
|
skynet-agent/plugins/plugin_api.py
|
skynetera/skynet
|
24a50f2a2eb95b777802934a2b66f162bf4b2d53
|
[
"Apache-2.0"
] | 3
|
2016-09-12T08:54:46.000Z
|
2016-09-18T07:54:10.000Z
|
skynet-agent/plugins/plugin_api.py
|
skynetera/skynet
|
24a50f2a2eb95b777802934a2b66f162bf4b2d53
|
[
"Apache-2.0"
] | null | null | null |
skynet-agent/plugins/plugin_api.py
|
skynetera/skynet
|
24a50f2a2eb95b777802934a2b66f162bf4b2d53
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
__author__ = 'whoami'
"""
@version: 1.0
@author: whoami
@license: Apache Licence 2.0
@contact: [email protected]
@site: http://www.itweet.cn
@software: PyCharm Community Edition
@file: plugin_api.py
@time: 2015-11-28 1:52 PM
"""
from linux import cpu,disk,iostats,loadavg,memory,netstats,swap
def get_load_info():
return loadavg.monitor()
def get_cpu_status():
return cpu.monitor()
def get_memory_info():
return memory.monitor()
def get_swap_info():
return swap.monitor()
def get_disk_info():
return disk.monitor()
def get_network_info():
return netstats.monitor()
def get_iostats_info():
return iostats.monitor()
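A hedged usage sketch; it assumes the sibling linux package (cpu, memory, ...) is importable in the agent's environment:

if __name__ == '__main__':
    # Each wrapper simply delegates to the corresponding monitor() call.
    print(get_load_info())
    print(get_cpu_status())
    print(get_memory_info())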
| 17.225
| 63
| 0.71553
| 101
| 689
| 4.693069
| 0.564356
| 0.088608
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027257
| 0.148041
| 689
| 39
| 64
| 17.666667
| 0.780239
| 0.049347
| 0
| 0
| 0
| 0
| 0.013423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4375
| false
| 0
| 0.0625
| 0.4375
| 0.9375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
fc26055543d8ffb1b618b1328cc4ad7000d27faf
| 25,605
|
py
|
Python
|
S4/S4 Library/generated/protocolbuffers/Localization_pb2.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/generated/protocolbuffers/Localization_pb2.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/generated/protocolbuffers/Localization_pb2.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
import protocolbuffers.Consts_pb2 as Consts_pb2
from google.protobuf import descriptor, message, reflection
DESCRIPTOR = descriptor.FileDescriptor(name = 'Localization.proto', package = 'EA.Sims4.Network',
serialized_pb = '\n\x12Localization.proto\x12\x10EA.Sims4.Network\x1a\x0cConsts.proto"\x85\n\n\x14LocalizedStringToken\x12G\n\x04type\x18\x01 \x02(\x0e20.EA.Sims4.Network.LocalizedStringToken.TokenType:\x07INVALID\x126\n\x08rdl_type\x18\x02 \x01(\x0e2$.EA.Sims4.Network.SocialRichDataType\x12\x12\n\nfirst_name\x18\x03 \x01(\t\x12\x11\n\tlast_name\x18\x04 \x01(\t\x12\x15\n\rfull_name_key\x18\x05 \x01(\r\x12\x11\n\tis_female\x18\x06 \x01(\x08\x12\x0e\n\x06sim_id\x18\x07 \x01(\x04\x126\n\x0btext_string\x18\x08 \x01(\x0b2!.EA.Sims4.Network.LocalizedString\x12\x0e\n\x06number\x18\t \x01(\x02\x12\x12\n\npersona_id\x18\n \x01(\x04\x12\x12\n\naccount_id\x18\x0b \x01(\x04\x12\x16\n\x0epersona_string\x18\x0c \x01(\t\x12\x0f\n\x07zone_id\x18\r \x01(\x04\x12\x10\n\x08world_id\x18\x0e \x01(\r\x12\x11\n\tzone_name\x18\x0f \x01(\t\x12\x10\n\x08event_id\x18\x10 \x01(\x04\x12\x17\n\x0fevent_type_hash\x18\x11 \x01(\r\x12\x17\n\x0fskill_name_hash\x18\x12 \x01(\r\x12\x13\n\x0bskill_level\x18\x13 \x01(\r\x12\x12\n\nskill_guid\x18\x14 \x01(\x04\x12\x17\n\x0ftrait_name_hash\x18\x15 \x01(\r\x12\x12\n\ntrait_guid\x18\x16 \x01(\x04\x12\x15\n\rbit_name_hash\x18\x17 \x01(\r\x12\x10\n\x08bit_guid\x18\x18 \x01(\x04\x12\x18\n\x10catalog_name_key\x18\x19 \x01(\r\x12\x1f\n\x17catalog_description_key\x18\x1a \x01(\r\x12\x13\n\x0bcustom_name\x18\x1b \x01(\t\x12\x1a\n\x12custom_description\x18\x1c \x01(\t\x12\x12\n\ncareer_uid\x18\x1d \x01(\x04\x12\x11\n\tmemory_id\x18\x1e \x01(\x04\x12\x1a\n\x12memory_string_hash\x18\x1f \x01(\r\x12\x10\n\x08raw_text\x18 \x01(\t\x12A\n\rdate_and_time\x18! \x01(\x0b2*.EA.Sims4.Network.LocalizedDateAndTimeData\x12E\n\x08sim_list\x18" \x03(\x0b23.EA.Sims4.Network.LocalizedStringToken.SubTokenData\x1a¨\x01\n\x0cSubTokenData\x12G\n\x04type\x18\x01 \x02(\x0e20.EA.Sims4.Network.LocalizedStringToken.TokenType:\x07INVALID\x12\x12\n\nfirst_name\x18\x02 \x01(\t\x12\x11\n\tlast_name\x18\x03 \x01(\t\x12\x15\n\rfull_name_key\x18\x04 \x01(\r\x12\x11\n\tis_female\x18\x05 \x01(\x08"\x93\x01\n\tTokenType\x12\x0b\n\x07INVALID\x10\x00\x12\x07\n\x03SIM\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x0c\n\x08RAW_TEXT\x10\x03\x12\n\n\x06NUMBER\x10\x04\x12\n\n\x06OBJECT\x10\x05\x12\x11\n\rDATE_AND_TIME\x10\x06\x12\x0c\n\x08RICHDATA\x10\x07\x12\x0f\n\x0bSTRING_LIST\x10\x08\x12\x0c\n\x08SIM_LIST\x10\t"\x9e\x01\n\x18LocalizedDateAndTimeData\x12\x0f\n\x07seconds\x18\x01 \x01(\r\x12\x0f\n\x07minutes\x18\x02 \x01(\r\x12\r\n\x05hours\x18\x03 \x01(\r\x12\x0c\n\x04date\x18\x04 \x01(\r\x12\r\n\x05month\x18\x05 \x01(\r\x12\x11\n\tfull_year\x18\x06 \x01(\r\x12!\n\x19date_and_time_format_hash\x18\x07 \x01(\r"W\n\x0fLocalizedString\x12\x0c\n\x04hash\x18\x01 \x02(\r\x126\n\x06tokens\x18\x02 \x03(\x0b2&.EA.Sims4.Network.LocalizedStringToken"W\n\x17LocalizedStringValidate\x12<\n\x11localized_strings\x18\x01 \x03(\x0b2!.EA.Sims4.Network.LocalizedString')
_LOCALIZEDSTRINGTOKEN_TOKENTYPE = descriptor.EnumDescriptor(name = 'TokenType', full_name = 'EA.Sims4.Network.LocalizedStringToken.TokenType', filename = None, file = DESCRIPTOR,
values = [
descriptor.EnumValueDescriptor(name = 'INVALID', index = 0, number = 0, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'SIM', index = 1, number = 1, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'STRING', index = 2, number = 2, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'RAW_TEXT', index = 3, number = 3, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'NUMBER', index = 4, number = 4, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'OBJECT', index = 5, number = 5, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'DATE_AND_TIME', index = 6, number = 6, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'RICHDATA', index = 7, number = 7, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'STRING_LIST', index = 8, number = 8, options = None, type = None),
descriptor.EnumValueDescriptor(name = 'SIM_LIST', index = 9, number = 9, options = None, type = None)], containing_type = None, options = None, serialized_start = 1193, serialized_end = 1340)
_LOCALIZEDSTRINGTOKEN_SUBTOKENDATA = descriptor.Descriptor(name = 'SubTokenData', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData', filename = None, file = DESCRIPTOR, containing_type = None, fields = [
descriptor.FieldDescriptor(name = 'type', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData.type', index = 0, number = 1, type = 14, cpp_type = 8, label = 2, has_default_value = True, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'first_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData.first_name', index = 1, number = 2, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'last_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData.last_name', index = 2, number = 3, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'full_name_key', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData.full_name_key', index = 3, number = 4, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'is_female', full_name = 'EA.Sims4.Network.LocalizedStringToken.SubTokenData.is_female', index = 4, number = 5, type = 8, cpp_type = 7, label = 1, has_default_value = False, default_value = False, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None)], extensions = [], nested_types = [], enum_types = [], options = None, is_extendable = False, extension_ranges = [], serialized_start = 1022, serialized_end = 1190)
_LOCALIZEDSTRINGTOKEN = descriptor.Descriptor(
name = 'LocalizedStringToken',
full_name = 'EA.Sims4.Network.LocalizedStringToken',
filename = None,
file = DESCRIPTOR,
containing_type = None,
fields = [
descriptor.FieldDescriptor(name = 'type', full_name = 'EA.Sims4.Network.LocalizedStringToken.type', index = 0, number = 1, type = 14, cpp_type = 8, label = 2, has_default_value = True, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'rdl_type', full_name = 'EA.Sims4.Network.LocalizedStringToken.rdl_type', index = 1, number = 2, type = 14, cpp_type = 8, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'first_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.first_name', index = 2, number = 3, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'last_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.last_name', index = 3, number = 4, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'full_name_key', full_name = 'EA.Sims4.Network.LocalizedStringToken.full_name_key', index = 4, number = 5, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'is_female', full_name = 'EA.Sims4.Network.LocalizedStringToken.is_female', index = 5, number = 6, type = 8, cpp_type = 7, label = 1, has_default_value = False, default_value = False, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'sim_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.sim_id', index = 6, number = 7, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'text_string', full_name = 'EA.Sims4.Network.LocalizedStringToken.text_string', index = 7, number = 8, type = 11, cpp_type = 10, label = 1, has_default_value = False, default_value = None, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'number', full_name = 'EA.Sims4.Network.LocalizedStringToken.number', index = 8, number = 9, type = 2, cpp_type = 6, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'persona_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.persona_id', index = 9, number = 10, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'account_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.account_id', index = 10, number = 11, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'persona_string', full_name = 'EA.Sims4.Network.LocalizedStringToken.persona_string', index = 11, number = 12, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'zone_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.zone_id', index = 12, number = 13, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'world_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.world_id', index = 13, number = 14, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'zone_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.zone_name', index = 14, number = 15, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'event_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.event_id', index = 15, number = 16, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'event_type_hash', full_name = 'EA.Sims4.Network.LocalizedStringToken.event_type_hash', index = 16, number = 17, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'skill_name_hash', full_name = 'EA.Sims4.Network.LocalizedStringToken.skill_name_hash', index = 17, number = 18, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'skill_level', full_name = 'EA.Sims4.Network.LocalizedStringToken.skill_level', index = 18, number = 19, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'skill_guid', full_name = 'EA.Sims4.Network.LocalizedStringToken.skill_guid', index = 19, number = 20, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'trait_name_hash', full_name = 'EA.Sims4.Network.LocalizedStringToken.trait_name_hash', index = 20, number = 21, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'trait_guid', full_name = 'EA.Sims4.Network.LocalizedStringToken.trait_guid', index = 21, number = 22, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'bit_name_hash', full_name = 'EA.Sims4.Network.LocalizedStringToken.bit_name_hash', index = 22, number = 23, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'bit_guid', full_name = 'EA.Sims4.Network.LocalizedStringToken.bit_guid', index = 23, number = 24, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'catalog_name_key', full_name = 'EA.Sims4.Network.LocalizedStringToken.catalog_name_key', index = 24, number = 25, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'catalog_description_key', full_name = 'EA.Sims4.Network.LocalizedStringToken.catalog_description_key', index = 25, number = 26, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'custom_name', full_name = 'EA.Sims4.Network.LocalizedStringToken.custom_name', index = 26, number = 27, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'custom_description', full_name = 'EA.Sims4.Network.LocalizedStringToken.custom_description', index = 27, number = 28, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'career_uid', full_name = 'EA.Sims4.Network.LocalizedStringToken.career_uid', index = 28, number = 29, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'memory_id', full_name = 'EA.Sims4.Network.LocalizedStringToken.memory_id', index = 29, number = 30, type = 4, cpp_type = 4, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'memory_string_hash', full_name = 'EA.Sims4.Network.LocalizedStringToken.memory_string_hash', index = 30, number = 31, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'raw_text', full_name = 'EA.Sims4.Network.LocalizedStringToken.raw_text', index = 31, number = 32, type = 9, cpp_type = 9, label = 1, has_default_value = False, default_value = b''.decode('utf-8'), message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'date_and_time', full_name = 'EA.Sims4.Network.LocalizedStringToken.date_and_time', index = 32, number = 33, type = 11, cpp_type = 10, label = 1, has_default_value = False, default_value = None, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'sim_list', full_name = 'EA.Sims4.Network.LocalizedStringToken.sim_list', index = 33, number = 34, type = 11, cpp_type = 10, label = 3, has_default_value = False, default_value = [], message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None)],
extensions = [],
nested_types = [_LOCALIZEDSTRINGTOKEN_SUBTOKENDATA],
enum_types = [_LOCALIZEDSTRINGTOKEN_TOKENTYPE],
options = None,
is_extendable = False,
extension_ranges = [],
serialized_start = 55,
serialized_end = 1340
)
_LOCALIZEDDATEANDTIMEDATA = descriptor.Descriptor(name = 'LocalizedDateAndTimeData', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData', filename = None, file = DESCRIPTOR, containing_type = None, fields = [
descriptor.FieldDescriptor(name = 'seconds', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.seconds', index = 0, number = 1, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'minutes', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.minutes', index = 1, number = 2, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'hours', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.hours', index = 2, number = 3, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'date', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.date', index = 3, number = 4, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'month', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.month', index = 4, number = 5, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'full_year', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.full_year', index = 5, number = 6, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'date_and_time_format_hash', full_name = 'EA.Sims4.Network.LocalizedDateAndTimeData.date_and_time_format_hash', index = 6, number = 7, type = 13, cpp_type = 3, label = 1, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None)], extensions = [], nested_types = [], enum_types = [], options = None, is_extendable = False, extension_ranges = [], serialized_start = 1343, serialized_end = 1501)
_LOCALIZEDSTRING = descriptor.Descriptor(name = 'LocalizedString', full_name = 'EA.Sims4.Network.LocalizedString', filename = None, file = DESCRIPTOR, containing_type = None, fields = [
descriptor.FieldDescriptor(name = 'hash', full_name = 'EA.Sims4.Network.LocalizedString.hash', index = 0, number = 1, type = 13, cpp_type = 3, label = 2, has_default_value = False, default_value = 0, message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None),
descriptor.FieldDescriptor(name = 'tokens', full_name = 'EA.Sims4.Network.LocalizedString.tokens', index = 1, number = 2, type = 11, cpp_type = 10, label = 3, has_default_value = False, default_value = [], message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None)], extensions = [], nested_types = [], enum_types = [], options = None, is_extendable = False, extension_ranges = [], serialized_start = 1503,
serialized_end = 1590)
_LOCALIZEDSTRINGVALIDATE = descriptor.Descriptor(name = 'LocalizedStringValidate', full_name = 'EA.Sims4.Network.LocalizedStringValidate', filename = None, file = DESCRIPTOR, containing_type = None, fields = [descriptor.FieldDescriptor(name = 'localized_strings', full_name = 'EA.Sims4.Network.LocalizedStringValidate.localized_strings', index = 0, number = 1, type = 11, cpp_type = 10, label = 3, has_default_value = False, default_value = [], message_type = None, enum_type = None, containing_type = None, is_extension = False, extension_scope = None, options = None)], extensions = [], nested_types = [], enum_types = [], options = None, is_extendable = False, extension_ranges = [], serialized_start = 1592, serialized_end = 1679)
_LOCALIZEDSTRINGTOKEN_SUBTOKENDATA.fields_by_name['type'].enum_type = _LOCALIZEDSTRINGTOKEN_TOKENTYPE
_LOCALIZEDSTRINGTOKEN_SUBTOKENDATA.containing_type = _LOCALIZEDSTRINGTOKEN
_LOCALIZEDSTRINGTOKEN.fields_by_name['type'].enum_type = _LOCALIZEDSTRINGTOKEN_TOKENTYPE
_LOCALIZEDSTRINGTOKEN.fields_by_name['rdl_type'].enum_type = Consts_pb2._SOCIALRICHDATATYPE
_LOCALIZEDSTRINGTOKEN.fields_by_name['text_string'].message_type = _LOCALIZEDSTRING
_LOCALIZEDSTRINGTOKEN.fields_by_name['date_and_time'].message_type = _LOCALIZEDDATEANDTIMEDATA
_LOCALIZEDSTRINGTOKEN.fields_by_name['sim_list'].message_type = _LOCALIZEDSTRINGTOKEN_SUBTOKENDATA
_LOCALIZEDSTRINGTOKEN_TOKENTYPE.containing_type = _LOCALIZEDSTRINGTOKEN
_LOCALIZEDSTRING.fields_by_name['tokens'].message_type = _LOCALIZEDSTRINGTOKEN
_LOCALIZEDSTRINGVALIDATE.fields_by_name['localized_strings'].message_type = _LOCALIZEDSTRING
DESCRIPTOR.message_types_by_name['LocalizedStringToken'] = _LOCALIZEDSTRINGTOKEN
DESCRIPTOR.message_types_by_name['LocalizedDateAndTimeData'] = _LOCALIZEDDATEANDTIMEDATA
DESCRIPTOR.message_types_by_name['LocalizedString'] = _LOCALIZEDSTRING
DESCRIPTOR.message_types_by_name['LocalizedStringValidate'] = _LOCALIZEDSTRINGVALIDATE
class LocalizedStringToken(message.Message, metaclass = reflection.GeneratedProtocolMessageType):
class SubTokenData(message.Message, metaclass = reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _LOCALIZEDSTRINGTOKEN_SUBTOKENDATA
DESCRIPTOR = _LOCALIZEDSTRINGTOKEN
class LocalizedDateAndTimeData(message.Message, metaclass = reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _LOCALIZEDDATEANDTIMEDATA
class LocalizedString(message.Message, metaclass = reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _LOCALIZEDSTRING
class LocalizedStringValidate(message.Message, metaclass = reflection.GeneratedProtocolMessageType):
DESCRIPTOR = _LOCALIZEDSTRINGVALIDATE
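# --- Illustrative usage (a sketch, not part of the generated output) ---
# Assuming these generated classes expose the standard protobuf message API,
# a LocalizedString could be built and round-tripped like so (the hash value
# below is hypothetical):
#
#   msg = LocalizedString()
#   msg.hash = 0x12345678                      # required string-table key
#   token = msg.tokens.add()
#   token.type = LocalizedStringToken.RAW_TEXT
#   token.raw_text = 'Hello'
#   data = msg.SerializeToString()             # wire-format bytes
#   same = LocalizedString.FromString(data)    # parses back into a message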
| 218.846154
| 2,866
| 0.754189
| 3,485
| 25,605
| 5.313056
| 0.074605
| 0.070426
| 0.048391
| 0.044556
| 0.764312
| 0.730719
| 0.669097
| 0.588788
| 0.556384
| 0.542558
| 0
| 0.051333
| 0.119742
| 25,605
| 116
| 2,867
| 220.732759
| 0.770132
| 0
| 0
| 0
| 0
| 0.009174
| 0.252099
| 0.21984
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018349
| 0
| 0.100917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
fc2e16e55954bef33913f1502bf7278f77489ed4
| 3,504
|
py
|
Python
|
release/stubs.min/Autodesk/Revit/DB/__init___parts/GeomCombinationSet.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/Autodesk/Revit/DB/__init___parts/GeomCombinationSet.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
release/stubs.min/Autodesk/Revit/DB/__init___parts/GeomCombinationSet.py
|
YKato521/ironpython-stubs
|
b1f7c580de48528490b3ee5791b04898be95a9ae
|
[
"MIT"
] | null | null | null |
class GeomCombinationSet(APIObject, IDisposable, IEnumerable):
"""
A set that contains GeomCombination objects.
GeomCombinationSet()
"""
def Clear(self):
"""
Clear(self: GeomCombinationSet)
Removes every GeomCombination item from the set, rendering it empty.
"""
pass
def Contains(self, item):
"""
Contains(self: GeomCombinationSet,item: GeomCombination) -> bool
Tests for the existence of a GeomCombination within the set.
item: The element to be searched for.
Returns: The Contains method returns True if the GeomCombination is within the set,
otherwise False.
"""
pass
def Dispose(self):
""" Dispose(self: GeomCombinationSet,A_0: bool) """
pass
def Erase(self, item):
"""
Erase(self: GeomCombinationSet,item: GeomCombination) -> int
Removes a specified GeomCombination from the set.
item: The GeomCombination to be erased.
Returns: The number of GeomCombinations that were erased from the set.
"""
pass
def ForwardIterator(self):
"""
ForwardIterator(self: GeomCombinationSet) -> GeomCombinationSetIterator
Retrieve a forward moving iterator to the set.
Returns: Returns a forward moving iterator to the set.
"""
pass
def GetEnumerator(self):
"""
GetEnumerator(self: GeomCombinationSet) -> IEnumerator
Retrieve a forward moving iterator to the set.
Returns: Returns a forward moving iterator to the set.
"""
pass
def Insert(self, item):
"""
Insert(self: GeomCombinationSet,item: GeomCombination) -> bool
Insert the specified element into the set.
item: The GeomCombination to be inserted into the set.
Returns: Returns whether the GeomCombination was inserted into the set.
"""
pass
def ReleaseManagedResources(self, *args):
""" ReleaseManagedResources(self: APIObject) """
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: GeomCombinationSet) """
pass
def ReverseIterator(self):
"""
ReverseIterator(self: GeomCombinationSet) -> GeomCombinationSetIterator
Retrieve a backward moving iterator to the set.
Returns: Returns a backward moving iterator to the set.
"""
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args):
""" __iter__(self: IEnumerable) -> object """
pass
IsEmpty = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Test to see if the set is empty.
Get: IsEmpty(self: GeomCombinationSet) -> bool
"""
Size = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Returns the number of GeomCombinations that are in the set.
Get: Size(self: GeomCombinationSet) -> int
"""
| 22.901961
| 221
| 0.62871
| 364
| 3,504
| 5.865385
| 0.255495
| 0.044965
| 0.044965
| 0.053396
| 0.381265
| 0.290398
| 0.254801
| 0.203279
| 0.185948
| 0.185948
| 0
| 0.000395
| 0.278253
| 3,504
| 152
| 222
| 23.052632
| 0.843812
| 0.556221
| 0
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.451613
| false
| 0.451613
| 0
| 0
| 0.548387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
fc2e789c677ad0b86e4fb3d988a64d970401e0fa
| 401
|
py
|
Python
|
epio_commands/management/commands/epio_flush_redis.py
|
idan/pypostbin
|
61dd1c0960e8fb6e4460a5623971cbbc78a55ee7
|
[
"BSD-3-Clause"
] | 2
|
2015-11-05T08:51:42.000Z
|
2016-03-01T22:13:25.000Z
|
epio_commands/management/commands/epio_flush_redis.py
|
idan/pypostbin
|
61dd1c0960e8fb6e4460a5623971cbbc78a55ee7
|
[
"BSD-3-Clause"
] | null | null | null |
epio_commands/management/commands/epio_flush_redis.py
|
idan/pypostbin
|
61dd1c0960e8fb6e4460a5623971cbbc78a55ee7
|
[
"BSD-3-Clause"
] | null | null | null |
import redis
from bundle_config import config
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
help = 'Flushes all keys in redis.'
def handle_noargs(self, **options):
r = redis.Redis(host=config['redis']['host'], port=int(config['redis']['port']), password=config['redis']['password'])
r.flushall()
print "All redis keys flushed."
| 30.846154
| 126
| 0.688279
| 51
| 401
| 5.372549
| 0.588235
| 0.120438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169576
| 401
| 12
| 127
| 33.416667
| 0.822823
| 0
| 0
| 0
| 0
| 0
| 0.199501
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.111111
| 0.333333
| null | null | 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 3
|
fc3c853df2b1d6ee609b09518a9278f9e15018c1
| 114
|
py
|
Python
|
mlbase/lazy.py
|
n-kats/mlbase
|
7d69f259dcaf9608a921523083458fa6d0d6914b
|
[
"MIT"
] | null | null | null |
mlbase/lazy.py
|
n-kats/mlbase
|
7d69f259dcaf9608a921523083458fa6d0d6914b
|
[
"MIT"
] | 2
|
2018-09-23T18:39:01.000Z
|
2018-09-24T18:02:21.000Z
|
mlbase/lazy.py
|
n-kats/mlbase
|
7d69f259dcaf9608a921523083458fa6d0d6914b
|
[
"MIT"
] | null | null | null |
from mlbase.utils.misc import lazy
tensorflow = lazy("tensorflow")
numpy = lazy("numpy")
gensim = lazy("gensim")
| 19
| 34
| 0.72807
| 15
| 114
| 5.533333
| 0.6
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 5
| 35
| 22.8
| 0.83
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
fc4af8f087c68aec19d9b595aee4bd3178dfeac2
| 9,119
|
py
|
Python
|
tutorials/create_table/tests.py
|
MeGustas-5427/SQL_Tutorials
|
627372c2d5d8656d72645830c9a1fae1df278fc7
|
[
"Apache-2.0"
] | 13
|
2020-11-05T04:22:51.000Z
|
2022-02-27T08:44:50.000Z
|
tutorials/create_table/tests.py
|
MeGustas-5427/SQL_Tutorials
|
627372c2d5d8656d72645830c9a1fae1df278fc7
|
[
"Apache-2.0"
] | null | null | null |
tutorials/create_table/tests.py
|
MeGustas-5427/SQL_Tutorials
|
627372c2d5d8656d72645830c9a1fae1df278fc7
|
[
"Apache-2.0"
] | 2
|
2020-11-10T10:01:20.000Z
|
2021-04-07T02:33:29.000Z
|
#!/usr/bin/python3
# -*- coding:utf-8 -*-
# __author__ = '__MeGustas__'
from django.test import TestCase
from django.db import connection
from tutorials.create_table.models import *
# Create your tests here.
class TestHealthFile(TestCase):
def setUp(self):
cursor = connection.cursor()
# Populate Customers table
cursor.execute("INSERT INTO Customers(cust_id, cust_name, cust_address, cust_city, cust_state, cust_zip, cust_country, cust_contact, cust_email) \
VALUES('1000000001', 'Village Toys', '200 Maple Lane', 'Detroit', 'MI', '44444', 'USA', 'John Smith', '[email protected]');")
cursor.execute("INSERT INTO Customers(cust_id, cust_name, cust_address, cust_city, cust_state, cust_zip, cust_country, cust_contact) \
VALUES('1000000002', 'Kids Place', '333 South Lake Drive', 'Columbus', 'OH', '43333', 'USA', 'Michelle Green');")
cursor.execute("INSERT INTO Customers(cust_id, cust_name, cust_address, cust_city, cust_state, cust_zip, cust_country, cust_contact, cust_email) \
VALUES('1000000003', 'Fun4All', '1 Sunny Place', 'Muncie', 'IN', '42222', 'USA', 'Jim Jones', '[email protected]');")
cursor.execute("INSERT INTO Customers(cust_id, cust_name, cust_address, cust_city, cust_state, cust_zip, cust_country, cust_contact, cust_email) \
VALUES('1000000004', 'Fun4All', '829 Riverside Drive', 'Phoenix', 'AZ', '88888', 'USA', 'Denise L. Stephens', '[email protected]');")
cursor.execute("INSERT INTO Customers(cust_id, cust_name, cust_address, cust_city, cust_state, cust_zip, cust_country, cust_contact) \
VALUES('1000000005', 'The Toy Store', '4545 53rd Street', 'Chicago', 'IL', '54545', 'USA', 'Kim Howard');")
# Populate Vendors table
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('BRS01','Bears R Us','123 Main Street','Bear Town','MI','44444', 'USA');")
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('BRE02','Bear Emporium','500 Park Street','Anytown','OH','44333', 'USA');")
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('DLL01','Doll House Inc.','555 High Street','Dollsville','CA','99999', 'USA');")
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('FRB01','Furball Inc.','1000 5th Avenue','New York','NY','11111', 'USA');")
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('FNG01','Fun and Games','42 Galaxy Road','London', NULL,'N16 6PS', 'England');")
cursor.execute("INSERT INTO Vendors(vend_id, vend_name, vend_address, vend_city, vend_state, vend_zip, vend_country) \
VALUES('JTS01','Jouets et ours','1 Rue Amusement','Paris', NULL,'45678', 'France');")
# Populate Products table
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BR01', 'BRS01', '8 inch teddy bear', 5.99, '8 inch teddy bear, comes with cap and jacket');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BR02', 'BRS01', '12 inch teddy bear', 8.99, '12 inch teddy bear, comes with cap and jacket');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BR03', 'BRS01', '18 inch teddy bear', 11.99, '18 inch teddy bear, comes with cap and jacket');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BNBG01', 'DLL01', 'Fish bean bag toy', 3.49, 'Fish bean bag toy, complete with bean bag worms with which to feed it');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BNBG02', 'DLL01', 'Bird bean bag toy', 3.49, 'Bird bean bag toy, eggs are not included');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('BNBG03', 'DLL01', 'Rabbit bean bag toy', 3.49, 'Rabbit bean bag toy, comes with bean bag carrots');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('RGAN01', 'DLL01', 'Raggedy Ann', 4.99, '18 inch Raggedy Ann doll');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('RYL01', 'FNG01', 'King doll', 9.49, '12 inch king doll with royal garments and crown');")
cursor.execute("INSERT INTO Products(prod_id, vend_id, prod_name, prod_price, prod_desc) \
VALUES('RYL02', 'FNG01', 'Queen doll', 9.49, '12 inch queen doll with royal garments and crown');")
# Populate Orders table
cursor.execute("INSERT INTO Orders(order_num, order_date, cust_id) \
VALUES(20005, '2020-05-01', '1000000001');")
cursor.execute("INSERT INTO Orders(order_num, order_date, cust_id) \
VALUES(20006, '2020-01-12', '1000000003');")
cursor.execute("INSERT INTO Orders(order_num, order_date, cust_id) \
VALUES(20007, '2020-01-30', '1000000004');")
cursor.execute("INSERT INTO Orders(order_num, order_date, cust_id) \
VALUES(20008, '2020-02-03', '1000000005');")
cursor.execute("INSERT INTO Orders(order_num, order_date, cust_id) \
VALUES(20009, '2020-02-08', '1000000001');")
# Populate OrderItems table
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20005, 1, 'BR01', 100, 5.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20005, 2, 'BR03', 100, 10.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20006, 1, 'BR01', 20, 5.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20006, 2, 'BR02', 10, 8.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20006, 3, 'BR03', 10, 11.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20007, 1, 'BR03', 50, 11.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20007, 2, 'BNBG01', 100, 2.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20007, 3, 'BNBG02', 100, 2.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20007, 4, 'BNBG03', 100, 2.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20007, 5, 'RGAN01', 50, 4.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20008, 1, 'RGAN01', 5, 4.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20008, 2, 'BR03', 5, 11.99);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20008, 3, 'BNBG01', 10, 3.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20008, 4, 'BNBG02', 10, 3.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20008, 5, 'BNBG03', 10, 3.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20009, 1, 'BNBG01', 250, 2.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20009, 2, 'BNBG02', 250, 2.49);")
cursor.execute("INSERT INTO OrderItems(order_num, order_item, prod_id, quantity, item_price) \
VALUES(20009, 3, 'BNBG03', 250, 2.49);")
def tearDown(self):
# Clean up run after every test method.
Customers.objects.all().delete()
Vendors.objects.all().delete()
Orders.objects.all().delete()
OrderItems.objects.all().delete()
Products.objects.all().delete()
def test_customers(self):
for i in Customers.objects.all():
print(i.to_dict())
for i in Vendors.objects.all():
print(i.to_dict())
for i in Orders.objects.all():
print(i.to_dict())
for i in OrderItems.objects.all():
print(i.to_dict())
for i in Products.objects.all():
print(i.to_dict())
| 67.051471
| 154
| 0.665205
| 1,263
| 9,119
| 4.619161
| 0.196358
| 0.095818
| 0.140041
| 0.169523
| 0.70312
| 0.687693
| 0.67398
| 0.67398
| 0.67398
| 0.654782
| 0
| 0.080113
| 0.188288
| 9,119
| 136
| 155
| 67.051471
| 0.708052
| 0.027196
| 0
| 0.440367
| 0
| 0.220183
| 0.17197
| 0.004739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027523
| false
| 0
| 0.027523
| 0
| 0.06422
| 0.045872
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
fc515ce56fd34f4315010ae886d6091f5950eab2
| 610
|
py
|
Python
|
two_qubit_simulator/circuits.py
|
L-McCormack/two-qubit-simulator
|
d7115f0630c9931724aa660dba4b89a50db4e2e0
|
[
"MIT"
] | null | null | null |
two_qubit_simulator/circuits.py
|
L-McCormack/two-qubit-simulator
|
d7115f0630c9931724aa660dba4b89a50db4e2e0
|
[
"MIT"
] | null | null | null |
two_qubit_simulator/circuits.py
|
L-McCormack/two-qubit-simulator
|
d7115f0630c9931724aa660dba4b89a50db4e2e0
|
[
"MIT"
] | null | null | null |
"""
Contains the QuantumCircuit class
boom.
"""
class QuantumCircuit(object): # pylint: disable=useless-object-inheritance
""" Implements a quantum circuit.
Holds an ordered sequence of gates and applies them, in order, to a quantum register.
"""
def __init__(self):
""" Initialise a QuantumCircuit object """
pass
def add_gate(self, gate):
""" Add a gate to the circuit """
pass
def run_circuit(self, register):
""" Run the circuit on a given quantum register """
pass
def __call__(self, register):
""" Run the circuit on a given quantum register """
pass
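# Illustrative call pattern (a sketch; the gate object below is hypothetical
# and the methods above are not yet implemented):
#
#   circuit = QuantumCircuit()
#   circuit.add_gate(hadamard_gate)
#   circuit.run_circuit(register)
#   circuit(register)            # __call__ is equivalent to run_circuit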
| 23.461538
| 74
| 0.598361
| 67
| 610
| 5.298507
| 0.447761
| 0.059155
| 0.084507
| 0.101408
| 0.292958
| 0.292958
| 0.292958
| 0.292958
| 0.292958
| 0.292958
| 0
| 0
| 0.291803
| 610
| 25
| 75
| 24.4
| 0.821759
| 0.498361
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.444444
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
fc5ae40661fc1b76d02d932d2ea414f59839b072
| 319
|
py
|
Python
|
packages/micropython-official/v1.10/esp32/stubs/ubinascii.py
|
TheVinhLuong102/micropy-stubs
|
55ff1773008f7c4dfc3d70a403986486226eb6b3
|
[
"MIT"
] | 18
|
2019-07-11T13:31:09.000Z
|
2022-01-27T06:38:40.000Z
|
packages/micropython-official/v1.10/esp32/stubs/ubinascii.py
|
TheVinhLuong102/micropy-stubs
|
55ff1773008f7c4dfc3d70a403986486226eb6b3
|
[
"MIT"
] | 9
|
2019-09-01T21:44:49.000Z
|
2022-02-04T20:55:08.000Z
|
packages/micropython-official/v1.10/esp32/stubs/ubinascii.py
|
TheVinhLuong102/micropy-stubs
|
55ff1773008f7c4dfc3d70a403986486226eb6b3
|
[
"MIT"
] | 6
|
2019-10-08T05:31:21.000Z
|
2021-04-22T10:21:01.000Z
|
"""
Module: 'ubinascii' on esp32 1.10.0
"""
# MCU: (sysname='esp32', nodename='esp32', release='1.10.0', version='v1.10 on 2019-01-25', machine='ESP32 module with ESP32')
# Stubber: 1.2.0
def a2b_base64():
pass
def b2a_base64():
pass
def crc32():
pass
def hexlify():
pass
def unhexlify():
pass
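# Illustrative usage on a real board (the stubs above only mirror the names;
# the results shown assume the documented MicroPython ubinascii behaviour):
#
#   import ubinascii
#   ubinascii.hexlify(b'\x12\xab')    # b'12ab'
#   ubinascii.unhexlify('12ab')       # b'\x12\xab'
#   ubinascii.b2a_base64(b'hi')       # b'aGk=\n'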
| 15.190476
| 126
| 0.623824
| 49
| 319
| 4.020408
| 0.571429
| 0.142132
| 0.040609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155642
| 0.194357
| 319
| 20
| 127
| 15.95
| 0.610895
| 0.551724
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 3
|
fc75b91ed00a53eb686038762f01dc7958ac5d5b
| 172
|
py
|
Python
|
molecule_ignite/test/unit/test_driver.py
|
ragingpastry/molecule-ignite
|
aaf005cabba9a8c933191458cf8553da9bac581d
|
[
"MIT"
] | 17
|
2020-02-19T08:16:49.000Z
|
2022-02-05T08:16:42.000Z
|
molecule_ignite/test/unit/test_driver.py
|
ragingpastry/molecule-ignite
|
aaf005cabba9a8c933191458cf8553da9bac581d
|
[
"MIT"
] | 15
|
2020-06-27T10:16:44.000Z
|
2022-01-04T10:37:54.000Z
|
molecule_ignite/test/unit/test_driver.py
|
ragingpastry/molecule-ignite
|
aaf005cabba9a8c933191458cf8553da9bac581d
|
[
"MIT"
] | 11
|
2020-02-18T16:24:29.000Z
|
2022-03-28T11:44:51.000Z
|
from molecule import api
def test_driver_is_detected():
driver_name = __name__.split(".")[0].split("_")[-1]
assert driver_name in [str(d) for d in api.drivers()]
| 24.571429
| 57
| 0.686047
| 27
| 172
| 4
| 0.703704
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013793
| 0.156977
| 172
| 6
| 58
| 28.666667
| 0.731034
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
fc92382e8eb5b5b2b839d82c3970e59959dd78f5
| 5,870
|
py
|
Python
|
tests/share/normalize/test_xml.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | 87
|
2015-01-06T18:24:45.000Z
|
2021-08-08T07:59:40.000Z
|
tests/share/normalize/test_xml.py
|
fortress-biotech/SHARE
|
9c5a05dd831447949fa6253afec5225ff8ab5d4f
|
[
"Apache-2.0"
] | 442
|
2015-01-01T19:16:01.000Z
|
2022-03-30T21:10:26.000Z
|
tests/share/normalize/test_xml.py
|
fortress-biotech/SHARE
|
9c5a05dd831447949fa6253afec5225ff8ab5d4f
|
[
"Apache-2.0"
] | 67
|
2015-03-10T16:32:58.000Z
|
2021-11-12T16:33:41.000Z
|
import xmltodict
from share.transform.chain import * # noqa
EXAMPLE = '''
<entry>
<id>http://arxiv.org/abs/cond-mat/0102536v1</id>
<updated>2001-02-28T20:12:09Z</updated>
<published>2001-02-28T20:12:09Z</published>
<title>Impact of Electron-Electron Cusp
on Configuration Interaction Energies</title>
<summary> The effect of the electron-electron cusp on the convergence of configuration
interaction (CI) wave functions is examined. By analogy with the
pseudopotential approach for electron-ion interactions, an effective
electron-electron interaction is developed which closely reproduces the
scattering of the Coulomb interaction but is smooth and finite at zero
electron-electron separation. The exact many-electron wave function for this
smooth effective interaction has no cusp at zero electron-electron separation.
We perform CI and quantum Monte Carlo calculations for He and Be atoms, both
with the Coulomb electron-electron interaction and with the smooth effective
electron-electron interaction. We find that convergence of the CI expansion of
the wave function for the smooth electron-electron interaction is not
significantly improved compared with that for the divergent Coulomb interaction
for energy differences on the order of 1 mHartree. This shows that, contrary to
popular belief, description of the electron-electron cusp is not a limiting
factor, to within chemical accuracy, for CI calculations.
</summary>
<author>
<name>David Prendergast</name>
<arxiv:affiliation xmlns:arxiv="http://arxiv.org/schemas/atom">Department of Physics</arxiv:affiliation>
</author>
<author>
<name>M. Nolan</name>
<arxiv:affiliation xmlns:arxiv="http://arxiv.org/schemas/atom">NMRC, University College, Cork, Ireland</arxiv:affiliation>
</author>
<author>
<name>Claudia Filippi</name>
<arxiv:affiliation xmlns:arxiv="http://arxiv.org/schemas/atom">Department of Physics</arxiv:affiliation>
</author>
<author>
<name>Stephen Fahy</name>
<arxiv:affiliation xmlns:arxiv="http://arxiv.org/schemas/atom">Department of Physics</arxiv:affiliation>
</author>
<author>
<name>J. C. Greer</name>
<arxiv:affiliation xmlns:arxiv="http://arxiv.org/schemas/atom">NMRC, University College, Cork, Ireland</arxiv:affiliation>
</author>
<arxiv:doi xmlns:arxiv="http://arxiv.org/schemas/atom">10.1063/1.1383585</arxiv:doi>
<link title="doi" href="http://dx.doi.org/10.1063/1.1383585" rel="related"/>
<arxiv:comment xmlns:arxiv="http://arxiv.org/schemas/atom">11 pages, 6 figures, 3 tables, LaTeX209, submitted to The Journal of
Chemical Physics</arxiv:comment>
<arxiv:journal_ref xmlns:arxiv="http://arxiv.org/schemas/atom">J. Chem. Phys. 115, 1626 (2001)</arxiv:journal_ref>
<link href="http://arxiv.org/abs/cond-mat/0102536v1" rel="alternate" type="text/html"/>
<link title="pdf" href="http://arxiv.org/pdf/cond-mat/0102536v1" rel="related" type="application/pdf"/>
<arxiv:primary_category xmlns:arxiv="http://arxiv.org/schemas/atom" term="cond-mat.str-el" scheme="http://arxiv.org/schemas/atom"/>
<category term="cond-mat.str-el" scheme="http://arxiv.org/schemas/atom"/>
</entry>
'''
class Organization(Parser):
name = ctx
class IsAffiliatedWith(Parser):
related = Delegate(Organization, ctx)
class Person(Parser):
related_agents = Map(Delegate(IsAffiliatedWith), ctx.affiliation)
given_name = ParseName(ctx.name).first
family_name = ParseName(ctx.name).last
class Creator(Parser):
agent = Delegate(Person, ctx)
class Preprint(Parser):
title = ctx.entry.title
description = ctx.entry.summary
related_agents = Map(Delegate(Creator), ctx.entry.author)
class Extra:
comment = ctx.entry.comment
journal_ref = ctx.entry.journal_ref
class TestParser:
def test_preprint_parser(self):
parsed = Preprint(
xmltodict.parse(
EXAMPLE,
process_namespaces=True,
namespaces={
'http://www.w3.org/2005/Atom': None,
'http://arxiv.org/schemas/atom': None,
}
)
).parse()
assert isinstance(parsed, dict)
assert parsed['@type'] == 'preprint'
normalized = ctx.pool[parsed]
assert normalized['extra'] == {'comment': '11 pages, 6 figures, 3 tables, LaTeX209, submitted to The Journal of\n Chemical Physics', 'journal_ref': 'J. Chem. Phys. 115, 1626 (2001)'}
# no newlines, leading/trailing white space, or multiple spaces
assert normalized['title'] == 'Impact of Electron-Electron Cusp on Configuration Interaction Energies'
assert normalized['description'] == 'The effect of the electron-electron cusp on the convergence of configuration interaction (CI) wave functions is examined. By analogy with the pseudopotential approach for electron-ion interactions, an effective electron-electron interaction is developed which closely reproduces the scattering of the Coulomb interaction but is smooth and finite at zero electron-electron separation. The exact many-electron wave function for this smooth effective interaction has no cusp at zero electron-electron separation. We perform CI and quantum Monte Carlo calculations for He and Be atoms, both with the Coulomb electron-electron interaction and with the smooth effective electron-electron interaction. We find that convergence of the CI expansion of the wave function for the smooth electron-electron interaction is not significantly improved compared with that for the divergent Coulomb interaction for energy differences on the order of 1 mHartree. This shows that, contrary to popular belief, description of the electron-electron cusp is not a limiting factor, to within chemical accuracy, for CI calculations.'
| 52.410714
| 1,152
| 0.729131
| 783
| 5,870
| 5.449553
| 0.269476
| 0.067495
| 0.042184
| 0.053433
| 0.705648
| 0.690415
| 0.681041
| 0.635575
| 0.635575
| 0.635575
| 0
| 0.025226
| 0.169336
| 5,870
| 111
| 1,153
| 52.882883
| 0.849877
| 0.011244
| 0
| 0.164835
| 0
| 0.153846
| 0.785209
| 0.061541
| 0
| 0
| 0
| 0
| 0.054945
| 1
| 0.010989
| false
| 0
| 0.021978
| 0
| 0.208791
| 0.043956
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
5d78b08ed15d1550fa9397049ff76029d3869bce
| 555
|
py
|
Python
|
tests/blueprint/test_decorators.py
|
cuenca-mx/agave
|
d4719bdbab8e200c98d206475df6adb275e9fdcc
|
[
"MIT"
] | 3
|
2020-12-11T16:48:44.000Z
|
2021-03-29T00:05:57.000Z
|
tests/blueprint/test_decorators.py
|
cuenca-mx/agave
|
d4719bdbab8e200c98d206475df6adb275e9fdcc
|
[
"MIT"
] | 115
|
2020-08-26T13:26:07.000Z
|
2022-03-31T23:58:22.000Z
|
tests/blueprint/test_decorators.py
|
cuenca-mx/agave
|
d4719bdbab8e200c98d206475df6adb275e9fdcc
|
[
"MIT"
] | null | null | null |
from functools import wraps
from agave.blueprints.decorators import copy_attributes
def i_am_test(func):
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.i_am_test = True
return wrapper
class TestResource:
@i_am_test
def retrieve(self) -> str:
return 'hello'
def test_copy_properties_from() -> None:
def retrieve():
...
assert not hasattr(retrieve, 'i_am_test')
retrieve = copy_attributes(TestResource)(retrieve)
assert hasattr(retrieve, 'i_am_test')
| 19.821429
| 55
| 0.677477
| 70
| 555
| 5.157143
| 0.428571
| 0.041551
| 0.096953
| 0.099723
| 0.121884
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218018
| 555
| 27
| 56
| 20.555556
| 0.831797
| 0
| 0
| 0
| 0
| 0
| 0.041441
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.277778
| false
| 0
| 0.111111
| 0.111111
| 0.611111
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
5d7ce8af330d95b04a5584c878164fe2af01973b
| 8,279
|
py
|
Python
|
neaten_db.py
|
Adoni/ZhihuCrawler
|
c275192ced3a344d7b93b7cfd3ebf87ed179400d
|
[
"MIT"
] | null | null | null |
neaten_db.py
|
Adoni/ZhihuCrawler
|
c275192ced3a344d7b93b7cfd3ebf87ed179400d
|
[
"MIT"
] | null | null | null |
neaten_db.py
|
Adoni/ZhihuCrawler
|
c275192ced3a344d7b93b7cfd3ebf87ed179400d
|
[
"MIT"
] | null | null | null |
from pymongo import MongoClient
from pyltp import Segmentor
def insert_questions_from_answered_question():
in_db = MongoClient().zhihu.user_answered_questions
out_db = MongoClient().zhihu_network.questions
existed_question_id = set(map(lambda q: q['_id'], out_db.find()))
segmentor = Segmentor()
segmentor.load("/Users/sunxiaofei/workspace/ltp_data/cws.model")
for u in in_db.find():
for a in u['answers']:
if a['q_id'] in existed_question_id:
continue
existed_question_id.add(a['q_id'])
if len(existed_question_id) % 1000 == 0:
print(len(existed_question_id))
words = segmentor.segment(a['title'].strip().replace(
'\n', ' ').replace('\r', ' ').replace('\b', ' '))
if len(words) < 3:
continue
out_db.insert({'_id': a['q_id'], 'title': ' '.join(words)})
def insert_questions_from_followed_question():
in_db = MongoClient().zhihu.user_followed_questions
out_db = MongoClient().zhihu_network.questions
existed_question_id = set(map(lambda q: q['_id'], out_db.find()))
segmentor = Segmentor()
segmentor.load("/Users/sunxiaofei/workspace/ltp_data/cws.model")
for u in in_db.find():
for q in u['questions']:
if q['id'] in existed_question_id:
continue
existed_question_id.add(q['id'])
words = segmentor.segment(q['title'].strip().replace(
'\n', ' ').replace('\r', ' ').replace('\b', ' '))
if len(words) < 3:
continue
out_db.insert({'_id': q['id'], 'title': ' '.join(words)})
def insert_questions_from_asked_question():
in_db = MongoClient().zhihu.user_asked_questions
out_db = MongoClient().zhihu_network.questions
existed_question_id = set(map(lambda q: q['_id'], out_db.find()))
segmentor = Segmentor()
segmentor.load("/Users/sunxiaofei/workspace/ltp_data/cws.model")
for u in in_db.find():
for q in u['questions']:
if q['id'] in existed_question_id:
continue
existed_question_id.add(q['id'])
if len(existed_question_id) % 1000 == 0:
print(len(existed_question_id))
words = segmentor.segment(q['title'].strip().replace(
'\n', ' ').replace('\r', ' ').replace('\b', ' '))
if len(words) < 3:
continue
out_db.insert({'_id': q['id'], 'title': ' '.join(words)})
def insert_questions_from_collected_question():
in_db = MongoClient().zhihu.user_collected_questions
out_db = MongoClient().zhihu_network.questions
existed_question_id = set(map(lambda q: q['_id'], out_db.find()))
segmentor = Segmentor()
segmentor.load("/Users/sunxiaofei/workspace/ltp_data/cws.model")
for u in in_db.find():
for c_name, c_questions in u['collections'].items():
for a in c_questions:
if a['q_id'] == -1:
continue
if a['q_id'] in existed_question_id:
continue
existed_question_id.add(a['q_id'])
if len(existed_question_id) % 1000 == 0:
print(len(existed_question_id))
words = segmentor.segment(a['title'].strip().replace(
'\n', ' ').replace('\r', ' ').replace('\b', ' '))
if len(words) < 3:
continue
out_db.insert({'_id': a['q_id'], 'title': ' '.join(words)})
def delete_noise_question():
db = MongoClient().zhihu_network.questions
id_to_delete = []
for q in db.find():
if len(q['title'].split(' ')) < 3:
id_to_delete.append(q['_id'])
print(len(id_to_delete))
for _id in id_to_delete:
db.delete_one({'_id': _id})
def remove_enger_inline():
db = MongoClient().zhihu_network.questions
for q in db.find():
if '\n' in q['title'] or '\r' in q['title'] or '\b' in q['title']:
q['title'] = q['title'].replace('\n', ' ')
q['title'] = q['title'].replace('\r', ' ')
q['title'] = q['title'].replace('\b', ' ')
db.update_one({'_id': q['_id']},
{'$set': {'title': q['title']}},
upsert=True)
def insert_user_list():
keys = ['_id', 'name', 'is_zero_user', 'gender', 'location', 'business',
'education', 'motto', 'answer_num', 'collection_num',
'followed_column_num', 'followed_topic_num', 'followee_num',
'follower_num', 'post_num', 'question_num', 'thank_num',
'upvote_num', 'photo_url', 'weibo_url']
out_db = MongoClient().zhihu_network.users
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for line in open('./user_info.data'):
line = line.strip().split('\t')
if len(keys) != len(line):
continue
user = dict(zip(keys, line))
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
for key in user:
if key.endswith('_num'):
user[key] = int(user[key])
out_db.insert(user)
def insert_user_follow_user_list():
out_db = MongoClient().zhihu_network.user_follow_user_adjacency_list
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for line in open('./user_followees.data'):
line = line.strip().split('\t')
user = dict()
user['_id'] = line[0]
user['neibors'] = line[1:]
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
out_db.insert(user)
def insert_user_follow_question_list():
in_db = MongoClient().zhihu.user_followed_questions
out_db = MongoClient().zhihu_network.user_follow_question_adjacency_list
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for user in in_db.find():
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
q_ids = [q['id'] for q in user['questions']]
out_db.insert({'_id': user['_id'], 'neibors': q_ids})
def insert_user_ask_question_list():
in_db = MongoClient().zhihu.user_asked_questions
out_db = MongoClient().zhihu_network.user_ask_question_adjacency_list
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for user in in_db.find():
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
q_ids = [q['id'] for q in user['questions']]
out_db.insert({'_id': user['_id'], 'neibors': q_ids})
def insert_user_collect_question_list():
in_db = MongoClient().zhihu.user_collected_questions
out_db = MongoClient().zhihu_network.user_collect_question_adjacency_list
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for user in in_db.find():
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
q_ids = []
for _, c in user['collections'].items():
q_ids += [q['q_id'] for q in c]
        out_db.insert_one({'_id': user['_id'], 'neibors': q_ids})
def insert_user_answer_question_list():
in_db = MongoClient().zhihu.user_answered_questions
out_db = MongoClient().zhihu_network.user_answer_question_adjacency_list
existed_user_id = set(map(lambda u: u['_id'], out_db.find()))
for user in in_db.find():
if user['_id'] in existed_user_id:
continue
existed_user_id.add(user['_id'])
q_ids = [a['q_id'] for a in user['answers']]
        out_db.insert_one({'_id': user['_id'], 'neibors': q_ids})
if __name__ == '__main__':
# insert_questions_from_answered_question()
# insert_questions_from_followed_question()
# insert_questions_from_asked_question()
# insert_questions_from_collected_question()
    # delete_noise_question()
    # remove_enter_inline()
# insert_user_list()
insert_user_follow_user_list()
# insert_user_follow_question_list()
# insert_user_ask_question_list()
# insert_user_collect_question_list()
# insert_user_answer_question_list()
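The four user-question loaders above repeat the same dedupe-then-insert loop and differ only in the source collection and in how neighbour ids are extracted. A possible consolidation, as a sketch only: the helper name build_adjacency_list and its signature are illustrative, not part of the original script.

from pymongo import MongoClient

def build_adjacency_list(in_coll, out_coll, extract_neighbors):
    # Project only _id when building the dedupe set to keep memory low.
    existing = set(u['_id'] for u in out_coll.find({}, {'_id': 1}))
    for user in in_coll.find():
        if user['_id'] in existing:
            continue
        existing.add(user['_id'])
        # 'neibors' keeps the field name the script already writes.
        out_coll.insert_one({'_id': user['_id'],
                             'neibors': extract_neighbors(user)})

client = MongoClient()
build_adjacency_list(client.zhihu.user_answered_questions,
                     client.zhihu_network.user_answer_question_adjacency_list,
                     lambda u: [a['q_id'] for a in u['answers']])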
5d861ae24ab41a343997586ea4f68f7cd661d4d3 | 301 | py | Python | tests/data_creator_action.py | michalurbanski/bkgames | 69b1d16ae27d3118dd78449ce7deecbd6e1b95e7 | ["MIT"] | null | null | null
from typing import Callable
class DataCreatorAction:
def __init__(self, func: Callable, priority_for_creation: int = 99, priority_for_removal: int = 99):
self.func = func
self.priority_for_creation = priority_for_creation
self.priority_for_removal = priority_for_removal
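A usage sketch for the class above: the two priority fields allow one list of actions to be ordered differently for creation and removal (the callable below is illustrative).

def create_user():
    # Stand-in callable for the example.
    print("creating user")

actions = [
    DataCreatorAction(create_user, priority_for_creation=1),
    DataCreatorAction(create_user),  # both priorities default to 99
]
for action in sorted(actions, key=lambda a: a.priority_for_creation):
    action.func()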
5d93683480faf496a5e564f3b162607a289a4f92 | 21,601 | py | Python | koku/reporting/migrations/0099_ocp_performance.py | Vasyka/koku | b5aa9ec41c3b0821e74afe9ff3a5ffaedb910614 | ["Apache-2.0"] | 2 | 2022-01-12T03:42:39.000Z | 2022-01-12T03:42:40.000Z | null | null | null | 1 | 2021-07-21T09:33:59.000Z | 2021-07-21T09:33:59.000Z
# Generated by Django 2.2.10 on 2020-02-18 12:51
import django.contrib.postgres.indexes
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [("reporting", "0098_auto_20200221_2034")]
operations = [
migrations.RunSQL(
"""
drop materialized view if exists reporting_ocpallcostlineitem_daily_summary;
drop materialized view if exists reporting_ocpallcostlineitem_project_daily_summary;
"""
),
migrations.RemoveIndex(model_name="ocpawscostlineitemdailysummary", name="cost_summary_node_idx"),
migrations.RemoveIndex(
model_name="ocpawscostlineitemprojectdailysummary", name="cost__proj_sum_namespace_idx"
),
migrations.RemoveIndex(model_name="ocpawscostlineitemprojectdailysummary", name="cost_proj_sum_node_idx"),
migrations.RemoveIndex(model_name="ocpazurecostlineitemdailysummary", name="ocpazure_node_idx"),
migrations.RemoveIndex(
model_name="ocpazurecostlineitemprojectdailysummary", name="ocpazure_proj_namespace_idx"
),
migrations.RemoveIndex(model_name="ocpazurecostlineitemprojectdailysummary", name="ocpazure_proj_node_idx"),
migrations.RemoveIndex(model_name="ocpusagelineitemdaily", name="namespace_idx"),
migrations.RemoveIndex(model_name="ocpusagelineitemdaily", name="node_idx"),
migrations.RemoveIndex(model_name="ocpusagelineitemdailysummary", name="summary_namespace_idx"),
migrations.RemoveIndex(model_name="ocpusagelineitemdailysummary", name="summary_node_idx"),
migrations.AlterField(
model_name="ocpawscostlineitemprojectdailysummary", name="usage_end", field=models.DateField()
),
migrations.AlterField(
model_name="ocpawscostlineitemprojectdailysummary", name="usage_start", field=models.DateField()
),
migrations.AlterField(
model_name="ocpazurecostlineitemdailysummary", name="usage_end", field=models.DateField()
),
migrations.AlterField(
model_name="ocpazurecostlineitemdailysummary", name="usage_start", field=models.DateField()
),
migrations.AlterField(
model_name="ocpazurecostlineitemprojectdailysummary", name="usage_end", field=models.DateField()
),
migrations.AlterField(
model_name="ocpazurecostlineitemprojectdailysummary", name="usage_start", field=models.DateField()
),
migrations.AlterField(model_name="ocpstoragelineitemdaily", name="usage_end", field=models.DateField()),
migrations.AlterField(model_name="ocpstoragelineitemdaily", name="usage_start", field=models.DateField()),
migrations.AlterField(model_name="ocpusagelineitemdaily", name="total_seconds", field=models.IntegerField()),
migrations.AlterField(model_name="ocpusagelineitemdaily", name="usage_end", field=models.DateField()),
migrations.AlterField(model_name="ocpusagelineitemdaily", name="usage_start", field=models.DateField()),
migrations.AlterField(model_name="ocpusagelineitemdailysummary", name="usage_end", field=models.DateField()),
migrations.AlterField(model_name="ocpusagelineitemdailysummary", name="usage_start", field=models.DateField()),
migrations.AddIndex(
model_name="ocpawscostlineitemdailysummary",
index=models.Index(fields=["node"], name="cost_summary_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpawscostlineitemprojectdailysummary",
index=models.Index(
fields=["namespace"], name="cost__proj_sum_namespace_idx", opclasses=["varchar_pattern_ops"]
),
),
migrations.AddIndex(
model_name="ocpawscostlineitemprojectdailysummary",
index=models.Index(fields=["node"], name="cost_proj_sum_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpazurecostlineitemdailysummary",
index=models.Index(fields=["node"], name="ocpazure_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpazurecostlineitemprojectdailysummary",
index=models.Index(
fields=["namespace"], name="ocpazure_proj_namespace_idx", opclasses=["varchar_pattern_ops"]
),
),
migrations.AddIndex(
model_name="ocpazurecostlineitemprojectdailysummary",
index=models.Index(fields=["node"], name="ocpazure_proj_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpstoragelineitemdaily",
index=models.Index(
fields=["namespace"], name="ocp_storage_li_namespace_idx", opclasses=["varchar_pattern_ops"]
),
),
migrations.AddIndex(
model_name="ocpstoragelineitemdaily",
index=models.Index(fields=["node"], name="ocp_storage_li_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpusagelineitemdaily",
index=models.Index(fields=["namespace"], name="namespace_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpusagelineitemdaily",
index=models.Index(fields=["node"], name="node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpusagelineitemdailysummary",
index=models.Index(fields=["namespace"], name="summary_namespace_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="ocpusagelineitemdailysummary",
index=models.Index(fields=["node"], name="summary_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AlterField(model_name="costsummary", name="usage_end", field=models.DateField()),
migrations.AlterField(model_name="costsummary", name="usage_start", field=models.DateField()),
migrations.AddIndex(
model_name="costsummary", index=models.Index(fields=["usage_start"], name="ocpcostsum_usage_start_idx")
),
migrations.AddIndex(
model_name="costsummary",
index=models.Index(
fields=["namespace"], name="ocpcostsum_namespace_idx", opclasses=["varchar_pattern_ops"]
),
),
migrations.AddIndex(
model_name="costsummary",
index=models.Index(fields=["node"], name="ocpcostsum_node_idx", opclasses=["varchar_pattern_ops"]),
),
migrations.AddIndex(
model_name="costsummary",
index=django.contrib.postgres.indexes.GinIndex(fields=["pod_labels"], name="ocpcostsum_pod_labels_idx"),
),
# This extension will help specifically with "col LIKE %val%"
# operations. (As long as val is at least 3 characters)
migrations.RunSQL(
"""
create extension if not exists pg_trgm schema public;
"""
),
# Create indexes to aid with text searching.
# These cases will specifically help with case-insensitive
# and contains (vs startswith) searches
# ocp usage line item daily
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists ocp_namespace_idx
on reporting_ocpusagelineitem_daily using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists ocp_node_idx
on reporting_ocpusagelineitem_daily using gin (UPPER(node) gin_trgm_ops);
"""
),
# ocp usage line item daily summary
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists ocp_summary_namespace_like_idx
on reporting_ocpusagelineitem_daily_summary using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists ocp_summary_node_like_idx
on reporting_ocpusagelineitem_daily_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
# reporting_ocpstoragelineitem_daily
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists ocp_storage_li_namespace_like_idx
on reporting_ocpstoragelineitem_daily using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists ocp_storage_li_node_like_idx
on reporting_ocpstoragelineitem_daily using gin (UPPER(node) gin_trgm_ops);
"""
),
# ocp azure cost
migrations.RunSQL(
"""
/* add node index for like trigram ops */
create index if not exists ocpazure_node_like_idx
on reporting_ocpazurecostlineitem_daily_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
# ocp azure project cost
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists ocpazure_proj_namespace_like_idx
on reporting_ocpazurecostlineitem_project_daily_summary using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists ocpazure_proj_node_like_idx
on reporting_ocpazurecostlineitem_project_daily_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
# reporting_ocpawscostlineitem_daily_summary
migrations.RunSQL(
"""
/* add node index for like trigram ops */
create index if not exists cost_summary_node_like_idx
on reporting_ocpawscostlineitem_daily_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
# reporting_ocpawscostlineitem_project_daily_summary
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists cost__proj_sum_namespace_like_idx
on reporting_ocpawscostlineitem_project_daily_summary using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists cost__proj_sum_node_like_idx
on reporting_ocpawscostlineitem_project_daily_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
# reporting_ocpcosts_summary
migrations.RunSQL(
"""
/* add namespace index for like trigram ops */
create index if not exists ocpcostsum_namespace_like_idx
on reporting_ocpcosts_summary using gin (UPPER(namespace) gin_trgm_ops);
/* add node index for like trigram ops */
create index if not exists ocpcostsum_node_like_idx
on reporting_ocpcosts_summary using gin (UPPER(node) gin_trgm_ops);
"""
),
migrations.RunSQL(
"""
drop materialized view if exists reporting_ocpallcostlineitem_daily_summary;
create materialized view if not exists reporting_ocpallcostlineitem_daily_summary as
SELECT row_number() OVER () AS id,
lids.source_type,
lids.cluster_id,
lids.cluster_alias,
lids.namespace,
lids.node,
lids.resource_id,
lids.usage_start,
lids.usage_end,
lids.usage_account_id,
lids.account_alias_id,
lids.product_code,
lids.product_family,
lids.instance_type,
lids.region,
lids.availability_zone,
lids.tags,
lids.usage_amount,
lids.unit,
lids.unblended_cost,
lids.markup_cost,
lids.currency_code,
lids.shared_projects,
lids.project_costs
FROM ( SELECT 'AWS'::text AS source_type,
reporting_ocpawscostlineitem_daily_summary.cluster_id,
reporting_ocpawscostlineitem_daily_summary.cluster_alias,
reporting_ocpawscostlineitem_daily_summary.namespace,
reporting_ocpawscostlineitem_daily_summary.node,
reporting_ocpawscostlineitem_daily_summary.resource_id,
reporting_ocpawscostlineitem_daily_summary.usage_start::date,
reporting_ocpawscostlineitem_daily_summary.usage_end::date,
reporting_ocpawscostlineitem_daily_summary.usage_account_id,
reporting_ocpawscostlineitem_daily_summary.account_alias_id,
reporting_ocpawscostlineitem_daily_summary.product_code,
reporting_ocpawscostlineitem_daily_summary.product_family,
reporting_ocpawscostlineitem_daily_summary.instance_type,
reporting_ocpawscostlineitem_daily_summary.region,
reporting_ocpawscostlineitem_daily_summary.availability_zone,
reporting_ocpawscostlineitem_daily_summary.tags,
reporting_ocpawscostlineitem_daily_summary.usage_amount,
reporting_ocpawscostlineitem_daily_summary.unit,
reporting_ocpawscostlineitem_daily_summary.unblended_cost,
reporting_ocpawscostlineitem_daily_summary.markup_cost,
reporting_ocpawscostlineitem_daily_summary.currency_code,
reporting_ocpawscostlineitem_daily_summary.shared_projects,
reporting_ocpawscostlineitem_daily_summary.project_costs
FROM reporting_ocpawscostlineitem_daily_summary
WHERE reporting_ocpawscostlineitem_daily_summary.usage_start >= date_trunc('month'::text, date_trunc('month'::text, now()) - '1 day'::interval day)
UNION
SELECT 'Azure'::text AS source_type,
reporting_ocpazurecostlineitem_daily_summary.cluster_id,
reporting_ocpazurecostlineitem_daily_summary.cluster_alias,
reporting_ocpazurecostlineitem_daily_summary.namespace,
reporting_ocpazurecostlineitem_daily_summary.node,
reporting_ocpazurecostlineitem_daily_summary.resource_id,
reporting_ocpazurecostlineitem_daily_summary.usage_start::date,
reporting_ocpazurecostlineitem_daily_summary.usage_end::date,
reporting_ocpazurecostlineitem_daily_summary.subscription_guid AS usage_account_id,
NULL::integer AS account_alias_id,
reporting_ocpazurecostlineitem_daily_summary.service_name AS product_code,
NULL::character varying AS product_family,
reporting_ocpazurecostlineitem_daily_summary.instance_type,
reporting_ocpazurecostlineitem_daily_summary.resource_location AS region,
NULL::character varying AS availability_zone,
reporting_ocpazurecostlineitem_daily_summary.tags,
reporting_ocpazurecostlineitem_daily_summary.usage_quantity AS usage_amount,
reporting_ocpazurecostlineitem_daily_summary.unit_of_measure AS unit,
reporting_ocpazurecostlineitem_daily_summary.pretax_cost AS unblended_cost,
reporting_ocpazurecostlineitem_daily_summary.markup_cost,
reporting_ocpazurecostlineitem_daily_summary.currency AS currency_code,
reporting_ocpazurecostlineitem_daily_summary.shared_projects,
reporting_ocpazurecostlineitem_daily_summary.project_costs
FROM reporting_ocpazurecostlineitem_daily_summary
WHERE reporting_ocpazurecostlineitem_daily_summary.usage_start >= date_trunc('month'::text, date_trunc('month'::text, now()) - '1 day'::interval day)) lids
with no data;
create index mv_reporting_ocpallcostlineitem_daily_summary_namespace_ix
on reporting_ocpallcostlineitem_daily_summary using gin (namespace);
create index mv_reporting_ocpallcostlineitem_daily_summary_node_ix
on reporting_ocpallcostlineitem_daily_summary (node varchar_pattern_ops);
create index mv_reporting_ocpallcostlineitem_daily_summary_usage_ix
on reporting_ocpallcostlineitem_daily_summary (usage_start);
drop materialized view if exists reporting_ocpallcostlineitem_project_daily_summary;
create materialized view if not exists reporting_ocpallcostlineitem_project_daily_summary as
SELECT row_number() OVER () AS id,
lids.source_type,
lids.cluster_id,
lids.cluster_alias,
lids.data_source,
lids.namespace,
lids.node,
lids.pod_labels,
lids.resource_id,
lids.usage_start,
lids.usage_end,
lids.usage_account_id,
lids.account_alias_id,
lids.product_code,
lids.product_family,
lids.instance_type,
lids.region,
lids.availability_zone,
lids.usage_amount,
lids.unit,
lids.unblended_cost,
lids.project_markup_cost,
lids.pod_cost,
lids.currency_code
FROM ( SELECT 'AWS'::text AS source_type,
reporting_ocpawscostlineitem_project_daily_summary.cluster_id,
reporting_ocpawscostlineitem_project_daily_summary.cluster_alias,
reporting_ocpawscostlineitem_project_daily_summary.data_source,
reporting_ocpawscostlineitem_project_daily_summary.namespace,
reporting_ocpawscostlineitem_project_daily_summary.node,
reporting_ocpawscostlineitem_project_daily_summary.pod_labels,
reporting_ocpawscostlineitem_project_daily_summary.resource_id,
reporting_ocpawscostlineitem_project_daily_summary.usage_start::date,
reporting_ocpawscostlineitem_project_daily_summary.usage_end::date,
reporting_ocpawscostlineitem_project_daily_summary.usage_account_id,
reporting_ocpawscostlineitem_project_daily_summary.account_alias_id,
reporting_ocpawscostlineitem_project_daily_summary.product_code,
reporting_ocpawscostlineitem_project_daily_summary.product_family,
reporting_ocpawscostlineitem_project_daily_summary.instance_type,
reporting_ocpawscostlineitem_project_daily_summary.region,
reporting_ocpawscostlineitem_project_daily_summary.availability_zone,
reporting_ocpawscostlineitem_project_daily_summary.usage_amount,
reporting_ocpawscostlineitem_project_daily_summary.unit,
reporting_ocpawscostlineitem_project_daily_summary.unblended_cost,
reporting_ocpawscostlineitem_project_daily_summary.project_markup_cost,
reporting_ocpawscostlineitem_project_daily_summary.pod_cost,
reporting_ocpawscostlineitem_project_daily_summary.currency_code
FROM reporting_ocpawscostlineitem_project_daily_summary
WHERE reporting_ocpawscostlineitem_project_daily_summary.usage_start >= date_trunc('month'::text, date_trunc('month'::text, now()) - '1 day'::interval day)
UNION
SELECT 'Azure'::text AS source_type,
reporting_ocpazurecostlineitem_project_daily_summary.cluster_id,
reporting_ocpazurecostlineitem_project_daily_summary.cluster_alias,
reporting_ocpazurecostlineitem_project_daily_summary.data_source,
reporting_ocpazurecostlineitem_project_daily_summary.namespace,
reporting_ocpazurecostlineitem_project_daily_summary.node,
reporting_ocpazurecostlineitem_project_daily_summary.pod_labels,
reporting_ocpazurecostlineitem_project_daily_summary.resource_id,
reporting_ocpazurecostlineitem_project_daily_summary.usage_start::date,
reporting_ocpazurecostlineitem_project_daily_summary.usage_end::date,
reporting_ocpazurecostlineitem_project_daily_summary.subscription_guid AS usage_account_id,
NULL::integer AS account_alias_id,
reporting_ocpazurecostlineitem_project_daily_summary.service_name AS product_code,
NULL::character varying AS product_family,
reporting_ocpazurecostlineitem_project_daily_summary.instance_type,
reporting_ocpazurecostlineitem_project_daily_summary.resource_location AS region,
NULL::character varying AS availability_zone,
reporting_ocpazurecostlineitem_project_daily_summary.usage_quantity AS usage_amount,
reporting_ocpazurecostlineitem_project_daily_summary.unit_of_measure AS unit,
reporting_ocpazurecostlineitem_project_daily_summary.pretax_cost AS unblended_cost,
reporting_ocpazurecostlineitem_project_daily_summary.project_markup_cost,
reporting_ocpazurecostlineitem_project_daily_summary.pod_cost,
reporting_ocpazurecostlineitem_project_daily_summary.currency AS currency_code
FROM reporting_ocpazurecostlineitem_project_daily_summary
WHERE reporting_ocpazurecostlineitem_project_daily_summary.usage_start >= date_trunc('month'::text, date_trunc('month'::text, now()) - '1 day'::interval day)) lids
with no data;
create index mv_reporting_ocpallcostlineitem_prj_daily_summary_namespace_ix
on reporting_ocpallcostlineitem_project_daily_summary (namespace varchar_pattern_ops);
create index mv_reporting_ocpallcostlineitem_prj_daily_summary_node_ix
on reporting_ocpallcostlineitem_project_daily_summary (node varchar_pattern_ops);
create index mv_reporting_ocpallcostlineitem_prj_daily_summary_namespace_like_ix
on reporting_ocpallcostlineitem_project_daily_summary using gin (namespace gin_trgm_ops);
create index mv_reporting_ocpallcostlineitem_prj_daily_summary_node_like_ix
on reporting_ocpallcostlineitem_project_daily_summary using gin (node gin_trgm_ops);
create index mv_reporting_ocpallcostlineitem_prj_daily_summary_usage_ix
on reporting_ocpallcostlineitem_project_daily_summary (usage_start);
"""
),
migrations.RunSQL(
"""
refresh materialized view reporting_ocpallcostlineitem_daily_summary;
refresh materialized view reporting_ocpallcostlineitem_project_daily_summary;
"""
),
]
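The pg_trgm GIN indexes created above only help queries that filter with a matching expression, e.g. UPPER(col) LIKE '%VAL%' with at least three characters. A minimal sketch, assuming a configured Django connection; the function name is illustrative.

from django.db import connection

def find_namespaces(fragment):
    # Case-insensitive "contains" search that can use
    # the UPPER(namespace) gin_trgm_ops index from this migration.
    with connection.cursor() as cur:
        cur.execute(
            """
            SELECT DISTINCT namespace
            FROM reporting_ocpusagelineitem_daily_summary
            WHERE UPPER(namespace) LIKE UPPER(%s)
            """,
            ["%" + fragment + "%"],
        )
        return [row[0] for row in cur.fetchall()]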
5da38d402c0b885654d90358b5f682eddb296488 | 672 | py | Python | 03.py | SnowWolf75/aoc-2020 | 1745a6cf46dac097869e5af99194b710e78bed28 | ["Unlicense"] | null | null | null
#!/usr/bin/env python3
import sys, os
import unittest
from lib.common import *
filename = "inputs/2020_12_03_input.txt"
class day03:
def __init__(self):
pass
class day03part1(day03):
def solve(self, args):
pass
class day03part2(day03):
def solve(self, args):
pass
class examples(unittest.TestCase):
def test_examples_part1(self):
day3 = day03part1()
        # self.assertTrue(...)
def test_examples_part2(self):
day3 = day03part2()
        # self.assertTrue(...)
class solutions(unittest.TestCase):
def test_part1(self):
day3 = day03part1()
    def test_part2(self):
        day3 = day03part2()


if __name__ == "__main__":
    unittest.main()
5db3dae6928f712e933165c643051e536448b1fb | 359 | py | Python | ois_api_client/v3_0/dto/Lines.py | peterkulik/ois_api_client | 51dabcc9f920f89982c4419bb058f5a88193cee0 | ["MIT"] | 7 | 2020-10-22T08:15:29.000Z | 2022-01-27T07:59:39.000Z | null | null | null | null | null | null
from typing import List
from dataclasses import dataclass
from .Line import Line
@dataclass
class Lines:
"""Product / service items
:param merged_item_indicator: Indicates whether the data exchange contains merged line data due to size reduction
:param line: Product / service item
"""
merged_item_indicator: bool
line: List[Line]
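A small construction sketch (values are illustrative; the fields of Line itself are defined in Line.py and not shown here):

lines = Lines(merged_item_indicator=False, line=[])
print(lines)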
5db5a1d6975c3995d47712215ed2acd01be9b8ad | 208 | py | Python | application/model/_base.py | keysona/blog | 783e0bdbed1e4d8ec9857ee609b39c9dfb958670 | ["MIT"] | null | null | null
from flask_sqlalchemy import SQLAlchemy, Model
# class BaseModel(Model):
# def save(self):
# db.session.add(self)
# db.session.commit(self)
# def delete(self):
# db.session.
db = SQLAlchemy()
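A possible completion of the commented-out mixin above, as a sketch only: commit() takes no arguments, and delete presumably pairs session.delete() with a commit. flask_sqlalchemy accepts such a base class via the model_class keyword.

from flask_sqlalchemy import Model, SQLAlchemy

class BaseModel(Model):
    def save(self):
        db.session.add(self)
        db.session.commit()

    def delete(self):
        db.session.delete(self)
        db.session.commit()

db = SQLAlchemy(model_class=BaseModel)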
5dcb3695e4bb82f323f1875e14dd30d0eb26c6e3 | 199 | py | Python | stograde/common/run_status.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | ["MIT"] | null | null | null
from enum import auto, Enum
class RunStatus(Enum):
SUCCESS = auto()
CALLED_PROCESS_ERROR = auto()
FILE_NOT_FOUND = auto()
PROCESS_LOOKUP_ERROR = auto()
TIMEOUT_EXPIRED = auto()
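A sketch of how these members typically map onto subprocess outcomes; the run helper below is illustrative and not part of stograde.

import subprocess

def run(cmd):
    try:
        subprocess.run(cmd, check=True, timeout=30)
        return RunStatus.SUCCESS
    except subprocess.CalledProcessError:
        return RunStatus.CALLED_PROCESS_ERROR
    except subprocess.TimeoutExpired:
        return RunStatus.TIMEOUT_EXPIRED
    except FileNotFoundError:
        return RunStatus.FILE_NOT_FOUND
    except ProcessLookupError:
        return RunStatus.PROCESS_LOOKUP_ERROR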
5dd288bce128d196a30c7168a6af79b6e365abd9 | 11,995 | py | Python | saleor-env/lib/python3.7/site-packages/snowballstemmer/nepali_stemmer.py | tadartefactorist/mask | 7967dd4ad39e3d26ac516719faefb40e00a8cbff | ["BSD-3-Clause"] | null | null | null | 1 | 2021-06-01T23:55:30.000Z | 2021-06-01T23:55:30.000Z | venv/lib/python2.7/site-packages/snowballstemmer/nepali_stemmer.py | tvek/DatasciencePythonInitBase | e578b4a3026b55bc2935b200453e511f1731c75e | ["MIT"] | null | null | null
# This file was generated automatically by the Snowball to Python compiler
# http://snowballstem.org/
from .basestemmer import BaseStemmer
from .among import Among
class NepaliStemmer(BaseStemmer):
'''
This class was automatically generated by a Snowball to Python compiler
It implements the stemming algorithm defined by a snowball script.
'''
a_0 = [
Among(u"\u0932\u093E\u0907", -1, 1),
Among(u"\u0932\u093E\u0908", -1, 1),
Among(u"\u0938\u0901\u0917", -1, 1),
Among(u"\u0938\u0902\u0917", -1, 1),
Among(u"\u092E\u093E\u0930\u094D\u092B\u0924", -1, 1),
Among(u"\u0930\u0924", -1, 1),
Among(u"\u0915\u093E", -1, 2),
Among(u"\u092E\u093E", -1, 1),
Among(u"\u0926\u094D\u0935\u093E\u0930\u093E", -1, 1),
Among(u"\u0915\u093F", -1, 2),
Among(u"\u092A\u091B\u093F", -1, 1),
Among(u"\u0915\u0940", -1, 2),
Among(u"\u0932\u0947", -1, 1),
Among(u"\u0915\u0948", -1, 2),
Among(u"\u0938\u0901\u0917\u0948", -1, 1),
Among(u"\u092E\u0948", -1, 1),
Among(u"\u0915\u094B", -1, 2)
]
a_1 = [
Among(u"\u0901", -1, -1),
Among(u"\u0902", -1, -1),
Among(u"\u0948", -1, -1)
]
a_2 = [
Among(u"\u0901", -1, 1),
Among(u"\u0902", -1, 1),
Among(u"\u0948", -1, 2)
]
a_3 = [
Among(u"\u0925\u093F\u090F", -1, 1),
Among(u"\u091B", -1, 1),
Among(u"\u0907\u091B", 1, 1),
Among(u"\u090F\u091B", 1, 1),
Among(u"\u093F\u091B", 1, 1),
Among(u"\u0947\u091B", 1, 1),
Among(u"\u0928\u0947\u091B", 5, 1),
Among(u"\u0939\u0941\u0928\u0947\u091B", 6, 1),
Among(u"\u0907\u0928\u094D\u091B", 1, 1),
Among(u"\u093F\u0928\u094D\u091B", 1, 1),
Among(u"\u0939\u0941\u0928\u094D\u091B", 1, 1),
Among(u"\u090F\u0915\u093E", -1, 1),
Among(u"\u0907\u090F\u0915\u093E", 11, 1),
Among(u"\u093F\u090F\u0915\u093E", 11, 1),
Among(u"\u0947\u0915\u093E", -1, 1),
Among(u"\u0928\u0947\u0915\u093E", 14, 1),
Among(u"\u0926\u093E", -1, 1),
Among(u"\u0907\u0926\u093E", 16, 1),
Among(u"\u093F\u0926\u093E", 16, 1),
Among(u"\u0926\u0947\u0916\u093F", -1, 1),
Among(u"\u092E\u093E\u0925\u093F", -1, 1),
Among(u"\u090F\u0915\u0940", -1, 1),
Among(u"\u0907\u090F\u0915\u0940", 21, 1),
Among(u"\u093F\u090F\u0915\u0940", 21, 1),
Among(u"\u0947\u0915\u0940", -1, 1),
Among(u"\u0926\u0947\u0916\u0940", -1, 1),
Among(u"\u0925\u0940", -1, 1),
Among(u"\u0926\u0940", -1, 1),
Among(u"\u091B\u0941", -1, 1),
Among(u"\u090F\u091B\u0941", 28, 1),
Among(u"\u0947\u091B\u0941", 28, 1),
Among(u"\u0928\u0947\u091B\u0941", 30, 1),
Among(u"\u0928\u0941", -1, 1),
Among(u"\u0939\u0930\u0941", -1, 1),
Among(u"\u0939\u0930\u0942", -1, 1),
Among(u"\u091B\u0947", -1, 1),
Among(u"\u0925\u0947", -1, 1),
Among(u"\u0928\u0947", -1, 1),
Among(u"\u090F\u0915\u0948", -1, 1),
Among(u"\u0947\u0915\u0948", -1, 1),
Among(u"\u0928\u0947\u0915\u0948", 39, 1),
Among(u"\u0926\u0948", -1, 1),
Among(u"\u0907\u0926\u0948", 41, 1),
Among(u"\u093F\u0926\u0948", 41, 1),
Among(u"\u090F\u0915\u094B", -1, 1),
Among(u"\u0907\u090F\u0915\u094B", 44, 1),
Among(u"\u093F\u090F\u0915\u094B", 44, 1),
Among(u"\u0947\u0915\u094B", -1, 1),
Among(u"\u0928\u0947\u0915\u094B", 47, 1),
Among(u"\u0926\u094B", -1, 1),
Among(u"\u0907\u0926\u094B", 49, 1),
Among(u"\u093F\u0926\u094B", 49, 1),
Among(u"\u092F\u094B", -1, 1),
Among(u"\u0907\u092F\u094B", 52, 1),
Among(u"\u092D\u092F\u094B", 52, 1),
Among(u"\u093F\u092F\u094B", 52, 1),
Among(u"\u0925\u093F\u092F\u094B", 55, 1),
Among(u"\u0926\u093F\u092F\u094B", 55, 1),
Among(u"\u0925\u094D\u092F\u094B", 52, 1),
Among(u"\u091B\u094C", -1, 1),
Among(u"\u0907\u091B\u094C", 59, 1),
Among(u"\u090F\u091B\u094C", 59, 1),
Among(u"\u093F\u091B\u094C", 59, 1),
Among(u"\u0947\u091B\u094C", 59, 1),
Among(u"\u0928\u0947\u091B\u094C", 63, 1),
Among(u"\u092F\u094C", -1, 1),
Among(u"\u0925\u093F\u092F\u094C", 65, 1),
Among(u"\u091B\u094D\u092F\u094C", 65, 1),
Among(u"\u0925\u094D\u092F\u094C", 65, 1),
Among(u"\u091B\u0928\u094D", -1, 1),
Among(u"\u0907\u091B\u0928\u094D", 69, 1),
Among(u"\u090F\u091B\u0928\u094D", 69, 1),
Among(u"\u093F\u091B\u0928\u094D", 69, 1),
Among(u"\u0947\u091B\u0928\u094D", 69, 1),
Among(u"\u0928\u0947\u091B\u0928\u094D", 73, 1),
Among(u"\u0932\u093E\u0928\u094D", -1, 1),
Among(u"\u091B\u093F\u0928\u094D", -1, 1),
Among(u"\u0925\u093F\u0928\u094D", -1, 1),
Among(u"\u092A\u0930\u094D", -1, 1),
Among(u"\u0907\u0938\u094D", -1, 1),
Among(u"\u0925\u093F\u0907\u0938\u094D", 79, 1),
Among(u"\u091B\u0938\u094D", -1, 1),
Among(u"\u0907\u091B\u0938\u094D", 81, 1),
Among(u"\u090F\u091B\u0938\u094D", 81, 1),
Among(u"\u093F\u091B\u0938\u094D", 81, 1),
Among(u"\u0947\u091B\u0938\u094D", 81, 1),
Among(u"\u0928\u0947\u091B\u0938\u094D", 85, 1),
Among(u"\u093F\u0938\u094D", -1, 1),
Among(u"\u0925\u093F\u0938\u094D", 87, 1),
Among(u"\u091B\u0947\u0938\u094D", -1, 1),
Among(u"\u0939\u094B\u0938\u094D", -1, 1)
]
def __r_remove_category_1(self):
# (, line 53
# [, line 54
self.ket = self.cursor
# substring, line 54
among_var = self.find_among_b(NepaliStemmer.a_0)
if among_var == 0:
return False
# ], line 54
self.bra = self.cursor
if among_var == 1:
# (, line 58
# delete, line 58
if not self.slice_del():
return False
elif among_var == 2:
# (, line 59
# or, line 59
try:
v_1 = self.limit - self.cursor
try:
# (, line 59
# or, line 59
try:
v_2 = self.limit - self.cursor
try:
# literal, line 59
if not self.eq_s_b(u"\u090F"):
raise lab3()
raise lab2()
except lab3: pass
self.cursor = self.limit - v_2
# literal, line 59
if not self.eq_s_b(u"\u0947"):
raise lab1()
except lab2: pass
# (, line 59
raise lab0()
except lab1: pass
self.cursor = self.limit - v_1
# delete, line 59
if not self.slice_del():
return False
except lab0: pass
return True
def __r_check_category_2(self):
# (, line 63
# [, line 64
self.ket = self.cursor
# substring, line 64
if self.find_among_b(NepaliStemmer.a_1) == 0:
return False
# ], line 64
self.bra = self.cursor
return True
def __r_remove_category_2(self):
# (, line 69
# [, line 70
self.ket = self.cursor
# substring, line 70
among_var = self.find_among_b(NepaliStemmer.a_2)
if among_var == 0:
return False
# ], line 70
self.bra = self.cursor
if among_var == 1:
# (, line 71
# or, line 71
try:
v_1 = self.limit - self.cursor
try:
# literal, line 71
if not self.eq_s_b(u"\u092F\u094C"):
raise lab1()
raise lab0()
except lab1: pass
self.cursor = self.limit - v_1
try:
# literal, line 71
if not self.eq_s_b(u"\u091B\u094C"):
raise lab2()
raise lab0()
except lab2: pass
self.cursor = self.limit - v_1
try:
# literal, line 71
if not self.eq_s_b(u"\u0928\u094C"):
raise lab3()
raise lab0()
except lab3: pass
self.cursor = self.limit - v_1
# literal, line 71
if not self.eq_s_b(u"\u0925\u0947"):
return False
except lab0: pass
# delete, line 71
if not self.slice_del():
return False
elif among_var == 2:
# (, line 72
# literal, line 72
if not self.eq_s_b(u"\u0924\u094D\u0930"):
return False
# delete, line 72
if not self.slice_del():
return False
return True
def __r_remove_category_3(self):
# (, line 76
# [, line 77
self.ket = self.cursor
# substring, line 77
if self.find_among_b(NepaliStemmer.a_3) == 0:
return False
# ], line 77
self.bra = self.cursor
# (, line 79
# delete, line 79
if not self.slice_del():
return False
return True
def _stem(self):
# (, line 85
# backwards, line 86
self.limit_backward = self.cursor
self.cursor = self.limit
# (, line 86
# do, line 87
v_1 = self.limit - self.cursor
try:
# call remove_category_1, line 87
if not self.__r_remove_category_1():
raise lab0()
except lab0: pass
self.cursor = self.limit - v_1
# do, line 88
v_2 = self.limit - self.cursor
try:
# (, line 88
# repeat, line 89
try:
while True:
try:
v_3 = self.limit - self.cursor
try:
# (, line 89
# do, line 89
v_4 = self.limit - self.cursor
try:
# (, line 89
# and, line 89
v_5 = self.limit - self.cursor
# call check_category_2, line 89
if not self.__r_check_category_2():
raise lab5()
self.cursor = self.limit - v_5
# call remove_category_2, line 89
if not self.__r_remove_category_2():
raise lab5()
except lab5: pass
self.cursor = self.limit - v_4
# call remove_category_3, line 89
if not self.__r_remove_category_3():
raise lab4()
raise lab3()
except lab4: pass
self.cursor = self.limit - v_3
raise lab2()
except lab3: pass
except lab2: pass
except lab1: pass
self.cursor = self.limit - v_2
self.cursor = self.limit_backward
return True
class lab0(BaseException): pass
class lab1(BaseException): pass
class lab2(BaseException): pass
class lab3(BaseException): pass
class lab4(BaseException): pass
class lab5(BaseException): pass
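Usage sketch, assuming the surrounding snowballstemmer package, whose BaseStemmer exposes stemWord() and stemWords(); the sample token is illustrative.

stemmer = NepaliStemmer()
print(stemmer.stemWord(u"\u0917\u0930\u0947\u0915\u093E"))  # "गरेका"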
5de9426d377676b21fdbfe522c80d5ca38d85f47 | 7,000 | bzl | Python | go/def.bzl | bobg/rules_go | fd11dd2768669dc2cc1f3a11f2b0b81d84e81c32 | ["Apache-2.0"] | null | null | null | 1 | 2022-02-18T15:47:32.000Z | 2022-02-18T15:47:32.000Z | null | null | null
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public definitions for Go rules.
All public Go rules, providers, and other definitions are imported and
re-exported in this file. This allows the real location of definitions
to change for easier maintenance.
Definitions outside this file are private unless otherwise noted, and
may change without notice.
"""
load(
"//go/private:context.bzl",
_go_context = "go_context",
)
load(
"//go/private:providers.bzl",
_GoArchive = "GoArchive",
_GoArchiveData = "GoArchiveData",
_GoLibrary = "GoLibrary",
_GoPath = "GoPath",
_GoSDK = "GoSDK",
_GoSource = "GoSource",
)
load(
"//go/private/rules:sdk.bzl",
_go_sdk = "go_sdk",
)
load(
"//go/private:go_toolchain.bzl",
_declare_toolchains = "declare_toolchains",
_go_toolchain = "go_toolchain",
)
load(
"//go/private/rules:wrappers.bzl",
_go_binary_macro = "go_binary_macro",
_go_library_macro = "go_library_macro",
_go_test_macro = "go_test_macro",
)
load(
"//go/private/rules:source.bzl",
_go_source = "go_source",
)
load(
"//extras:embed_data.bzl",
_go_embed_data = "go_embed_data",
)
load(
"//go/private/tools:path.bzl",
_go_path = "go_path",
)
load(
"//go/private/rules:library.bzl",
_go_tool_library = "go_tool_library",
)
load(
"//go/private/rules:nogo.bzl",
_nogo = "nogo_wrapper",
)
# TOOLS_NOGO is a list of all analysis passes in
# golang.org/x/tools/go/analysis/passes.
# This is not backward compatible, so use caution when depending on this --
# new analyses may discover issues in existing builds.
TOOLS_NOGO = [
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
# TODO(#2396): pass raw cgo sources to cgocall and re-enable.
# "@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
"@org_golang_x_tools//go/analysis/passes/composite:go_default_library",
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
"@org_golang_x_tools//go/analysis/passes/errorsas:go_default_library",
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
"@org_golang_x_tools//go/analysis/passes/ifaceassert:go_default_library",
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
"@org_golang_x_tools//go/analysis/passes/lostcancel:go_default_library",
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
"@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
"@org_golang_x_tools//go/analysis/passes/sortslice:go_default_library",
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
"@org_golang_x_tools//go/analysis/passes/stringintconv:go_default_library",
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
"@org_golang_x_tools//go/analysis/passes/testinggoroutine:go_default_library",
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library",
]
# Current version or next version to be tagged. Gazelle and other tools may
# check this to determine compatibility.
RULES_GO_VERSION = "0.30.0"
declare_toolchains = _declare_toolchains
go_context = _go_context
go_embed_data = _go_embed_data
go_sdk = _go_sdk
go_tool_library = _go_tool_library
go_toolchain = _go_toolchain
nogo = _nogo
# See go/providers.rst#GoLibrary for full documentation.
GoLibrary = _GoLibrary
# See go/providers.rst#GoSource for full documentation.
GoSource = _GoSource
# See go/providers.rst#GoPath for full documentation.
GoPath = _GoPath
# See go/providers.rst#GoArchive for full documentation.
GoArchive = _GoArchive
# See go/providers.rst#GoArchiveData for full documentation.
GoArchiveData = _GoArchiveData
# See go/providers.rst#GoSDK for full documentation.
GoSDK = _GoSDK
# See docs/go/core/rules.md#go_library for full documentation.
go_library = _go_library_macro
# See docs/go/core/rules.md#go_binary for full documentation.
go_binary = _go_binary_macro
# See docs/go/core/rules.md#go_test for full documentation.
go_test = _go_test_macro
# See docs/go/core/rules.md#go_test for full documentation.
go_source = _go_source
# See docs/go/core/rules.md#go_path for full documentation.
go_path = _go_path
def go_vet_test(*args, **kwargs):
fail("The go_vet_test rule has been removed. Please migrate to nogo instead, which supports vet tests.")
def go_rule(**kwargs):
fail("The go_rule function has been removed. Use rule directly instead. See https://github.com/bazelbuild/rules_go/blob/master/go/toolchains.rst#writing-new-go-rules")
def go_rules_dependencies():
_moved("go_rules_dependencies")
def go_register_toolchains(**kwargs):
_moved("go_register_toolchains")
def go_download_sdk(**kwargs):
_moved("go_download_sdk")
def go_host_sdk(**kwargs):
_moved("go_host_sdk")
def go_local_sdk(**kwargs):
_moved("go_local_sdk")
def go_wrap_sdk(**kwargs):
_moved("go_wrap_sdK")
def _moved(name):
fail(name + " has moved. Please load from " +
" @io_bazel_rules_go//go:deps.bzl instead of def.bzl.")
5dfbc6d76c2633ab81a042a9da06802874d69efe | 2,986 | py | Python | mushroom_rl/utils/plots/common_plots.py | PuzeLiu/mushroom-rl | 99942b425e66b4ddcc26009d7105dde23841e95d | ["MIT"] | 344 | 2020-01-10T09:45:02.000Z | 2022-03-30T09:48:28.000Z | AmmarFahmy/mushroom-rl | 2625ee7f64d5613b3b9fba00f0b7a39fece88ca5 | ["MIT"] | 44 | 2020-01-23T03:00:56.000Z | 2022-03-25T17:14:22.000Z | 93 | 2020-01-10T21:17:58.000Z | 2022-03-31T17:58:52.000Z
from mushroom_rl.utils.plots import PlotItemBuffer, DataBuffer
from mushroom_rl.utils.plots.plot_item_buffer import PlotItemBufferLimited
class RewardPerStep(PlotItemBuffer):
"""
Class that represents a plot for the reward at every step.
"""
def __init__(self, plot_buffer):
"""
Constructor.
Args:
plot_buffer (DataBuffer): data buffer to be used.
"""
title = "Step_Reward"
curves_params = [dict(data_buffer=plot_buffer)]
super().__init__(title, curves_params)
class RewardPerEpisode(PlotItemBuffer):
"""
Class that represents a plot for the accumulated reward per episode.
"""
def __init__(self, plot_buffer):
"""
Constructor.
Args:
plot_buffer (DataBuffer): data buffer to be used.
"""
title = "Episode_Reward"
curves_params = [dict(data_buffer=plot_buffer)]
super().__init__(title, curves_params)
class Actions(PlotItemBufferLimited):
"""
Class that represents a plot for the actions.
"""
def __init__(self, plot_buffers, maxs=None, mins=None):
"""
Constructor.
Args:
            plot_buffers (list): data buffers to be used;
maxs(list, None): list of max values of each data buffer plotted.
If an element is None, no max line is drawn;
mins(list, None): list of min values of each data buffer plotted.
If an element is None, no min line is drawn.
"""
title = "Actions"
super().__init__(title, plot_buffers, maxs=maxs, mins=mins)
class Observations(PlotItemBufferLimited):
"""
Class that represents a plot for the observations.
"""
def __init__(self, plot_buffers, maxs=None, mins=None, dotted_limits=None):
"""
Constructor.
Args:
            plot_buffers (list): data buffers to be used;
maxs(list, None): list of max values of each data buffer plotted.
If an element is None, no max line is drawn;
mins(list, None): list of min values of each data buffer plotted.
If an element is None, no min line is drawn.
dotted_limits (list, None): list of booleans. If True, the
corresponding limit is dotted; otherwise, it is printed as a
solid line.
"""
title = "Observations"
super().__init__(title, plot_buffers, maxs=maxs, mins=mins,
dotted_limits=dotted_limits)
class LenOfEpisodeTraining(PlotItemBuffer):
"""
Class that represents a plot for the length of the episode.
"""
def __init__(self, plot_buffer):
"""
Constructor.
Args:
            plot_buffer (DataBuffer): data buffer to be used.
"""
title = "Len of Episode"
plot_params = [dict(data_buffer=plot_buffer)]
super().__init__(title, plot_params)
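A wiring sketch for the first class above. DataBuffer is imported from the same package as in this file; its constructor argument and update() method are assumptions about the library API, not shown in this file.

from mushroom_rl.utils.plots import DataBuffer

reward_buffer = DataBuffer('reward')      # assumed constructor signature
reward_plot = RewardPerStep(reward_buffer)
reward_buffer.update([1.0])               # assumed DataBuffer API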
b909e91c70f62d03b4cb515c5e970eae1b71dc91 | 585 | py | Python | pycfmodel/model/resources/properties/policy.py | donatoaz/pycfmodel | 1586e290b67d2347493dd4a77d2b0c8ee6c0936b | ["Apache-2.0"] | 23 | 2018-06-28T10:45:01.000Z | 2021-05-07T11:12:39.000Z | 27 | 2019-03-09T08:33:22.000Z | 2022-03-03T14:59:11.000Z | 7 | 2019-03-09T02:18:18.000Z | 2021-07-22T20:33:09.000Z
from pycfmodel.model.resources.properties.policy_document import PolicyDocument
from pycfmodel.model.resources.properties.property import Property
from pycfmodel.model.types import Resolvable, ResolvableStr
class Policy(Property):
"""
Contains information about an attached policy.
Properties:
- PolicyDocument: A [policy document][pycfmodel.model.resources.properties.policy_document.PolicyDocument] object.
- PolicyName: The friendly name (not ARN) identifying the policy.
"""
PolicyName: ResolvableStr
PolicyDocument: Resolvable[PolicyDocument]
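pycfmodel models are pydantic models, so a Policy can be parsed from a plain CloudFormation-style dict; the statement content below is illustrative.

policy = Policy(
    PolicyName="root",
    PolicyDocument={
        "Version": "2012-10-17",
        "Statement": [{"Effect": "Allow", "Action": "*", "Resource": "*"}],
    },
)
print(policy.PolicyName)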
f8f18448ffd7145536a1fb795da3a1417cfe03f4 | 255 | py | Python | lm5/input.py | jmcph4/lm5 | cd6f480ad70a3769090eab6ac3f3d47378a965de | ["MIT"] | 4 | 2018-09-25T07:29:05.000Z | 2022-02-22T01:56:36.000Z | null | null | null | 2 | 2018-09-25T07:29:06.000Z | 2021-12-30T07:19:13.000Z
from copy import deepcopy


class Input(object):
def __init__(self, type, data):
self.__type = type
self.__data = deepcopy(data)
def __repr__(self):
return repr(self.__data)
def __str__(self):
return str(self.__type) + str(self.__data)
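A usage sketch showing why the constructor deep-copies: later mutation of the caller's object does not leak into the stored data.

data = [1, 2, 3]
inp = Input("list", data)
data.append(4)     # does not affect inp, thanks to deepcopy
print(repr(inp))   # [1, 2, 3]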
f8f3fc84d2eec11a3d1fe8de179b44f825aeb0e4 | 419 | py | Python | BanditSim/__init__.py | AJB0211/BanditSim | 5426486b40c35492049b09f9b57eb18ad5d6ce63 | ["MIT"] | null | null | null
from .multiarmedbandit import MultiArmedBandit
from .eps_greedy_constant_stepsize import EpsilonGreedyConstantStepsize
from .greedy_constant_stepsize import GreedyConstantStepsize
from .epsilon_greedy_average_step import EpsilonGreedyAverageStep
from .greedy_average_step import GreedyAverageStep
from .greedy_bayes_update import GreedyBayesianUpdate
from .eps_greedy_bayes_update import EpsilonGreedyBayesianUpdate
f8f8dae5df2040a52619a0f46630de1b8ffbe445 | 533 | py | Python | heat/initial_data.py | kjetil-lye/ismo_heat | 09776b740a0543e270417af653d2a047c94f1b50 | ["MIT"] | null | null | null | 6 | 2020-11-13T19:04:16.000Z | 2022-02-10T02:10:50.000Z | 1 | 2021-03-26T06:53:19.000Z | 2021-03-26T06:53:19.000Z
import numpy
class InitialDataControlSine:
def __init__(self, coefficients):
self.coefficients = coefficients
def __call__(self, x):
u = numpy.zeros_like(x)
for k, coefficient in enumerate(self.coefficients):
u += coefficient * numpy.sin(k * numpy.pi * x)
return u
def exact_solution(self, x, t, q=1):
return sum(coefficient * numpy.exp(-q * (k * numpy.pi) ** 2 * t) * numpy.sin(
k * numpy.pi * x) for k, coefficient in enumerate(self.coefficients))
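A quick sanity check, assuming the q=1 heat equation on [0, 1]: at t = 0 the exact solution must reproduce the initial data, since exp(0) = 1 in every term.

import numpy

initial = InitialDataControlSine([1.0, 0.5])
x = numpy.linspace(0, 1, 101)
assert numpy.allclose(initial(x), initial.exact_solution(x, t=0.0))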
5d0889f3f4e69f4a0ebb469755c280704d4811e2 | 326 | py | Python | test_modules/language_dictionary_test.py | 1goodday/Google-Dictionary-Pronunciation.ankiaddon | 35837802e41d81733aec656fbf4ad1c8e4aeec5e | ["MIT"] | 1 | 2021-10-02T13:16:29.000Z | 2021-10-02T13:16:29.000Z | 2 | 2021-09-08T14:08:33.000Z | 2021-10-10T04:35:08.000Z | 1goodday/Google-Dictionary.ankiaddon | null | null | null
import csv
with open("files/ISO-639-1_Codes.csv", mode='r') as _iso_639_1_codes_file:
    _iso_639_1_codes_dictreader = csv.DictReader(_iso_639_1_codes_file)
    _iso_639_1_codes_dict: dict = {}
    for _row in _iso_639_1_codes_dictreader:
        _iso_639_1_codes_dict[_row['ISO-639-1 Code']] = _row['Language']
print(str(_iso_639_1_codes_dict))
5d5caf5c5d1415de34379e45359c322cac37e6ff | 2,766 | py | Python | lib/adv_model.py | chawins/entangle-rep | 3e9e0d6e7536b0de0e35d7f8717f2ccc8e887759 | ["MIT"] | 15 | 2019-06-30T12:30:17.000Z | 2021-12-07T20:20:36.000Z | 2 | 2020-06-11T10:10:52.000Z | 2021-12-21T08:50:33.000Z | 9 | 2019-07-09T14:52:30.000Z | 2020-10-27T19:18:34.000Z
import torch
import torch.nn as nn
import torch.nn.functional as F
class PGDModel(nn.Module):
"""
code adapted from
https://github.com/karandwivedi42/adversarial/blob/master/main.py
"""
def __init__(self, basic_net, config):
super(PGDModel, self).__init__()
self.basic_net = basic_net
self.rand = config['random_start']
self.step_size = config['step_size']
self.epsilon = config['epsilon']
self.num_steps = config['num_steps']
assert config['loss_func'] == 'xent', 'Only xent supported for now.'
def forward(self, inputs, targets, attack=False):
if not attack:
return self.basic_net(inputs)
x = inputs.clone()
if self.rand:
x = x + torch.zeros_like(x).uniform_(-self.epsilon, self.epsilon)
for _ in range(self.num_steps):
x.requires_grad_()
with torch.enable_grad():
logits = self.basic_net(x)
loss = F.cross_entropy(logits, targets, reduction='sum')
grad = torch.autograd.grad(loss, x)[0]
x = x.detach() + self.step_size * torch.sign(grad.detach())
x = torch.min(torch.max(x, inputs.detach() - self.epsilon),
inputs.detach() + self.epsilon)
x = torch.clamp(x, 0, 1)
return self.basic_net(x)
class PGDL2Model(nn.Module):
"""
code adapted from
https://github.com/karandwivedi42/adversarial/blob/master/main.py
"""
def __init__(self, basic_net, config):
super(PGDL2Model, self).__init__()
self.basic_net = basic_net
self.epsilon = config['epsilon']
self.rand = config['random_start']
self.step_size = config['step_size']
self.num_steps = config['num_steps']
assert config['loss_func'] == 'xent', 'Only xent supported for now.'
def forward(self, inputs, targets, attack=False):
if not attack:
return self.basic_net(inputs)
x = inputs.clone()
if self.rand:
x = x + torch.zeros_like(x).normal_(0, self.step_size)
for _ in range(self.num_steps):
x.requires_grad_()
with torch.enable_grad():
logits = self.basic_net(x)
loss = F.cross_entropy(logits, targets, reduction='sum')
grad = torch.autograd.grad(loss, x)[0].detach()
grad_norm = grad.view(x.size(0), -1).norm(2, 1)
delta = self.step_size * grad / grad_norm.view(x.size(0), 1, 1, 1)
x = x.detach() + delta
diff = (x - inputs).view(x.size(0), -1).renorm(2, 0, self.epsilon)
x = diff.view(x.size()) + inputs
x.clamp_(0, 1)
return self.basic_net(x)
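Both wrappers take the same config dict and run the attack only when forward() is called with attack=True. A hypothetical usage sketch (the toy net, batch shape, and hyperparameters below are placeholders, not values from the repo), assuming inputs are images scaled to [0, 1]:
import torch
import torch.nn as nn
net = nn.Sequential(nn.Flatten(), nn.Linear(3 * 32 * 32, 10))  # toy stand-in classifier
config = {
    'random_start': True,   # start from a random point in the epsilon-ball
    'step_size': 2 / 255,   # per-iteration step
    'epsilon': 8 / 255,     # attack budget
    'num_steps': 10,        # PGD iterations
    'loss_func': 'xent',    # the only loss the classes accept
}
model = PGDModel(net, config)
x = torch.rand(4, 3, 32, 32)           # fake batch in [0, 1]
y = torch.randint(0, 10, (4,))
clean_logits = model(x, y)             # attack=False: plain forward pass
adv_logits = model(x, y, attack=True)  # runs the PGD loop before classifying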
| 35.012658
| 78
| 0.578453
| 360
| 2,766
| 4.272222
| 0.233333
| 0.062419
| 0.078023
| 0.041612
| 0.748375
| 0.70091
| 0.70091
| 0.673602
| 0.63459
| 0.63459
| 0
| 0.012697
| 0.288142
| 2,766
| 78
| 79
| 35.461538
| 0.76841
| 0.060376
| 0
| 0.631579
| 0
| 0
| 0.063281
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 1
| 0.070175
| false
| 0
| 0.052632
| 0
| 0.22807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
5d61828a9a51cb5ce1865c213ffd2c5903a688a4
| 47
|
py
|
Python
|
prereise/gather/solardata/tests/__init__.py
|
terrywqf/PreREISE
|
f8052dd37091eaa15024725d5c92a3ef0ee311ee
|
[
"MIT"
] | null | null | null |
prereise/gather/solardata/tests/__init__.py
|
terrywqf/PreREISE
|
f8052dd37091eaa15024725d5c92a3ef0ee311ee
|
[
"MIT"
] | null | null | null |
prereise/gather/solardata/tests/__init__.py
|
terrywqf/PreREISE
|
f8052dd37091eaa15024725d5c92a3ef0ee311ee
|
[
"MIT"
] | null | null | null |
__all__ = ["mock_pv_info", "test_pv_tracking"]
| 23.5
| 46
| 0.744681
| 7
| 47
| 3.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.627907
| 0
| 0
| 0
| 0
| 0
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
5d65f544314984a2fee60579b78ec312b1835ccc
| 491
|
py
|
Python
|
tests/moz_library/rental_books_test.py
|
mozkzki/moz-library
|
fb925414405a9fcba8bb7194cf983ba18c920e2f
|
[
"MIT"
] | null | null | null |
tests/moz_library/rental_books_test.py
|
mozkzki/moz-library
|
fb925414405a9fcba8bb7194cf983ba18c920e2f
|
[
"MIT"
] | 35
|
2021-10-09T13:08:33.000Z
|
2022-03-29T14:26:59.000Z
|
tests/moz_library/rental_books_test.py
|
mozkzki/moz-library
|
fb925414405a9fcba8bb7194cf983ba18c920e2f
|
[
"MIT"
] | null | null | null |
import pytest
from moz_library.rental_books import RentalBooks
class TestRentalBooks:
@pytest.fixture()
def books1(self):
return RentalBooks()
def test_can_extend_period_1(self, books1):
assert books1._can_extend_period("延長できません") is False
def test_can_extend_period_2(self, books1):
assert books1._can_extend_period("すでに延長されています") is False
def test_can_extend_period_3(self, books1):
assert books1._can_extend_period("それ以外") is True
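The three tests pin down the contract of _can_extend_period: it returns False for the statuses meaning "cannot extend" and "already extended", and True otherwise. A hypothetical implementation consistent with them (the real moz_library code may differ):
class RentalBooksSketch:
    # "延長できません" = cannot extend; "すでに延長されています" = already extended
    _BLOCKED_STATUSES = ("延長できません", "すでに延長されています")
    def _can_extend_period(self, status: str) -> bool:
        # Extension is possible unless the page reports a blocked status.
        return status not in self._BLOCKED_STATUSES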
| 27.277778
| 64
| 0.737271
| 66
| 491
| 5.136364
| 0.439394
| 0.159292
| 0.265487
| 0.141593
| 0.563422
| 0.498525
| 0.498525
| 0
| 0
| 0
| 0
| 0.025
| 0.185336
| 491
| 17
| 65
| 28.882353
| 0.8225
| 0
| 0
| 0
| 0
| 0
| 0.044807
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.083333
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
5d7378673807f7e0283f1553a575bc82a4166826
| 390
|
py
|
Python
|
utilities.py
|
armandok/pySLAM-D
|
ef7398806e021885b29702adf55acbedaf544ce6
|
[
"MIT"
] | 10
|
2020-12-24T16:40:46.000Z
|
2022-02-01T18:09:13.000Z
|
utilities.py
|
armandok/pySLAM-D
|
ef7398806e021885b29702adf55acbedaf544ce6
|
[
"MIT"
] | null | null | null |
utilities.py
|
armandok/pySLAM-D
|
ef7398806e021885b29702adf55acbedaf544ce6
|
[
"MIT"
] | null | null | null |
import numpy as np
def rot_to_angle(rot):
return np.arccos(0.5*np.trace(rot)-0.5)
def rot_to_heading(rot):
# This function calculates the heading angle of the rot matrix w.r.t. the y-axis
new_rot = rot[0:3:2, 0:3:2] # remove the mid row and column corresponding to the y-axis
new_rot = new_rot/np.linalg.det(new_rot)
return np.arctan2(new_rot[1, 0], new_rot[0, 0])
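A quick numeric check of the two helpers, assuming a standard rotation matrix about the y-axis (note that with this Ry convention the extracted heading comes out with the opposite sign):
theta = 0.3
rot_y = np.array([[np.cos(theta), 0.0, np.sin(theta)],
                  [0.0, 1.0, 0.0],
                  [-np.sin(theta), 0.0, np.cos(theta)]])
assert np.isclose(rot_to_angle(rot_y), theta)     # trace of Ry is 1 + 2*cos(theta)
assert np.isclose(rot_to_heading(rot_y), -theta)  # arctan2(-sin, cos) = -theta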
| 30
| 92
| 0.694872
| 80
| 390
| 3.2625
| 0.475
| 0.137931
| 0.061303
| 0.084291
| 0.10728
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04717
| 0.184615
| 390
| 12
| 93
| 32.5
| 0.773585
| 0.348718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
5d75bb550217d28f2cb95a0798b2a193f98c5dc4
| 190
|
py
|
Python
|
publication-erdf/flask_service.py
|
ticapix/automated-tasks
|
a0c73ad2939c6f1a2d91aea6fd309b5005455191
|
[
"Unlicense"
] | null | null | null |
publication-erdf/flask_service.py
|
ticapix/automated-tasks
|
a0c73ad2939c6f1a2d91aea6fd309b5005455191
|
[
"Unlicense"
] | null | null | null |
publication-erdf/flask_service.py
|
ticapix/automated-tasks
|
a0c73ad2939c6f1a2d91aea6fd309b5005455191
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python3
from flask import Flask
app = Flask(__name__)
@app.route('/process-email')
def process_email():
return "Hello World!"
if __name__ == "__main__":
app.run()
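A hypothetical smoke test using Flask's built-in test client, so the route can be exercised without binding a port:
with app.test_client() as client:
    resp = client.get('/process-email')
    assert resp.status_code == 200
    assert resp.data == b"Hello World!"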
| 14.615385
| 28
| 0.673684
| 26
| 190
| 4.423077
| 0.730769
| 0.208696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.168421
| 190
| 12
| 29
| 15.833333
| 0.721519
| 0.110526
| 0
| 0
| 0
| 0
| 0.202381
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 0.428571
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 3
|
53713acb71d2f50fa7d7472d8e125a179f1d5d33
| 417
|
py
|
Python
|
backend/links/sentence.py
|
dla1635/hyLink
|
8f3d1b6b0cad57ce2f6861583eb2b523f9fceee7
|
[
"MIT"
] | 1
|
2020-07-17T05:57:47.000Z
|
2020-07-17T05:57:47.000Z
|
backend/links/sentence.py
|
dla1635/hyLink
|
8f3d1b6b0cad57ce2f6861583eb2b523f9fceee7
|
[
"MIT"
] | 11
|
2020-06-06T00:30:23.000Z
|
2022-02-26T19:59:06.000Z
|
backend/links/sentence.py
|
dla1635/hylink
|
8f3d1b6b0cad57ce2f6861583eb2b523f9fceee7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from collections import Counter
from konlpy.tag import Okt
class Sentence(object):
okt = Okt()
def __init__(self, text, index=0):
self.index = index
self.text = text.strip()
self.tokens = self.okt.phrases(self.text)
self.bow = Counter(self.tokens)
def __str__(self):
return self.text
def __hash__(self):
return self.index
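A hypothetical usage sketch, assuming konlpy and its Okt tagger are installed (they require a Java runtime); the sample sentence is a placeholder:
s = Sentence("한국어 형태소 분석 예시 문장입니다.", index=3)  # "An example sentence for Korean morphological analysis."
print(str(s))    # the stripped text, via __str__
print(hash(s))   # 3: __hash__ returns the sentence's index
print(s.bow)     # Counter over the phrases Okt extracted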
| 18.954545
| 49
| 0.611511
| 54
| 417
| 4.5
| 0.481481
| 0.131687
| 0.115226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006579
| 0.270983
| 417
| 21
| 50
| 19.857143
| 0.792763
| 0.05036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230769
| false
| 0
| 0.153846
| 0.153846
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
53898a41d0b3979d97ed59d9bf3e85e1664af2da
| 103
|
py
|
Python
|
programacao basica/7.py
|
m-brito/Neps-Academy
|
0d962fb921d74c5f97f10fcdd8a0f464c0ccdb14
|
[
"MIT"
] | null | null | null |
programacao basica/7.py
|
m-brito/Neps-Academy
|
0d962fb921d74c5f97f10fcdd8a0f464c0ccdb14
|
[
"MIT"
] | null | null | null |
programacao basica/7.py
|
m-brito/Neps-Academy
|
0d962fb921d74c5f97f10fcdd8a0f464c0ccdb14
|
[
"MIT"
] | null | null | null |
bino = int(input())
cino = int(input())
if (bino+cino)%2==0:
print("Bino")
else:
print("Cino")
| 14.714286
| 20
| 0.563107
| 16
| 103
| 3.625
| 0.5625
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.184466
| 103
| 6
| 21
| 17.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.07767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
539b64bd9ed2668ae9a573fa432b5a05793c8032
| 109
|
py
|
Python
|
test/run/t344.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t344.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t344.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
for ch in "Hello world!":
d = ord(ch)
h = hex(d)
o = oct(d)
b = bin(d)
print ch, d, h, o, b
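The snippet above uses Python 2 print syntax (skulpt historically targeted Python 2); an equivalent Python 3 sketch:
for ch in "Hello world!":
    d = ord(ch)   # code point
    h = hex(d)    # e.g. '0x48'
    o = oct(d)    # e.g. '0o110'
    b = bin(d)    # e.g. '0b1001000'
    print(ch, d, h, o, b)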
| 12.111111
| 25
| 0.449541
| 23
| 109
| 2.130435
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.366972
| 109
| 8
| 26
| 13.625
| 0.710145
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.166667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
53c5781ea07cd092d5d5320da909512506460ef4
| 184
|
py
|
Python
|
python/helpers.py
|
cdacos/astrophysics_with_a_pc
|
b0017856005a4771fbd89c8137fb320b72b1b633
|
[
"FSFAP"
] | null | null | null |
python/helpers.py
|
cdacos/astrophysics_with_a_pc
|
b0017856005a4771fbd89c8137fb320b72b1b633
|
[
"FSFAP"
] | null | null | null |
python/helpers.py
|
cdacos/astrophysics_with_a_pc
|
b0017856005a4771fbd89c8137fb320b72b1b633
|
[
"FSFAP"
] | 1
|
2021-03-14T23:13:28.000Z
|
2021-03-14T23:13:28.000Z
|
import sys
def start_parameter(text, i):
if len(sys.argv) > i:
print('{0}{1}'.format(text, sys.argv[i]))
return float(sys.argv[i])
else:
return float(raw_input(text))
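raw_input marks the snippet above as Python 2; a Python 3 port of the same helper:
import sys
def start_parameter(text, i):
    # Return argv[i] as a float if it was supplied, else prompt the user.
    if len(sys.argv) > i:
        print('{0}{1}'.format(text, sys.argv[i]))
        return float(sys.argv[i])
    return float(input(text))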
| 20.444444
| 45
| 0.63587
| 31
| 184
| 3.709677
| 0.612903
| 0.182609
| 0.208696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013245
| 0.179348
| 184
| 8
| 46
| 23
| 0.748344
| 0
| 0
| 0
| 0
| 0
| 0.032609
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.571429
| 0.142857
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
53e86b46c3285488d7ebc41a01e6a577e706cb66
| 693
|
py
|
Python
|
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.3 on 2020-11-09 08:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Associations',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('association_name', models.CharField(max_length=100)),
('incharge', models.CharField(max_length=100)),
('about', models.CharField(max_length=500)),
('contacts', models.CharField(max_length=300)),
],
),
]
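A hypothetical models.py that would generate the migration above (Django adds the id AutoField implicitly):
from django.db import models
class Associations(models.Model):
    association_name = models.CharField(max_length=100)
    incharge = models.CharField(max_length=100)
    about = models.CharField(max_length=500)
    contacts = models.CharField(max_length=300)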
| 27.72
| 114
| 0.580087
| 70
| 693
| 5.628571
| 0.642857
| 0.152284
| 0.182741
| 0.243655
| 0.137056
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05499
| 0.291486
| 693
| 24
| 115
| 28.875
| 0.747454
| 0.064935
| 0
| 0
| 1
| 0
| 0.082043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
53e96f34f945ecef4aebd95bbb66a14049ee97c2
| 4,631
|
py
|
Python
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 4
|
2019-09-16T15:50:22.000Z
|
2021-04-08T15:32:48.000Z
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 3
|
2018-05-04T09:28:24.000Z
|
2018-12-03T09:00:31.000Z
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 1
|
2020-10-12T15:14:17.000Z
|
2020-10-12T15:14:17.000Z
|
"""Test PDS times modules."""
from datetime import datetime as dt
from pyvims.pds.times import (cassini2utc, cassini_time, dt_date, dt_doy, dt_iso,
dyear, pds_folder, pds_time, utc2cassini)
from pytest import approx, raises
def test_dt_iso():
"""Test parsing ISO time pattern."""
assert str(dt_iso('2005-02-14T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_iso('2005-02-14 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_iso('2005-02-14:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_iso('2005-02-14')) == '2005-02-14 00:00:00+00:00'
times = dt_iso('from 2005-02-14T18:02:29 to 2005-02-14T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_iso('2005-045')
def test_dt_doy():
"""Test parsing DOY time pattern."""
assert str(dt_doy('2005-045T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_doy('2005-045 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_doy('2005-045:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_doy('2005-045')) == '2005-02-14 00:00:00+00:00'
times = dt_doy('from 2005-045T18:02:29 to 2005-045T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_doy('2005-02-14')
def test_dt_date():
"""Test date pattern."""
assert str(dt_date('Feb 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Febr 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Feb 14, 2005', eod=True)) == '2005-02-14 23:59:59+00:00'
assert str(dt_date('to Feb 14, 2005')) == '2005-02-14 23:59:59+00:00'
times = dt_date('from Feb 14, 2005 through March 12, 2006')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 00:00:00+00:00'
assert str(times[1]) == '2006-03-12 23:59:59+00:00'
with raises(ValueError):
_ = dt_date('2005-02-14')
def test_pds_time():
"""Test PDS time parsing."""
assert str(pds_time('May 17, 2007')) == '2007-05-17 00:00:00+00:00'
assert str(pds_time('2010-274T00:00:00')) == '2010-10-01 00:00:00+00:00'
assert str(pds_time('2011-10-01T00:02:04.244')) == '2011-10-01 00:02:04.244000+00:00'
t0, t1 = pds_time('… May 17, 2007 through Jun 30, 2007')
assert str(t0) == '2007-05-17 00:00:00+00:00'
assert str(t1) == '2007-06-30 23:59:59+00:00'
t0, t1 = pds_time('… 2010-274T00:00:00 through 2010-365T23:59:59')
assert str(t0) == '2010-10-01 00:00:00+00:00'
assert str(t1) == '2010-12-31 23:59:59+00:00'
t0, t1 = pds_time('… 2011-10-01T00:02:04.244 through 2011-12-31T12:28:45.128')
assert str(t0) == '2011-10-01 00:02:04.244000+00:00'
assert str(t1) == '2011-12-31 12:28:45.128000+00:00'
t0, t1 = pds_time('2005015T175855_2005016T184233/')
assert str(t0) == '2005-01-15 17:58:55+00:00'
assert str(t1) == '2005-01-16 18:42:33+00:00'
with raises(ValueError):
_ = pds_time('No data available')
def test_cassini_time():
"""Test Cassini time parsing."""
assert cassini_time('v1487096932_1.qub') == 1487096932.0
assert cassini_time(1483230358.172) == 1483230358.172
with raises(ValueError):
_ = cassini_time('v123_1')
with raises(ValueError):
_ = cassini_time(123)
def test_cassini2utc():
"""Test Cassini time to UTC converter."""
assert str(cassini2utc('v1487096932_1')) == '2005-02-14 18:02:29'
assert str(cassini2utc(1483230358.172)) == '2005-01-01 00:00:00'
def test_utc2cassini():
"""Test UTC to Cassini time converter."""
assert utc2cassini('2005-02-14T18:02:29') == approx(1487096932.068, abs=1e-3)
times = utc2cassini('May 17, 2007 through Jun 30, 2007')
assert len(times) == 2
assert times[0] == approx(1558053238.602, abs=1e-3)
assert times[1] == approx(1561941262.879, abs=1e-3)
def test_pds_folder():
"""Test convert PDS folder as string."""
assert pds_folder('2005015T175855') == '2005-015T17:58:55'
assert pds_folder('2005015T175855_2005016T184233/') == \
'2005-015T17:58:55 2005-016T18:42:33'
def test_dyear():
"""Test decimal year."""
assert dyear('2005-01-01') == 2005.0
assert dyear('2005-12-31') == 2005.9973
assert dyear('2004-12-31') == 2004.9973
assert dyear(dt(2005, 1, 1)) == 2005.0
assert dyear(dt(2005, 12, 31)) == 2005.9973
assert dyear(dt(2004, 12, 31)) == 2004.9973
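The dyear assertions imply a decimal year of the form year + (day_of_year - 1) / days_in_year, rounded to four places. A hypothetical helper consistent with those three cases (the real pyvims implementation may differ):
from datetime import datetime
def dyear_sketch(t):
    # Accept 'YYYY-MM-DD' strings or datetime objects, like the tested API.
    if isinstance(t, str):
        t = datetime.strptime(t, '%Y-%m-%d')
    leap = t.year % 4 == 0 and (t.year % 100 != 0 or t.year % 400 == 0)
    days = 366 if leap else 365
    return round(t.year + (t.timetuple().tm_yday - 1) / days, 4)
assert dyear_sketch('2005-12-31') == 2005.9973
assert dyear_sketch(datetime(2004, 12, 31)) == 2004.9973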
| 34.559701
| 89
| 0.628374
| 809
| 4,631
| 3.529048
| 0.144623
| 0.089667
| 0.06725
| 0.081961
| 0.527145
| 0.432224
| 0.382837
| 0.349562
| 0.310333
| 0.284764
| 0
| 0.335618
| 0.180954
| 4,631
| 133
| 90
| 34.819549
| 0.414711
| 0.060462
| 0
| 0.170732
| 0
| 0.012195
| 0.367442
| 0.064651
| 0
| 0
| 0
| 0
| 0.585366
| 1
| 0.109756
| false
| 0
| 0.036585
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
990280dc9a383a0a37cbb821de57615b46aa6a23
| 401
|
py
|
Python
|
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | 1
|
2019-11-17T10:56:28.000Z
|
2019-11-17T10:56:28.000Z
|
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | null | null | null |
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | 1
|
2021-11-02T01:00:37.000Z
|
2021-11-02T01:00:37.000Z
|
# This problem was asked by Facebook.
#
# A builder is looking to build a row of N houses that can be of K different colors.
# He has a goal of minimizing cost while ensuring that no two neighboring houses are of the same color.
#
# Given an N by K matrix where the nth row and kth column represents the cost to build the nth house with kth color,
# return the minimum cost which achieves this goal.
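The standard solution is a dynamic program over houses: for each house keep the cheapest total ending in every color, and since only the previous house's color is forbidden, the two smallest previous totals suffice, giving O(N*K). A minimal sketch (the function name and sample matrix are illustrative, not from the repo):
def min_cost(costs):
    # costs[n][k] = cost of painting house n with color k.
    if not costs:
        return 0
    prev = list(costs[0])
    for row in costs[1:]:
        i1 = min(range(len(prev)), key=prev.__getitem__)  # argmin of previous totals
        m1 = prev[i1]                                     # smallest previous total
        m2 = min(prev[:i1] + prev[i1 + 1:]) if len(prev) > 1 else float('inf')
        # Reuse m1 unless this color *is* the previous argmin; then fall back to m2.
        prev = [c + (m2 if k == i1 else m1) for k, c in enumerate(row)]
    return min(prev)
assert min_cost([[1, 2, 3], [3, 1, 2], [2, 3, 1]]) == 3  # picks cost 1 at every house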
| 44.555556
| 116
| 0.763092
| 75
| 401
| 4.08
| 0.68
| 0.045752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206983
| 401
| 8
| 117
| 50.125
| 0.962264
| 0.9601
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
990aa6cbf16ed34f5030609c03ab43c0f0ed8c2a
| 674
|
py
|
Python
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
harshp8l/deep-learning-lang-detection
|
2a54293181c1c2b1a2b840ddee4d4d80177efb33
|
[
"MIT"
] | 84
|
2017-10-25T15:49:21.000Z
|
2021-11-28T21:25:54.000Z
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 5
|
2018-03-29T11:50:46.000Z
|
2021-04-26T13:33:18.000Z
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 24
|
2017-11-22T08:31:00.000Z
|
2022-03-27T01:22:31.000Z
|
from django.conf.urls.defaults import *
urlpatterns = patterns('pytorque.views',
(r'^$', 'central_dispatch_view'),
(r'^browse$', 'central_dispatch_view'),
(r'^monitor$', 'central_dispatch_view'),
(r'^submit$', 'central_dispatch_view'),
(r'^stat$', 'central_dispatch_view'),
(r'^login/$', 'login'),
(r'^logout/$', 'logout'),
# (r'^$', 'central_dispatch_view'),
(r'^user/(?P<username>\w{0,50})/$', 'index'),
(r'^user/(?P<username>\w{0,50})/browse$', 'browse'),
# (r'^user/(?P<username>\w{0,50})/monitor', 'monitor'),
# (r'^user/(?P<username>\w{0,50})/submit', 'submit'),
# (r'^user/(?P<username>\w{0,50})/stat', 'stat'),
)
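patterns() with dotted-string view names is pre-1.8 Django. A hypothetical modern (Django 2+) equivalent of the active routes, with the commented-out ones dropped; `views` stands in for the pytorque.views module:
from django.urls import re_path
from pytorque import views
urlpatterns = [
    re_path(r'^$', views.central_dispatch_view),
    re_path(r'^browse$', views.central_dispatch_view),
    re_path(r'^monitor$', views.central_dispatch_view),
    re_path(r'^submit$', views.central_dispatch_view),
    re_path(r'^stat$', views.central_dispatch_view),
    re_path(r'^login/$', views.login),
    re_path(r'^logout/$', views.logout),
    re_path(r'^user/(?P<username>\w{0,50})/$', views.index),
    re_path(r'^user/(?P<username>\w{0,50})/browse$', views.browse),
]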
| 33.7
| 58
| 0.569733
| 88
| 674
| 4.227273
| 0.306818
| 0.241935
| 0.306452
| 0.322581
| 0.352151
| 0.241935
| 0.241935
| 0
| 0
| 0
| 0
| 0.025729
| 0.135015
| 674
| 20
| 59
| 33.7
| 0.61235
| 0.290801
| 0
| 0
| 0
| 0
| 0.545648
| 0.363057
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|