hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
29b5b93fcc93149c869189a925d3bab4277eed76
| 748
|
py
|
Python
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 19
|
2018-11-24T03:09:59.000Z
|
2021-02-11T09:20:11.000Z
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 13
|
2018-11-24T13:16:38.000Z
|
2022-02-22T17:27:08.000Z
|
googledevices/cli/commands/info.py
|
vlebourl/googledevices
|
5d8604ad48d94170127d1da9f01106a4d3bc4845
|
[
"MIT"
] | 4
|
2018-11-26T16:14:42.000Z
|
2021-10-20T14:20:40.000Z
|
"""Get information about this package."""
def info(system):
"""Get information about this package."""
import googledevices.utils.const as package
print("Projectname: ", package.NAME)
print("Version: ", package.VERSION)
print("GitHub link: ", package.URLS.get("github"))
print("PyPi link: ", package.URLS.get("pypi"))
print("Maintainers:")
for maintainer in package.MAINTAINERS:
print(" ", maintainer.get("name"), "(", maintainer.get("github"), ")")
print("")
if system:
import platform
print("")
print("System: ", platform.system())
print("Version: ", platform.version())
print("Python version: ", platform.python_version())
| 32.521739
| 81
| 0.593583
| 75
| 748
| 5.906667
| 0.373333
| 0.063205
| 0.085779
| 0.103837
| 0.13544
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240642
| 748
| 22
| 82
| 34
| 0.77993
| 0.09492
| 0
| 0.125
| 0
| 0
| 0.217718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.125
| 0
| 0.1875
| 0.6875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
29ba94b0967bd5341e441dd394da5100f547c093
| 3,542
|
py
|
Python
|
xbrl/const.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | 1
|
2022-01-27T14:53:23.000Z
|
2022-01-27T14:53:23.000Z
|
xbrl/const.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | null | null | null |
xbrl/const.py
|
blinkace/pxp
|
9155103dc166674137bd0e2fddb609ca44875761
|
[
"MIT"
] | null | null | null |
import re
class NS:
xs = 'http://www.w3.org/2001/XMLSchema'
link = 'http://www.xbrl.org/2003/linkbase'
xlink = "http://www.w3.org/1999/xlink"
xbrli = "http://www.xbrl.org/2003/instance"
xbrldi = "http://xbrl.org/2006/xbrldi"
xbrldie = "http://xbrl.org/2005/xbrldi/errors"
xbrldt = "http://xbrl.org/2005/xbrldt"
xbrldte = "http://xbrl.org/2005/xbrldt/errors"
catalog = "urn:oasis:names:tc:entity:xmlns:xml:catalog"
tp = "http://xbrl.org/2016/taxonomy-package"
oime = "https://xbrl.org/((~status_date_uri~))/oim/error"
oimce = "https://xbrl.org/((~status_date_uri~))/oim-common/error"
xbrlxe = "http://www.xbrl.org/WGWD/YYYY-MM-DD/xbrl-xml/error"
xbrl21e = "http://www.blinkace.com/python-xbrl-oim/xbrl-2.1/error"
xbrl = "https://xbrl.org/2021"
iso4217 = "http://www.xbrl.org/2003/iso4217"
utr = "http://www.xbrl.org/2009/utr"
ix = "http://www.xbrl.org/2013/inlineXBRL"
ix10 = "http://www.xbrl.org/2008/inlineXBRL"
ixe = "http://www.xbrl.org/2013/inlineXBRL/error"
pyxbrle = "https://blinkace.com/pyxbrl/error"
tpe = 'http://xbrl.org/2016/taxonomy-package/errors'
xhtml = 'http://www.w3.org/1999/xhtml'
xbrlce = 'https://xbrl.org/((~status_date_uri~))/xbrl-csv/error'
xbrlje = 'https://xbrl.org/((~status_date_uri~))/xbrl-json/error'
enum2 = 'http://xbrl.org/2020/extensible-enumerations-2.0'
xsi = 'http://www.w3.org/2001/XMLSchema-instance'
entities = "https://xbrl.org/((~status_date_uri~))/entities"
entities_cr7 = "https://xbrl.org/2021-02-03/entities"
PREFIX = {}
NSMAP = {}
OIM_COMMON_RESERVED_PREFIXES = {}
OIM_COMMON_RESERVED_PREFIX_MAP = {}
def buildPrefixMaps():
global PREFIX
PREFIX.clear()
for k, v in NS.__dict__.items():
if not k.startswith("_"):
PREFIX[v] = k
global NSMAP
NSMAP.clear()
for k, v in NS.__dict__.items():
if not k.startswith("_"):
NSMAP[k] = v
global OIM_COMMON_RESERVED_PREFIXES
OIM_COMMON_RESERVED_PREFIXES = { "iso4217", "utr", "xbrl", "xbrli", "xs" }
global OIM_COMMON_RESERVED_PREFIX_MAP
OIM_COMMON_RESERVED_PREFIX_MAP.clear()
for k in OIM_COMMON_RESERVED_PREFIXES:
OIM_COMMON_RESERVED_PREFIX_MAP[k] = getattr(NS, k)
buildPrefixMaps()
def setOIMVersion(version):
for k, v in NS.__dict__.items():
if not k.startswith("_"):
setattr(NS, k, re.sub(r'\(\(~status_date_uri~\)\)', version, v))
buildPrefixMaps()
class LinkType:
footnote = 'http://www.xbrl.org/2003/arcrole/fact-footnote'
explanatoryFact = 'http://www.xbrl.org/2009/arcrole/fact-explanatoryFact'
class LinkGroup:
default = 'http://www.xbrl.org/2003/role/link'
LINK_RESERVED_URI_MAP = {
"_": LinkGroup.default,
"footnote": LinkType.footnote,
"explanatoryFact": LinkType.explanatoryFact,
}
class DocumentType:
xbrlcsv_git = 'https://xbrl.org/((~status_date_uri~))/xbrl-csv'
xbrlcsv_cr7 = 'https://xbrl.org/CR/2021-02-03/xbrl-csv'
xbrlcsv_cr9 = 'https://xbrl.org/CR/2021-07-07/xbrl-csv'
xbrlcsv_pr1 = 'https://xbrl.org/PR/2021-08-04/xbrl-csv'
xbrlcsv = 'https://xbrl.org/2021/xbrl-csv'
xbrljson_git = 'https://xbrl.org/((~status_date_uri~))/xbrl-json'
xbrljson_wgwd = 'https://xbrl.org/WGWD/YYYY-MM-DD/xbrl-json'
xbrljson_cr7 = 'https://xbrl.org/CR/2021-02-02/xbrl-json'
xbrljson_cr9 = 'https://xbrl.org/CR/2021-07-07/xbrl-json'
xbrljson_pr1 = 'https://xbrl.org/PR/2021-08-04/xbrl-json'
xbrljson = 'https://xbrl.org/2021/xbrl-json'
| 37.680851
| 78
| 0.660926
| 506
| 3,542
| 4.482213
| 0.254941
| 0.111111
| 0.095238
| 0.067901
| 0.502205
| 0.373457
| 0.289242
| 0.205467
| 0.145503
| 0.049383
| 0
| 0.061497
| 0.15528
| 3,542
| 93
| 79
| 38.086022
| 0.696524
| 0
| 0
| 0.1
| 0
| 0.025
| 0.494636
| 0.019198
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.0125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
29c2e1b7e5523be19b17e937a85dde93fdb45fab
| 24,237
|
py
|
Python
|
apps/recurring_donations/management/commands/process_monthly_donations.py
|
gannetson/sportschooldeopenlucht
|
0c78e5a95b22a963244112e478119ba60c572141
|
[
"BSD-3-Clause"
] | 1
|
2019-01-19T06:58:39.000Z
|
2019-01-19T06:58:39.000Z
|
apps/recurring_donations/management/commands/process_monthly_donations.py
|
gannetson/sportschooldeopenlucht
|
0c78e5a95b22a963244112e478119ba60c572141
|
[
"BSD-3-Clause"
] | null | null | null |
apps/recurring_donations/management/commands/process_monthly_donations.py
|
gannetson/sportschooldeopenlucht
|
0c78e5a95b22a963244112e478119ba60c572141
|
[
"BSD-3-Clause"
] | null | null | null |
import csv
import os
import math
import logging
import traceback
import requests
import sys
from collections import namedtuple
from optparse import make_option
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from django.db import connection
from django.db import transaction
from django.utils import timezone
from apps.cowry_docdata.adapters import WebDirectDocDataDirectDebitPaymentAdapter
from apps.cowry_docdata.exceptions import DocDataPaymentException
from apps.cowry_docdata.models import DocDataPaymentOrder
from apps.fund.models import RecurringDirectDebitPayment, Order, OrderStatuses, Donation, OrderItem
from apps.projects.models import Project, ProjectPhases
from ...mails import mail_monthly_donation_processed_notification
logger = logging.getLogger(__name__)
#
# Run with:
# ./manage.py process_monthly_donations -v 2 --settings=bluebottle.settings.local (or .production etc.)
#
class Command(BaseCommand):
help = 'Process monthly donations.'
requires_model_validation = True
verbosity_loglevel = {
'0': logging.ERROR, # 0 means no output.
'1': logging.WARNING, # 1 means normal output (default).
'2': logging.INFO, # 2 means verbose output.
'3': logging.DEBUG # 3 means very verbose output.
}
option_list = BaseCommand.option_list + (
make_option('--dry-run', action='store_true', dest='dry_run', default=False,
help="Process the monthly donations without creating any db records or payments."),
make_option('--no-email', action='store_true', dest='no_email', default=False,
help="Don't send the monthly donation email to users."),
make_option('--csv-export', action='store_true', dest='csv_export', default=False,
help="Generate CSV export of monthly donors with donations amounts."),
make_option('--process-one-recurring-payment', action='store', dest='process_payment_id', type='int',
metavar='RECURRING-PAYMENT-ID',
help="Process only the RecurringDirectDebitPayment specified by its primary key."),
)
def handle(self, *args, **options):
# Setup the log level for root logger.
loglevel = self.verbosity_loglevel.get(options['verbosity'])
logger.setLevel(loglevel)
if options['dry_run'] and options['csv_export']:
logger.error("You cannot set both '--dry-run' and '--csv-export'.")
sys.exit(1)
send_email = not options['no_email']
if options['dry_run']:
# TODO Implement --dry-run.
logger.warn("Config: --dry-run not fully implemented yet. Database records and payments will be created.")
logger.info("Config: Not sending emails.")
send_email = False
recurring_payments_queryset = RecurringDirectDebitPayment.objects.filter(active=True)
if options['csv_export']:
generate_monthly_donations_csv(recurring_payments_queryset)
else:
if options['process_payment_id']:
recurring_payments_queryset = recurring_payments_queryset.filter(id=options['process_payment_id'])
try:
process_monthly_donations(recurring_payments_queryset, send_email)
except:
print traceback.format_exc()
def generate_monthly_donations_csv(recurring_payments_queryset):
csv_path = os.path.expanduser('~/monthly-donors-{0}.csv'.format(timezone.now().date()))
logger.info("Saving monthly donations CSV file to:")
logger.info(" {0}".format(csv_path))
with open(csv_path, 'wb') as csv_file:
csvwriter = csv.writer(csv_file, dialect='excel')
csvwriter.writerow(['Member', 'Active', 'Amount'])
for rp in recurring_payments_queryset:
csvwriter.writerow([rp.user.email, rp.active, rp.amount])
def update_last_donation(donation, remaining_amount, popular_projects):
"""
Updates the last donation with the remaining amount of the payment. If the donation is more than the project
needs, the project will be filled and the balance will be used to fill the popular projects recursively.
"""
project = Project.objects.get(id=donation.project_id)
# Base case.
if project.projectcampaign.money_donated + remaining_amount <= project.projectcampaign.money_asked or \
len(popular_projects) == 0:
# The remaining amount won't fill up the project or we have no more projects to try. We're done.
logger.debug(u"Donation is less than project '{0}' needs. No further adjustments are needed.".format(project.title))
donation.amount = remaining_amount
donation.donation_type = Donation.DonationTypes.recurring
donation.save()
return
# Recursive case.
else:
# Fill up the project.
logger.debug(u"Donation is more than project '{0}' needs. Filling up project and creating new donation.".format(project.title))
donation.amount = project.projectcampaign.money_asked - project.projectcampaign.money_donated
donation.donation_type = Donation.DonationTypes.recurring
donation.save()
# Create a new Donation and recursively update it with the remaining amount.
ct = ContentType.objects.get_for_model(donation)
order = OrderItem.objects.get(content_type=ct, content_object=donation)
new_project = popular_projects.pop(0)
new_donation = Donation.objects.create(user=donation.user, project=new_project, amount=0, currency='EUR',
donation_type=Donation.DonationTypes.recurring)
OrderItem.objects.create(content_object=new_donation, order=order)
update_last_donation(new_donation, remaining_amount - donation.amount, popular_projects)
def create_recurring_order(user, projects, order=None):
"""
Creates a recurring Order with donations to the supplied projects.
"""
if not order:
order = Order.objects.create(status=OrderStatuses.recurring, user=user, recurring=True)
for p in projects:
project = Project.objects.get(id=p.id)
if project.phase == ProjectPhases.campaign:
donation = Donation.objects.create(user=user, project=project, amount=0, currency='EUR',
donation_type=Donation.DonationTypes.recurring)
OrderItem.objects.create(content_object=donation, order=order)
return order
def correct_donation_amounts(popular_projects, recurring_order, recurring_payment):
"""
Divides the total amount for the monthly donation across all projects. This method deals with the case of a
donation filling up a project.
"""
remaining_amount = recurring_payment.amount
num_donations = recurring_order.donations.count()
amount_per_project = math.floor(recurring_payment.amount / num_donations)
donations = recurring_order.donations
for i in range(0, num_donations - 1):
donation = donations[i]
project = Project.objects.get(id=donation.project_id)
if project.projectcampaign.money_donated + amount_per_project > project.projectcampaign.money_asked:
donation.amount = project.projectcampaign.money_asked - project.projectcampaign.money_donated
else:
donation.amount = amount_per_project
donation.donation_type = Donation.DonationTypes.recurring
donation.save()
remaining_amount -= donation.amount
# Update the last donation with the remaining amount.
update_last_donation(donations[num_donations - 1], remaining_amount, popular_projects)
def set_order_created_datetime(recurring_order, order_created_datetime):
""" Uses custom SQL to set the created time of Order to a consistent value. """
db_table = recurring_order._meta.db_table
pk_name = recurring_order._meta.pk.name
logger.debug("Setting created and updated to {0} on Order {1}.".format(order_created_datetime, recurring_order.id))
cursor = connection.cursor()
sql_statement = "UPDATE {0} SET created = '{1}' WHERE {2} = {3}".format(db_table, order_created_datetime,
pk_name, recurring_order.pk)
cursor.execute(sql_statement)
sql_statement = "UPDATE {0} SET updated = '{1}' WHERE {2} = {3}".format(db_table, order_created_datetime,
pk_name, recurring_order.pk)
cursor.execute(sql_statement)
transaction.commit_unless_managed()
def process_monthly_donations(recurring_payments_queryset, send_email):
""" The starting point for creating DocData payments for the monthly donations. """
recurring_donation_errors = []
RecurringDonationError = namedtuple('RecurringDonationError', 'recurring_payment error_message')
skipped_recurring_payments = []
SkippedRecurringPayment = namedtuple('SkippedRecurringPayment', 'recurring_payment orders')
donation_count = 0
# The adapter is used after the recurring Order and donations have been adjusted. It's created here so that we can
# reuse it to process all recurring donations.
webdirect_payment_adapter = WebDirectDocDataDirectDebitPaymentAdapter()
# A consistent created time to use for the created recurring Orders.
order_created_datetime = timezone.now()
# Fixed lists of the popular projects.
popular_projects_all = list(Project.objects.filter(phase=ProjectPhases.campaign).order_by('-popularity'))
top_three_projects = popular_projects_all[:3]
popular_projects_rest = popular_projects_all[3:]
logger.info("Config: Using these projects as 'Top Three':")
for project in top_three_projects:
logger.info(" {0}".format(project.title))
# The main loop that processes each monthly donation.
for recurring_payment in recurring_payments_queryset:
top_three_donation = False
user_selected_projects = []
# Skip payment if there has been a recurring Order recently.
ten_days_ago = timezone.now() + timezone.timedelta(days=-10)
recent_closed_recurring_orders = Order.objects.filter(user=recurring_payment.user, status=OrderStatuses.closed,
recurring=True, updated__gt=ten_days_ago)
if recent_closed_recurring_orders.count() > 0:
skipped_recurring_payments.append(SkippedRecurringPayment(recurring_payment, list(recent_closed_recurring_orders)))
logger.warn(
"Skipping '{0}' because it looks like it has been processed recently with one of these Orders:".format(
recurring_payment))
for closed_order in recent_closed_recurring_orders:
logger.warn(" Order Number: {0}".format(closed_order.order_number))
continue
# Check if there is a monthly shopping cart (Order status is 'recurring') for this recurring_payment user.
try:
recurring_order = Order.objects.get(user=recurring_payment.user, status=OrderStatuses.recurring)
logger.debug("Using existing recurring Order for user: {0}.".format(recurring_payment.user))
except Order.DoesNotExist:
# There is no monthly shopping cart. The user is supporting the top three projects so we need to create an
# Order with Donations for the top three projects.
logger.debug("Creating new 'Top Three' recurring Order for user {0}.".format(recurring_payment.user))
recurring_order = create_recurring_order(recurring_payment.user, top_three_projects)
top_three_donation = True
except Order.MultipleObjectsReturned:
error_message = "Multiple Orders with status 'recurring' returned for '{0}'. Not processing this recurring donation.".format(
recurring_payment)
logger.error(error_message)
recurring_donation_errors.append(RecurringDonationError(recurring_payment, error_message))
continue
# Check if we're above the DocData minimum for direct debit.
if recurring_payment.amount < 113:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
error_message = "Payment amount for '{0}' is less than the DocData minimum for direct debit (113). Skipping.".format(
recurring_payment)
logger.error(error_message)
recurring_donation_errors.append(RecurringDonationError(recurring_payment, error_message))
continue
# Remove donations to projects that are no longer in the campaign phase.
for donation in recurring_order.donations:
project = Project.objects.get(id=donation.project.id)
if project.phase != ProjectPhases.campaign:
ctype = ContentType.objects.get_for_model(donation)
order_item = OrderItem.objects.get(object_id=donation.id, content_type=ctype)
order_item.delete()
donation.delete()
if recurring_order.donations.count() > 0:
# There are donations in the recurring Order and we need to redistribute / correct the donation amounts.
# Save a copy of the projects that have been selected by the user so that the monthly shopping cart can
# recreated after the payment has been successfully started.
for donation in recurring_order.donations:
user_selected_projects.append(donation.project)
correct_donation_amounts(popular_projects_all, recurring_order, recurring_payment)
else:
# There are no donations in the recurring Order so we need to create a monthly shopping cart to support the
# top three projects and redistribute / correct the donation amounts.
create_recurring_order(recurring_payment.user, top_three_projects, recurring_order)
if recurring_order.donations.count() == 0:
logger.debug("The top three donations are full. Using next three projects for top three.")
top_three_projects = popular_projects_rest[:3]
popular_projects_rest = popular_projects_rest[3:]
create_recurring_order(recurring_payment.user, top_three_projects, recurring_order)
correct_donation_amounts(popular_projects_rest, recurring_order, recurring_payment)
top_three_donation = True
# At this point the order should be correctly setup and ready for the DocData payment.
if top_three_donation:
donation_type_message = "supporting the 'Top Three' projects"
else:
donation_type_message = "with {0} donations".format(recurring_order.donations.count())
logger.info("Starting payment for '{0}' {1}.".format(recurring_payment, donation_type_message))
# Safety check to ensure the modifications to the donations in the recurring result in an Order total that
# matches the RecurringDirectDebitPayment.
if recurring_payment.amount != recurring_order.total:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
error_message = "RecurringDirectDebitPayment amount: {0} does not equal recurring Order amount: {1} for '{2}'. Not processing this recurring donation.".format(
recurring_payment.amount, recurring_order.total, recurring_payment)
logger.error(error_message)
recurring_donation_errors.append(RecurringDonationError(recurring_payment, error_message))
continue
# Check if the IBAN / BIC is stored correctly on the RecurringDirectDebitPayment.
if recurring_payment.iban == '' or recurring_payment.bic == '' or \
not recurring_payment.iban.endswith(recurring_payment.account) or \
recurring_payment.bic[:4] != recurring_payment.iban[4:8]:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
error_message = "Cannot create payment because the IBAN and/or BIC are not available."
logger.error(error_message)
recurring_donation_errors.append(RecurringDonationError(recurring_payment, error_message))
continue
# Create and fill in the DocDataPaymentOrder.
payment = DocDataPaymentOrder()
payment.order = recurring_order
payment.payment_method_id = 'dd-webdirect'
payment.amount = recurring_payment.amount
payment.currency = recurring_payment.currency
payment.customer_id = recurring_payment.user.id
payment.email = recurring_payment.user.email
# Use the recurring payment name (bank account name) to set the first and last name if they're not set.
if not recurring_payment.user.first_name:
if ' ' in recurring_payment.name:
payment.first_name = recurring_payment.name.split(' ')[0]
else:
payment.first_name = recurring_payment.name
else:
payment.first_name = recurring_payment.user.first_name
if not recurring_payment.user.last_name:
if ' ' in recurring_payment.name:
payment.last_name = recurring_payment.name[recurring_payment.name.index(' ') + 1:]
else:
payment.last_name = recurring_payment.name
else:
payment.last_name = recurring_payment.user.last_name
# Try to use the address from the profile if it's set.
address = recurring_payment.user.address
if not address:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
error_message = "Cannot create a payment for '{0}' because user does not have an address set.".format(recurring_payment)
logger.error(error_message)
recurring_donation_errors.append(RecurringDonationError(recurring_payment, error_message))
continue
# Set a default value for the pieces of the address that we don't have.
unknown_value = u'Unknown'
if not address.line1:
logger.warn("User '{0}' does not have their street and street number set. Using '{1}'.".format(recurring_payment.user, unknown_value))
payment.address = unknown_value
else:
payment.address = address.line1
if not address.city:
logger.warn("User '{0}' does not have their city set. Using '{1}'.".format(recurring_payment.user, unknown_value))
payment.city = unknown_value
else:
payment.city = address.city
if not address.postal_code:
logger.warn("User '{0}' does not have their postal code set. Using '{1}'.".format(recurring_payment.user, unknown_value))
payment.postal_code = unknown_value
else:
payment.postal_code = address.postal_code
# Assume the Netherlands when country not set.
if address.country:
payment.country = address.country.alpha2_code
else:
payment.country = 'NL'
# Try to use the language from the User settings if it's set.
if recurring_payment.user.primary_language:
payment.language = recurring_payment.user.primary_language[:2] # Cut off locale.
else:
payment.language = 'nl'
payment.save()
# Start the WebDirect payment.
try:
webdirect_payment_adapter.create_remote_payment_order(payment)
except DocDataPaymentException as e:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
error_message = "Problem creating remote payment order."
logger.error(error_message)
recurring_donation_errors.append(
RecurringDonationError(recurring_payment, "{0} {1}".format(error_message, e.message)))
continue
else:
recurring_order.status = OrderStatuses.closed
recurring_order.save()
try:
webdirect_payment_adapter.start_payment(payment, recurring_payment)
except DocDataPaymentException as e:
# Cleanup the Order if there's an error.
if top_three_donation:
recurring_order.delete()
else:
recurring_order.status = OrderStatuses.recurring
recurring_order.save()
error_message = "Problem starting payment."
logger.error(error_message)
recurring_donation_errors.append(
RecurringDonationError(recurring_payment, "{0} {1}".format(error_message, e.message)))
continue
logger.debug("Payment for '{0}' started.".format(recurring_payment))
donation_count += 1
# Send an email to the user.
if send_email:
mail_monthly_donation_processed_notification(recurring_payment, recurring_order)
# Create a new recurring Order (monthly shopping cart) for donations that are not to the 'Top Three'.
if not top_three_donation and len(user_selected_projects) > 0:
new_recurring_order = create_recurring_order(recurring_payment.user, user_selected_projects)
# Adjust donation amounts in a simple way for the recurring Order (the monthly donations shopping cart).
num_donations = new_recurring_order.donations.count()
amount_per_project = math.floor(recurring_payment.amount / num_donations)
donations = new_recurring_order.donations
for i in range(0, num_donations - 1):
donation = donations[i]
donation.amount = amount_per_project
donation.donation_type = Donation.DonationTypes.recurring
donation.save()
# Update the last donation with the remaining amount.
donation = donations[num_donations - 1]
donation.amount = recurring_payment.amount - (amount_per_project * (num_donations - 1))
donation.donation_type = Donation.DonationTypes.recurring
donation.save()
set_order_created_datetime(recurring_order, order_created_datetime)
logger.info("")
logger.info("Recurring Donation Processing Summary")
logger.info("=====================================")
logger.info("")
logger.info("Total number of recurring donations: {0}".format(recurring_payments_queryset.count()))
logger.info("Number of recurring Orders successfully processed: {0}".format(donation_count))
logger.info("Number of errors: {0}".format(len(recurring_donation_errors)))
logger.info("Number of skipped payments: {0}".format(len(skipped_recurring_payments)))
if len(recurring_donation_errors) > 0:
logger.info("")
logger.info("")
logger.info("Detailed Error List")
logger.info("===================")
logger.info("")
for error in recurring_donation_errors:
logger.info("RecurringDirectDebitPayment: {0} {1}".format(error.recurring_payment.id, error.recurring_payment))
logger.info("Error: {0}".format(error.error_message))
logger.info("--")
if len(skipped_recurring_payments) > 0:
logger.info("")
logger.info("")
logger.info("Skipped Recurring Payments")
logger.info("==========================")
logger.info("")
for skipped_payment in skipped_recurring_payments:
logger.info("RecurringDirectDebitPayment: {0} {1}".format(skipped_payment.recurring_payment.id, skipped_payment.recurring_payment))
for closed_order in skipped_payment.orders:
logger.info("Order Number: {0}".format(closed_order.order_number))
logger.info("--")
| 49.564417
| 171
| 0.67562
| 2,803
| 24,237
| 5.660007
| 0.148412
| 0.073621
| 0.026473
| 0.011346
| 0.426032
| 0.333943
| 0.271793
| 0.249291
| 0.201387
| 0.18462
| 0
| 0.005382
| 0.241119
| 24,237
| 488
| 172
| 49.665984
| 0.857174
| 0.123571
| 0
| 0.311429
| 0
| 0.002857
| 0.137797
| 0.013283
| 0
| 0
| 0
| 0.002049
| 0
| 0
| null | null | 0
| 0.06
| null | null | 0.002857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29c4a45e5143815cb47c3724fcaecb30960fac72
| 475
|
py
|
Python
|
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
src/kotify/fabric/procfile.py
|
kotify/kotify.fabric
|
5ce50a38210217f643c81438b53466b60fc16cb1
|
[
"MIT"
] | null | null | null |
from ._core import Collection, local, task
@task(name="main", default=True)
def start_main(c):
    """Launch overmind with the main and minimal process groups."""
    processes = ",".join(c.start.main + c.start.minimal)
    local(f"overmind start -l {processes}", pty=True)
@task(name="minimal")
def start_minimal(c):
    """Launch overmind with only the minimal process group."""
    processes = ",".join(c.start.minimal)
    local(f"overmind start -l {processes}", pty=True)
@task(name="all")
def start_all(c):
    """Launch overmind with every process defined in the Procfile."""
    local("overmind start", pty=True)
# Expose the three launchers as the "start" task namespace.
ns = Collection("start")
for _start_task in (start_all, start_main, start_minimal):
    ns.add_task(_start_task)
| 20.652174
| 84
| 0.692632
| 77
| 475
| 4.142857
| 0.285714
| 0.15047
| 0.08464
| 0.131661
| 0.351097
| 0.351097
| 0.351097
| 0.194357
| 0.194357
| 0
| 0
| 0
| 0.12
| 475
| 22
| 85
| 21.590909
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0.290526
| 0.103158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29c698fcf98da3c177cd1347dd70acef351370fb
| 888
|
py
|
Python
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 7
|
2018-05-01T19:39:17.000Z
|
2020-01-02T17:11:05.000Z
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 10
|
2018-12-10T22:16:43.000Z
|
2020-08-27T18:23:45.000Z
|
backend/src/feature_extraction/rolloff.py
|
AnXi-TieGuanYin-Tea/MusicGenreClassifiaction
|
a0b9f621b0a5d2451180b12af7681756c5abd138
|
[
"MIT"
] | 2
|
2021-04-16T08:20:17.000Z
|
2022-01-06T14:06:44.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 17 23:14:28 2018
@author: Akihiro Inui
"""
def rolloff(input_power_spectrum: list, param: float = 0.85) -> float:
    """
    Spectral Rolloff: the normalised position in the spectrum below which
    `param` of the total spectral energy is contained.

    :param input_power_spectrum: power spectrum values (non-empty list of numbers)
    :param param: threshold fraction of the total energy (0 < param < 1)
    :return: spectral rolloff as a fraction of the spectrum length, in (0, 1]
    """
    assert 0 < param < 1, "parameter must be between 0 and 1"
    # Bug fix: the original computed sum(input_power_spectrum[:] ** 2),
    # which raises TypeError for the declared plain-list input (a list
    # cannot be squared); square each bin instead.
    total_energy = sum(x ** 2 for x in input_power_spectrum)
    # Accumulate energy bin by bin until the threshold fraction is exceeded.
    energy = 0.0
    count = 0
    while energy <= param * total_energy and count < len(input_power_spectrum):
        energy += input_power_spectrum[count] ** 2
        count += 1
    # Normalise by the number of spectral bins.
    return count / len(input_power_spectrum)
| 28.645161
| 83
| 0.667793
| 122
| 888
| 4.737705
| 0.516393
| 0.157439
| 0.186851
| 0.062284
| 0.089965
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038179
| 0.233108
| 888
| 30
| 84
| 29.6
| 0.810573
| 0.420045
| 0
| 0
| 0
| 0
| 0.069474
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29c8dfdb3c65c5e9847d8ee2d3b8fe9a5f54498a
| 1,000
|
py
|
Python
|
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | 1
|
2020-02-11T16:10:11.000Z
|
2020-02-11T16:10:11.000Z
|
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | null | null | null |
ssh.py
|
telkomdev/keris
|
8451f3d69df174e33003e90e4fd70f602412412a
|
[
"MIT"
] | null | null | null |
from common import is_connection_ok
import paramiko
"""
execute_ssh(host, port, username, password, cmd)
"""
def execute_ssh(host, username, password, cmd, port='22'):
    """
    Try `cmd` over SSH using password authentication.

    :param host: target host name or address
    :param username: login user
    :param password: login password
    :param cmd: shell command to run remotely (5s timeout)
    :param port: SSH port as a string, default '22'
    :return: 'SSH_CONNECTION_SUCCESS ...' (includes the credentials used),
             'SSH_CONNECTION_FAIL' on any error, or 'CONNECTION_NOT_FOUND'
             when there is no network connectivity.
    """
    if not is_connection_ok():
        return 'CONNECTION_NOT_FOUND'
    ssh = None
    try:
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=host, port=int(port), username=username, password=password)
        _, stdout, stderr = ssh.exec_command(cmd, timeout=5)
        res = stdout.read().decode()
        error = stderr.read().decode('utf-8')
        if error:
            print(error)
            return 'SSH_CONNECTION_FAIL'
        return 'SSH_CONNECTION_SUCCESS with username : {username} and password {password}'.format(username=username, password=password)
    except Exception:
        print('*')
        return 'SSH_CONNECTION_FAIL'
    finally:
        # Bug fix: the original closed the client only on the success path,
        # leaking the connection on stderr output and on exceptions.
        if ssh is not None:
            ssh.close()
| 35.714286
| 143
| 0.601
| 107
| 1,000
| 5.429907
| 0.485981
| 0.110155
| 0.098107
| 0.110155
| 0.092943
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005658
| 0.293
| 1,000
| 28
| 144
| 35.714286
| 0.816124
| 0
| 0
| 0.181818
| 0
| 0
| 0.14709
| 0.02328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0.136364
| 0.090909
| 0
| 0.318182
| 0.090909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29ca0af350d167975f57568f8d8d244098802dd2
| 376
|
py
|
Python
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 4
|
2019-04-02T09:44:18.000Z
|
2020-04-15T11:47:49.000Z
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 1
|
2019-03-04T17:20:39.000Z
|
2019-03-04T17:48:18.000Z
|
novel/spider/config.py
|
rrcgat/novel-info
|
fcda24f9f6da5a4755e942a520045b7b5a53bef4
|
[
"MIT"
] | 1
|
2020-04-15T11:47:50.000Z
|
2020-04-15T11:47:50.000Z
|
'''
Request headers (user-agent strings) used by the spider.
'''
# Mobile Safari on iOS 6 — used when a mobile-rendered page is wanted.
HEADERS_IPHONE = {'user-agent': (
    'Mozilla/5.0 '
    '(iPhone; CPU iPhone OS 6_0 like Mac OS X) '
    'AppleWebKit/536.26 (KHTML, like Gecko) '
    'Version/6.0 Mobile/10A5376e Safari/8536.25'
)}
# Desktop Chrome 67 on Linux — used for desktop-rendered pages.
HEADERS_CHROME = {'user-agent': (
    'Mozilla/5.0 (X11; Linux x86_64) '
    'AppleWebKit/537.36 (KHTML, like Gecko) '
    'Chrome/67.0.3396.99 Safari/537.36'
)}
| 22.117647
| 48
| 0.617021
| 57
| 376
| 4
| 0.614035
| 0.078947
| 0.140351
| 0.149123
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166113
| 0.199468
| 376
| 16
| 49
| 23.5
| 0.591362
| 0.007979
| 0
| 0.181818
| 0
| 0
| 0.709589
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29cde250e9d497ca3e7e9d2169fa12a67aa2c621
| 752
|
py
|
Python
|
core/recc/system/environ.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | 3
|
2021-06-20T02:24:10.000Z
|
2022-01-26T23:55:33.000Z
|
core/recc/system/environ.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
core/recc/system/environ.py
|
bogonets/answer
|
57f892a9841980bcbc35fa1e27521b34cd94bc25
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from os import environ
from typing import Optional, Dict, Any, Type
def get_os_envs_dict() -> Dict[str, str]:
    """Return a snapshot of the current OS environment as a plain dict."""
    # The original comprehension guarded each key with `if environ` (a
    # constant, per-key no-op) and wrapped already-str values in str();
    # dict(environ) produces the same mapping directly.
    return dict(environ)
def exchange_env(key: str, exchange: Optional[str]) -> Optional[str]:
    """Remove `key` from the environment, optionally installing `exchange`
    in its place; returns the previous value (None when unset)."""
    previous = environ.pop(key, None)
    if exchange is not None:
        environ[key] = exchange
    return previous
def get_env(key: str) -> Optional[str]:
    """Look up `key` in the process environment; None when unset."""
    try:
        return environ[key]
    except KeyError:
        return None
def opt_env(key: str, default_value: Any, result_class: Type) -> Any:
    """Read `key` from the environment coerced through `result_class`,
    falling back to `default_value` when unset or unparsable."""
    raw = environ.get(key)
    if raw is None:
        return default_value
    try:
        return result_class(raw)
    except ValueError:
        return default_value
| 23.5
| 69
| 0.655585
| 110
| 752
| 4.381818
| 0.327273
| 0.082988
| 0.056017
| 0.062241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001739
| 0.235372
| 752
| 31
| 70
| 24.258065
| 0.836522
| 0.027926
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.190476
| false
| 0
| 0.095238
| 0.095238
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
29cf16e7358b9161ab9d90ae6bb97701b983418a
| 436
|
py
|
Python
|
InteractiveProgramming/assignment3.3.py
|
mr-ice/pipython
|
ea27af520946cb710cb717815be625489fc8a1a3
|
[
"MIT"
] | null | null | null |
InteractiveProgramming/assignment3.3.py
|
mr-ice/pipython
|
ea27af520946cb710cb717815be625489fc8a1a3
|
[
"MIT"
] | null | null | null |
InteractiveProgramming/assignment3.3.py
|
mr-ice/pipython
|
ea27af520946cb710cb717815be625489fc8a1a3
|
[
"MIT"
] | null | null | null |
# Grade-conversion exercise (Python 2 syntax: raw_input, print statement).
# Reads a score in [0.0, 1.0] and prints the corresponding letter grade.
try:
    s = raw_input("Enter score between 0.0 and 1.0: ")
    score = float(s)
    # Numeric but out of range: signal via a generic exception so the
    # bare `except` below prints the range message.
    if score < 0 or score > 1:
        raise Exception
except ValueError:
    # float(s) failed — the input was not numeric at all.
    print "You didn't even enter a number"
except:
    # Out-of-range score (raised above).
    print "Not a possible score."
else:
    # Valid score: map to a letter grade by descending threshold.
    if score >= 0.9:
        print "A"
    elif score >= 0.8:
        print "B"
    elif score >= 0.7:
        print "C"
    elif score >= 0.6:
        print "D"
    else:
        print "F"
| 20.761905
| 54
| 0.529817
| 67
| 436
| 3.432836
| 0.537313
| 0.130435
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049645
| 0.353211
| 436
| 20
| 55
| 21.8
| 0.765957
| 0
| 0
| 0.1
| 0
| 0
| 0.204128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.35
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29dd6423703e7bd3d65394220ac73d337651b108
| 1,603
|
py
|
Python
|
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | 2
|
2020-11-01T13:22:11.000Z
|
2020-11-01T13:22:20.000Z
|
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | null | null | null |
src/spinnaker_ros_lsm/venv/lib/python2.7/site-packages/spinnman/messages/scp/impl/scp_version_request.py
|
Roboy/LSM_SpiNNaker_MyoArm
|
04fa1eaf78778edea3ba3afa4c527d20c491718e
|
[
"BSD-3-Clause"
] | null | null | null |
from spinnman.messages.scp.abstract_messages.abstract_scp_request\
import AbstractSCPRequest
from spinnman.messages.sdp.sdp_flag import SDPFlag
from spinnman.messages.sdp.sdp_header import SDPHeader
from spinnman.messages.scp.scp_request_header import SCPRequestHeader
from spinnman.messages.scp.scp_command import SCPCommand
from spinnman.messages.scp.impl.scp_version_response import SCPVersionResponse
class SCPVersionRequest(AbstractSCPRequest):
    """ An SCP request to read the version of software running on a core
    """

    def __init__(self, x, y, p):
        """
        :param x: The x-coordinate of the chip to read from, between 0 and 255
        :type x: int
        :param y: The y-coordinate of the chip to read from, between 0 and 255
        :type y: int
        :param p: The id of the processor to read the version from,\
                    between 0 and 31
        :type p: int
        :raise spinnman.exceptions.SpinnmanInvalidParameterException:
                    * If the chip coordinates are out of range
                    * If the processor is out of range
        """
        # Build an SDP header addressed at processor p on chip (x, y) and
        # wrap it with the CMD_VER SCP command; a reply is expected.
        super(SCPVersionRequest, self).__init__(
            SDPHeader(
                flags=SDPFlag.REPLY_EXPECTED, destination_port=0,
                destination_cpu=p, destination_chip_x=x,
                destination_chip_y=y),
            SCPRequestHeader(command=SCPCommand.CMD_VER))

    def get_scp_response(self):
        """ See\
            :py:meth:`spinnman.messages.scp.abstract_scp_request.AbstractSCPRequest.get_scp_response`
        """
        return SCPVersionResponse()
| 40.075
| 101
| 0.674984
| 198
| 1,603
| 5.30303
| 0.368687
| 0.106667
| 0.114286
| 0.087619
| 0.188571
| 0.089524
| 0.089524
| 0.089524
| 0.089524
| 0.089524
| 0
| 0.010059
| 0.25577
| 1,603
| 39
| 102
| 41.102564
| 0.870075
| 0.383656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.352941
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29e3af095a46b5abdfb783f45e3fb0c6a6c5b81f
| 652
|
py
|
Python
|
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | 2
|
2018-02-24T17:20:02.000Z
|
2018-02-24T17:25:43.000Z
|
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | null | null | null |
LC/201.py
|
szhu3210/LeetCode_Solutions
|
64747eb172c2ecb3c889830246f3282669516e10
|
[
"MIT"
] | null | null | null |
class Solution(object):
    def rangeBitwiseAnd(self, m, n):
        """
        Bitwise-AND of every integer in the inclusive range [m, n].

        :type m: int
        :type n: int
        :rtype: int
        """
        # The AND of the whole range is the common binary prefix of m and n
        # padded with zeros: every bit position below the first position
        # where the shifted values agree flips somewhere inside the range,
        # so it ANDs to 0.
        #
        # Fixes: `xrange` (Python 2 only) replaced with `range`; the large
        # block of dead commented-out alternative removed.
        shift = 0
        for i in range(len(bin(m)) - 2):
            if m >> i != n >> i:
                shift += 1
            else:
                break
        return m >> shift << shift
| 24.148148
| 44
| 0.328221
| 77
| 652
| 2.779221
| 0.376623
| 0.028037
| 0.056075
| 0.11215
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0.224299
| 0
| 0.0347
| 0.513804
| 652
| 27
| 45
| 24.148148
| 0.640379
| 0.381902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29e805265bd23dadb56a588aaeba28a86de79226
| 4,250
|
py
|
Python
|
src/test/resources/scripts/Authentication.py
|
tomjbarry/Penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | 1
|
2019-02-25T05:55:34.000Z
|
2019-02-25T05:55:34.000Z
|
src/test/resources/scripts/Authentication.py
|
tomjbarry/penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | null | null | null |
src/test/resources/scripts/Authentication.py
|
tomjbarry/penstro
|
d9179852158bebf48aaba7a198de5246acb1b064
|
[
"MIT"
] | null | null | null |
from PyConstants import Paths
from PyConstants import Codes
from PyConstants import CacheTimes
from PyBaseTest import BaseTest
from PyRequest import PyRequest
import time
class Authentication(BaseTest):
    """Integration tests for the register / login / logout endpoints."""

    password = "testPassword123"
    invalidPassword = "incorrectincorrect"

    def runTests(self):
        """Run the authentication suite; returns (targetToken, token)."""
        print("Running authentication tests")
        self.testRegister(self.username, self.email)
        token = self.testLogin(self.username)
        self.testRegister(self.target, self.targetEmail)
        self.testLogout(token)
        # Let the username cache expire before logging in again.
        time.sleep(CacheTimes.USER_USERNAME)
        token = self.testLogin(self.username)
        targetToken = self.testLogin(self.target)
        time.sleep(CacheTimes.USER_USERNAME)
        return targetToken, token

    def _expect_register(self, body, expected):
        """POST `body` to the register endpoint and assert `expected`."""
        PyRequest().expectResponse(Paths.REGISTER, PyRequest.POST, body, expected)

    def testRegister(self, username, email):
        """Exercise the register endpoint: invalid payloads, restricted
        values, then a successful registration and its duplicate."""
        body = {"username": username, "email": email, "password": self.password,
                "confirmNewPassword": self.password, "ageMinimum": True,
                "recaptchaResponse": "test"}
        # Structurally invalid payloads, tried in the original order.
        invalid_bodies = [
            # missing password
            {"username": username, "email": email},
            # missing username
            {"email": email, "password": self.password},
            # missing email
            {"username": username, "password": self.password},
            # missing confirmation / age / recaptcha fields
            {"username": username, "email": email, "password": self.password},
            # password confirmation mismatch
            {"username": username, "email": email, "password": self.password,
             "confirmNewPassword": self.password + "s", "recaptchaResponse": "test"},
            # age requirement not met
            {"username": username, "email": email, "password": self.password,
             "confirmNewPassword": self.password, "ageMinimum": False,
             "recaptchaResponse": "test"},
        ]
        for invalid_body in invalid_bodies:
            self._expect_register(invalid_body, self.expectedInvalid)
        # Payloads rejected because a specific field value is restricted.
        self._expect_register(
            {"username": username, "password": "password1234567", "email": email,
             "confirmNewPassword": "password1234567", "ageMinimum": True,
             "recaptchaResponse": "test"},
            self.expectedRestrictedPassword)
        self._expect_register(
            {"username": "penstro", "password": self.password, "email": email,
             "confirmNewPassword": self.password, "ageMinimum": True,
             "recaptchaResponse": "test"},
            self.expectedRestrictedUsername)
        self._expect_register(
            {"username": username, "password": self.password,
             "email": "[email protected]", "confirmNewPassword": self.password,
             "ageMinimum": True, "recaptchaResponse": "test"},
            self.expectedRestrictedEmail)
        # A valid payload succeeds once, then fails as a duplicate.
        self._expect_register(body, self.expectedResultCreated)
        self._expect_register(body, self.expectedExistsUsernameEmail)

    def testLogin(self, username):
        """Log in as `username`; returns the session token string or None."""
        body = {"username": username, "password": self.invalidPassword}
        PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, None, self.expectedInvalid)
        PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, body, self.expectedDenied)
        body = {"username": username, "password": self.password}
        data = PyRequest().expectResponse(Paths.LOGIN, PyRequest.POST, body, self.expectedResultSuccess)
        if 'dto' in data:
            if 'result' in data['dto']:
                print("TOKEN: " + str(data['dto']['result']))
                return str(data['dto']['result'])
        return None

    def testLogout(self, token):
        """Logout must be denied without a token and succeed with one."""
        PyRequest().expectResponse(Paths.LOGOUT, PyRequest.POST, None, self.expectedDenied)
        PyRequest(token).expectResponse(Paths.LOGOUT, PyRequest.POST, None, self.expectedSuccess)
| 57.432432
| 183
| 0.704941
| 386
| 4,250
| 7.756477
| 0.178756
| 0.101536
| 0.140281
| 0.132265
| 0.665665
| 0.56179
| 0.546426
| 0.515698
| 0.44155
| 0.44155
| 0
| 0.004812
| 0.168706
| 4,250
| 73
| 184
| 58.219178
| 0.842627
| 0
| 0
| 0.178571
| 0
| 0
| 0.154588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.25
| 0.107143
| 0
| 0.285714
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29f31b2343f07216325a81bd944dfce29b98de66
| 610
|
py
|
Python
|
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
2_sheet/2-sheet-hundt-robin/plot-data.py
|
robinhundt/practical-course-parallel-computing
|
08f1fc76324d5c6338b32b2f14c2a11fef3ad619
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt

# number of threads used to compute product of 2 matrices of dim. 1024
data_x = [1, 2, 3, 4, 8, 16, 32, 64, 128,
          256, 512, 1024, 2048, 4096]
# execution time in seconds (one measurement per thread count above)
data_y = [3.300059, 1.664494, 2.294884, 3.200235,
          2.915945, 3.082389, 3.023162, 3.012096,
          2.958028, 2.939918, 2.847527, 2.898556,
          2.876036, 2.963720]

# Plot runtime against thread count; thread counts span several orders
# of magnitude, hence the logarithmic x axis.
plt.figure()
plt.plot(data_x, data_y)
plt.xlabel('# of threads')
plt.xscale('log')
plt.ylabel('execution time in seconds')
plt.title('Exection times of 1024x1024 matrix multi with different thread counts')
plt.show()
| 33.888889
| 82
| 0.672131
| 103
| 610
| 3.941748
| 0.640777
| 0.044335
| 0.073892
| 0.108374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291242
| 0.195082
| 610
| 18
| 83
| 33.888889
| 0.535642
| 0.154098
| 0
| 0
| 0
| 0
| 0.212062
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29f348ce2221e92c79d25e0d2151332aec4f637c
| 1,100
|
py
|
Python
|
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | null | null | null |
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | 18
|
2020-12-02T16:37:21.000Z
|
2021-09-22T19:40:37.000Z
|
memoro/wsgi.py
|
bbengfort/memorandi
|
4591d26c097513d67e11916583ed043e78e87816
|
[
"MIT"
] | null | null | null |
# memoro.wsgi
# WSGI config for memoro project.
#
# Author:   Benjamin Bengfort <[email protected]>
# Created:  Sat Nov 28 13:44:01 2020 -0500
#
# Copyright (C) 2020 Bengfort.com
# For license information, see LICENSE
#
# ID: wsgi.py [] [email protected] $

"""
WSGI config for memoro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""

##########################################################################
## Imports
##########################################################################

import os

from django.core.wsgi import get_wsgi_application
from dotenv import find_dotenv, load_dotenv

##########################################################################
## Load environment and create WSGI application
##########################################################################

# Load variables from the nearest .env file BEFORE the settings module is
# resolved, so DJANGO_SETTINGS_MODULE and secrets can come from .env.
load_dotenv(find_dotenv())
# Default to the development settings unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'memoro.settings.development')
application = get_wsgi_application()
| 28.205128
| 78
| 0.555455
| 113
| 1,100
| 5.318584
| 0.575221
| 0.079867
| 0.043261
| 0.063228
| 0.086522
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.1
| 1,100
| 38
| 79
| 28.947368
| 0.584848
| 0.464545
| 0
| 0
| 0
| 0
| 0.180812
| 0.180812
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
29f5d029a675792751ff0f3ac8e9946cca353e7b
| 1,592
|
py
|
Python
|
test.py
|
SirNate0/PYrho3D
|
b0daa3badccd12adfcb9e7cf50d554c805cc6279
|
[
"MIT"
] | 6
|
2020-02-20T07:42:07.000Z
|
2021-03-27T13:26:47.000Z
|
test.py
|
SirNate0/PYrho3D
|
b0daa3badccd12adfcb9e7cf50d554c805cc6279
|
[
"MIT"
] | null | null | null |
test.py
|
SirNate0/PYrho3D
|
b0daa3badccd12adfcb9e7cf50d554c805cc6279
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2.7
# Smoke-test script for the PYrho3D (Urho3D) Python bindings.
# Python 2 syntax throughout (print statements, u'' literal).
import urho

# Exercise basic binding construction: a vector, a context, a filesystem.
v = urho.Vector3()
c = urho.Context()
fs = urho.FileSystem(c)

from urho import StringHash as sh
import os
print (os.getcwd())

class App(urho.Application):
    #def __init__(self, name):
    #    Dog.__init__(self) # Without this, undefind behavior may occur if the C++ portions are referenced.
    def __init__(self,c):
        # Forward to the C++ Application constructor with the shared context.
        urho.Application.__init__(self,c)
        # self.name = name

    #def bark(self):
    #    return "yap!"
    def Setup(self):
        # Called before the engine initialises; set engine parameters here.
        print 'Setting up the applicaiton'
        self.engineParameters["WindowTitle"] = "PYrho3D"
        return

    def Start(self):
        # Called after engine init: register the script subsystem and run
        # the NinjaSnowWar sample script's Start() entry point.
        print 'Starting up the applicaiton'
        fs = c.GetSubsystem('FileSystem')
        commandFile = fs.GetProgramDir() + "Data/CommandLine.txt"
        print commandFile
        # with open(commandFile) as f:
        #     line = commandFile[0]
        scriptfile = 'Scripts/NinjaSnowWar.as'
        c.RegisterSubsystem(urho.Script(c))
        cache = c.GetSubsystem('ResourceCache')
        sf = cache.GetResource('ScriptFile',scriptfile)
        sf.Execute("void Start()")

a = App(c)
#help(a)
# Variant conversion round-trip check.
var = urho.Variant(u'/home/nathan/Desktop/testClang')
print(var)
print(fs.GetCurrentDir())
#a.engineParameters[urho.StringHash('ResourcePrefixPaths')] = var
#a.engineParameters["FullScreen"] = False
#a.engineParameters[urho.StringHash('FullScreen')] = False
a.engineParameters["WindowWidth"] = 500
c.GetSubsystem(sh('Input')).SetMouseVisible(True)
# Ensure the binding survives deletion of a Python-side reference.
del fs
c.GetSubsystem(sh('Input')).SetMouseVisible(True)
a.Run()
#ep = a.engineParameters
| 24.875
| 107
| 0.66206
| 188
| 1,592
| 5.521277
| 0.5
| 0.081888
| 0.021195
| 0.05973
| 0.075145
| 0.075145
| 0
| 0
| 0
| 0
| 0
| 0.006304
| 0.202889
| 1,592
| 63
| 108
| 25.269841
| 0.811663
| 0.278266
| 0
| 0.060606
| 0
| 0
| 0.185349
| 0.046778
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.090909
| null | null | 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
29f94d2b334b89e0c508fee4d9e22209246bc128
| 5,970
|
py
|
Python
|
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
api/user.py
|
gfoo/fastapi-demo
|
44ceb9e94fa833841756136c3b446f192a311dde
|
[
"Unlicense"
] | null | null | null |
from time import time
from typing import List
from core.security import verify_password
from db import users as DBUsers
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import JSONResponse
from models.user import DBUser
from schemas.user import (UserCreate, UserUpdateActivate, UserUpdatePassword,
UserUpdateSuperuser, UserView)
from sqlalchemy.orm import Session
from .deps import get_current_active_superuser, get_current_active_user, get_db
# All endpoints below are mounted under /users and grouped under the
# "users" tag in the OpenAPI schema.
router = APIRouter(
    prefix='/users',
    tags=['users']
)
@router.get('/', response_model=List[UserView])
def get_all_users(skip: int = 0, limit: int = 100,
                  db: Session = Depends(get_db),
                  current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Retrieve users.
    """
    # Superuser-only: enforced by the get_current_active_superuser dependency;
    # current_user is otherwise unused.
    return DBUsers.get_users(db, skip=skip, limit=limit)
@router.get("/me", response_model=UserView)
def get_user(db: Session = Depends(get_db),
             current_user: DBUser = Depends(get_current_active_user)):
    """
    Retrieve my user.
    """
    # NOTE(review): this module-level name is re-bound by the GET /{user_id}
    # handler below; both routes still work because FastAPI registers them
    # at decoration time, but renaming one would avoid the shadowing.
    return current_user
@router.get("/{user_id}", response_model=UserView)
def get_user(user_id: int, db: Session = Depends(get_db),
             current_user: DBUser = Depends(get_current_active_user)):
    """
    Retrieve a user (only itself if not enough privileges).
    """
    db_user = DBUsers.get_user(db, user_id=user_id)
    # A user may always fetch their own record.
    if db_user == current_user:
        return db_user
    # Fetching anyone else requires superuser privileges.
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="The user does not have enough privileges"
        )
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return db_user
@router.post("/{user_id}/reset_password", response_model=UserView)
def update_user_password_reset(
        user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
        current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Update any user password (require superuser privileges).
    """
    db_user = DBUsers.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    # Superuser reset: the old password is deliberately not checked here.
    DBUsers.update_user_password(
        db=db, user_id=user_id, new_password=user_passwords.new_password)
    return db_user
@router.post("/{user_id}/activate", response_model=UserView)
def update_user_activate(
        user_id: int, user_activate: UserUpdateActivate, db: Session = Depends(get_db),
        current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Update any user activation (require superuser privileges).
    """
    db_user = DBUsers.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    DBUsers.update_user_activate(
        db=db, user_id=user_id, activate=user_activate.activate)
    return db_user
@router.post("/{user_id}/superuser", response_model=UserView)
def update_user_activate(
        user_id: int, user_superuser: UserUpdateSuperuser, db: Session = Depends(get_db),
        current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Update any user privileges (require superuser privileges).
    """
    # NOTE(review): this function reuses the name `update_user_activate`
    # from the /activate route above (it grants/revokes superuser status);
    # it should probably be called update_user_superuser. The route itself
    # works because FastAPI binds handlers at decoration time.
    db_user = DBUsers.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    DBUsers.update_user_superuser(
        db=db, user_id=user_id, superuser=user_superuser.superuser)
    return db_user
@router.post("/{user_id}/password", response_model=UserView)
def update_user_password(
        user_id: int, user_passwords: UserUpdatePassword, db: Session = Depends(get_db),
        current_user: DBUser = Depends(get_current_active_user)):
    """
    Update personal user password (require previous password).
    """
    db_user = DBUsers.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    # Unlike the superuser reset route, a user may only change their own
    # password here, and must prove knowledge of the old one.
    if db_user != current_user:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Can only update its own password"
        )
    if user_passwords.old_password == user_passwords.new_password:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="New password cannot be the same as the old one")
    if not verify_password(user_passwords.old_password, db_user.password):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Incorrect old password")
    DBUsers.update_user_password(
        db=db, user_id=user_id, new_password=user_passwords.new_password)
    return db_user
@router.post("/", response_model=UserView)
def create_user(user: UserCreate, db: Session = Depends(get_db),
                current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Create a user.
    """
    # Email is the uniqueness key: reject the request if already registered.
    db_user = DBUsers.get_user_by_email(db, email=user.email)
    if db_user:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered")
    return DBUsers.create_user(db=db, user=user)
@router.delete("/{user_id}", response_class=JSONResponse)
def delete_user(user_id: int, db: Session = Depends(get_db),
                current_user: DBUser = Depends(get_current_active_superuser)):
    """
    Delete a user (require superuser privileges).
    """
    db_user = DBUsers.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    DBUsers.delete_user(db=db, user_id=user_id)
    # Plain JSON acknowledgement rather than a UserView: the row is gone.
    return JSONResponse(content={"status": "ok", "user_id": user_id})
| 36.402439
| 109
| 0.701675
| 786
| 5,970
| 5.043257
| 0.132316
| 0.05449
| 0.030272
| 0.036327
| 0.658426
| 0.640767
| 0.614026
| 0.591322
| 0.566095
| 0.566095
| 0
| 0.007781
| 0.203518
| 5,970
| 163
| 110
| 36.625767
| 0.825868
| 0.064322
| 0
| 0.486486
| 0
| 0
| 0.070092
| 0.004587
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0.153153
| 0.09009
| 0
| 0.261261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29faa4ea69ec98280ad24b2003914856eee015a8
| 12,800
|
py
|
Python
|
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
governor/postgresql.py
|
billcap/governor
|
0056ec15d973d24f36688783b415fe894ca94db7
|
[
"MIT"
] | null | null | null |
import logging
import os
import psycopg2
import time
import shlex
import subprocess
import shutil
import threading
from urllib.parse import urlparse
logger = logging.getLogger(__name__)
class Postgresql:
    """Manages one local PostgreSQL instance for the governor: initdb,
    start/stop via pg_ctl, replication sync, and ad-hoc queries."""

    # Options applied to every libpq connection this class opens.
    CONN_OPTIONS = {
        'connect_timeout': 3,
        'options': '-c statement_timeout=2000',
    }

    _conn = None           # lazily-opened psycopg2 connection
    _cursor_holder = None  # cached cursor created from _conn
    def __init__(self, config, psql_config):
        """
        :param config: governor configuration object; the fields used here
            are name, listen_address ("host:port"), data_dir, plus the
            credential fields read by parseurl()/connection().
        :param psql_config: extra command-line options appended to the
            `postgres` invocation (see start_threaded).
        """
        self.config = config
        self.psql_config = psql_config
        self.name = config.name
        # listen_address is "host:port"; keep both parts separately.
        self.listen_addresses, self.port = config.listen_address.split(':')
        self.data_dir = config.data_dir
        self.recovery_conf = os.path.join(self.data_dir, 'recovery.conf')
        self.pid_path = os.path.join(self.data_dir, 'postmaster.pid')
        # Common prefix for every pg_ctl invocation (-w waits for completion).
        self._pg_ctl = ('pg_ctl', '-w', '-D', self.data_dir)
        self.members = set()  # list of already existing replication slots
        self.promoted = False
def parseurl(self, url):
r = urlparse('postgres://' + url)
options = {
'host': r.hostname,
'port': r.port or 5432,
'user': self.config.repl_user,
'password': self.config.repl_password,
'database': self.config.dbname,
'fallback_application_name': 'Governor',
}
options.update(self.CONN_OPTIONS)
return options
def pg_ctl(self, *args, **kwargs):
cmd = self._pg_ctl + args
logger.info(cmd)
return subprocess.call(cmd, **kwargs)
def connection(self):
if not self._conn or self._conn.closed:
self._conn = psycopg2.connect(
dbname=self.config.dbname,
port=self.port,
user=self.config.user,
password=self.config.password,
**self.CONN_OPTIONS
)
self._conn.autocommit = True
return self._conn
def _cursor(self):
if not self._cursor_holder or self._cursor_holder.closed:
self._cursor_holder = self.connection().cursor()
return self._cursor_holder
def disconnect(self):
if self._conn:
self._conn.close()
self._conn = self._cursor_holder = None
def query(self, sql, *params):
max_attempts = 3
for i in range(max_attempts):
ex = None
try:
cursor = self._cursor()
cursor.execute(sql, params)
return cursor
except psycopg2.InterfaceError as e:
ex = e
except psycopg2.OperationalError as e:
if self._conn and self._conn.closed == 0:
raise e
ex = e
self.disconnect()
time.sleep(5)
if ex:
raise ex
def data_directory_empty(self):
return not (os.path.exists(self.data_dir) and os.listdir(self.data_dir))
def initialize(self):
if subprocess.call(['initdb', '-D', self.data_dir, '--encoding', 'UTF-8']) == 0:
self.write_pg_hba()
return True
return False
def sync_from_leader(self, leader):
r = self.parseurl(leader.value)
env = os.environ.copy()
if r['password'] is not None:
pgpass = os.path.join(os.environ['ROOT'], 'pgpass')
with open(pgpass, 'w') as f:
os.fchmod(f.fileno(), 0o600)
f.write('{host}:{port}:*:{user}:{password}\n'.format(**r))
env['PGPASSFILE'] = pgpass
try:
subprocess.check_call([
'pg_basebackup', '-R', '-P', '-w',
'-D', self.data_dir,
'--host', r['host'],
'--port', str(r['port']),
'-U', self.config.repl_user,
], env=env)
except subprocess.CalledProcessError:
return False
finally:
os.chmod(self.data_dir, 0o700)
return True
def is_leader(self):
is_leader = not self.query('SELECT pg_is_in_recovery()').fetchone()[0]
if is_leader:
self.promoted = False
return is_leader
def is_running(self):
return self.pg_ctl('status', stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0
def start_threaded(self):
logger = logging.getLogger('postgres')
cmd = [
'postgres', '-i',
'-p', self.port,
'-h', self.listen_addresses,
'-D', self.data_dir,
] + self.psql_config
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
while True:
line = proc.stdout.readline()
if not line:
break
logging.info(line)
def start(self):
if self.is_running():
self.load_replication_slots()
logger.error('Cannot start PostgreSQL because one is already running.')
return False
if os.path.exists(self.pid_path):
os.remove(self.pid_path)
logger.info('Removed %s', self.pid_path)
self.disconnect()
thread = threading.Thread(target=self.start_threaded)
thread.daemon = True
thread.start()
return True
def stop(self):
self.disconnect()
return self.pg_ctl('stop', '-m', 'fast') != 0
def reload(self):
return self.pg_ctl('reload') == 0
def restart(self):
self.disconnect()
return self.pg_ctl('restart', '-m', 'fast') == 0
def is_healthy(self):
if not self.is_running():
logger.warning('Postgresql is not running.')
return False
return True
def is_healthiest_node(self, cluster):
if self.is_leader():
return True
if int(cluster.optime.value) - self.xlog_position() > self.config.maximum_lag:
return False
for name, m in cluster.members.items():
if name == self.name:
continue
try:
member_conn = psycopg2.connect(**self.parseurl(m.value))
member_conn.autocommit = True
member_cursor = member_conn.cursor()
member_cursor.execute(
"SELECT pg_is_in_recovery(), %s - (pg_last_xlog_replay_location() - '0/0000000'::pg_lsn)",
(self.xlog_position(), ))
row = member_cursor.fetchone()
member_cursor.close()
member_conn.close()
logger.error([self.name, name, row])
if not row[0] or row[1] < 0:
return False
except psycopg2.Error:
continue
return True
def write_pg_hba(self):
if self.config.password:
method = 'md5'
else:
logger.warning('No password specified')
method = 'trust'
hba = ['local all all trust']
for subnet in self.config.allow_address.split():
hba.append(' '.join(['host', self.config.dbname, self.config.user, subnet, method]))
if self.config.repl_password:
method = 'md5'
else:
logger.warning('No replication password specified')
method = 'trust'
for subnet in self.config.repl_allow_address.split():
hba.append(' '.join(['host', 'replication', self.config.repl_user, subnet, method]))
config = ConfigFile(os.path.join(self.data_dir, 'pg_hba.conf'))
config.write_config(*hba)
def primary_conninfo(self, leader_url):
r = self.parseurl(leader_url)
values = ['{}={}'.format(k, r[k]) for k in ['user', 'host', 'port']]
if r['password'] is not None:
values.append('password={}'.format(r['password']))
return '{} sslmode=prefer sslcompression=1'.format(' '.join(values))
def check_recovery_conf(self, leader):
if not os.path.isfile(self.recovery_conf):
return False
pattern = (leader and self.primary_conninfo(leader.value))
for key, value in RecoveryConf(self.recovery_conf).load_config():
if key == 'primary_conninfo':
if not pattern:
return False
return value[1:-1] == pattern
return not pattern
def write_recovery_conf(self, leader):
contents = [
('standby_mode', 'on'),
('recovery_target_timeline', 'latest'),
]
if leader:
contents.append(('primary_slot_name', self.name))
contents.append(('primary_conninfo', self.primary_conninfo(leader.value)))
config = RecoveryConf(self.recovery_conf)
config.write_config(*contents, truncate = not leader)
def follow_the_leader(self, leader):
if not self.check_recovery_conf(leader):
self.write_recovery_conf(leader)
self.restart()
def promote(self):
self.promoted = (self.pg_ctl('promote') == 0)
return self.promoted
def create_users(self):
op = ('ALTER' if self.config.user == 'postgres' else 'CREATE')
query = '{} USER "{}" WITH {}'.format
# normal client user
self.create_user(query(op, self.config.user, 'SUPERUSER'), self.config.password)
# replication user
self.create_user(query('CREATE', self.config.repl_user, 'REPLICATION'), self.config.repl_password)
def create_user(self, query, password):
if password:
return self.query(query + ' ENCRYPTED PASSWORD %s', password)
return self.query(query)
def xlog_position(self):
return self.query("""SELECT CASE WHEN pg_is_in_recovery()
THEN pg_last_xlog_replay_location() - '0/0000000'::pg_lsn
ELSE pg_current_xlog_location() - '0/00000'::pg_lsn END""").fetchone()[0]
def load_replication_slots(self):
cursor = self.query("SELECT slot_name FROM pg_replication_slots WHERE slot_type='physical'")
self.members = set(r[0] for r in cursor)
def sync_replication_slots(self, members):
members = set(name for name in members if name != self.name)
# drop unused slots
for slot in self.members - members:
self.query("""SELECT pg_drop_replication_slot(%s)
WHERE EXISTS(SELECT 1 FROM pg_replication_slots
WHERE slot_name = %s)""", slot, slot)
# create new slots
for slot in members - self.members:
self.query("""SELECT pg_create_physical_replication_slot(%s)
WHERE NOT EXISTS (SELECT 1 FROM pg_replication_slots
WHERE slot_name = %s)""", slot, slot)
self.members = members
def create_replication_slots(self, cluster):
self.sync_replication_slots([name for name in cluster.members if name != self.name])
def drop_replication_slots(self):
self.sync_replication_slots([])
def last_operation(self):
return self.xlog_position()
class ConfigFile:
    """A config file backed by a one-time '.backup' snapshot.

    On first sight of a path the original file (or an empty placeholder)
    is saved as '<path>.backup'; write_config can reset the file from
    that snapshot before appending new lines.
    """
    __slots__ = ('path',)

    def __init__(self, path):
        self.path = path
        backup_path = path + '.backup'
        if os.path.exists(backup_path):
            return  # snapshot already taken on a previous run
        if os.path.exists(path):
            os.rename(path, backup_path)
        else:
            open(backup_path, 'w').close()  # empty placeholder snapshot

    def reload_backup(self):
        """Restore the file from its '.backup' snapshot."""
        shutil.copy(self.path + '.backup', self.path)

    def load_config(self):
        """Yield every non-comment line (newline included) from the file."""
        with open(self.path) as fh:
            yield from (ln for ln in fh if not ln.startswith('#'))

    def write_config(self, *lines, reload=True, check_duplicates=True, truncate=False):
        """Write the given lines to the file.

        reload: reset the file from the backup snapshot first.
        check_duplicates: skip lines already present in the file.
        truncate: overwrite instead of appending.
        """
        if reload:
            self.reload_backup()
        existing = set(self.load_config()) if check_duplicates else ()
        with open(self.path, 'w' if truncate else 'a') as fh:
            fh.writelines('\n' + entry for entry in lines if entry not in existing)
            fh.write('\n')
class RecoveryConf(ConfigFile):
    """ConfigFile variant for recovery.conf, whose lines are "key = 'value'" pairs."""

    def load_config(self):
        """Yield (key, value) tuples; the value keeps its surrounding quotes."""
        for raw in super().load_config():
            key, _, value = raw.strip().partition(' = ')
            yield key, value

    def write_config(self, *args, reload=True, check_duplicates=True, **kwargs):
        """Write (key, value) pairs, skipping keys already present in the file."""
        if reload:
            self.reload_backup()
        known = {pair[0] for pair in self.load_config()} if check_duplicates else ()
        formatted = ("{} = '{}'".format(key, value)
                     for key, value in args if key not in known)
        return super().write_config(*formatted, reload=False, check_duplicates=False, **kwargs)
| 33.952255
| 114
| 0.563984
| 1,485
| 12,800
| 4.701684
| 0.186532
| 0.03151
| 0.01733
| 0.006875
| 0.171584
| 0.097393
| 0.070753
| 0.041249
| 0.041249
| 0.03065
| 0
| 0.007933
| 0.320469
| 12,800
| 376
| 115
| 34.042553
| 0.79478
| 0.008828
| 0
| 0.168285
| 0
| 0
| 0.126962
| 0.020503
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126214
| false
| 0.061489
| 0.029126
| 0.016181
| 0.288026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
29fda9d9b2256b8b4efc118aa8ea61e7cbc1a09c
| 264
|
py
|
Python
|
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
thirdparty/flask/template/macro_demo.py
|
gwaysoft/python
|
a74a0b553dfca9606083a41ab6d03801e67d2467
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
    """Render the macro demo template with a sample field type and value."""
    context = {"type": "text", "value": "from endpoint"}
    return render_template("macro.html", **context)
if __name__ == '__main__':
    # Print the routing table for quick inspection, then serve on all interfaces.
    print(app.url_map)
    app.run(debug=True, host="0.0.0.0")
| 18.857143
| 76
| 0.674242
| 39
| 264
| 4.179487
| 0.692308
| 0.03681
| 0.03681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.151515
| 264
| 13
| 77
| 20.307692
| 0.709821
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0.125
| 0.375
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 1
|
29fded4c87d470f4257846244ccbee2b48588393
| 8,956
|
py
|
Python
|
backend/account/migrations/0001_initial.py
|
CS178A-B/final-project-bjls
|
aebb8042f2d958caac00e31b27b445b9079901d0
|
[
"MIT"
] | null | null | null |
backend/account/migrations/0001_initial.py
|
CS178A-B/final-project-bjls
|
aebb8042f2d958caac00e31b27b445b9079901d0
|
[
"MIT"
] | 20
|
2020-10-21T19:16:15.000Z
|
2021-09-03T05:48:20.000Z
|
backend/account/migrations/0001_initial.py
|
CS178A-B/R-Finder
|
aebb8042f2d958caac00e31b27b445b9079901d0
|
[
"MIT"
] | 1
|
2020-10-22T04:49:45.000Z
|
2020-10-22T04:49:45.000Z
|
# Generated by Django 2.2.13 on 2021-03-10 21:33
import account.models
import datetime
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the account app: a custom User with student/faculty
    role flags, plus Course, Faculty, Student, Job, Comment, Application and
    StudentCourse models and their cross-references.

    Auto-generated by Django; edit with care — applied migrations must stay
    byte-stable.
    """
    initial = True
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        # Custom user model replacing django.contrib.auth's default.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                # NOTE(review): verbose_name here is a model *class*, not a
                # string — likely unintended, but preserved as generated.
                ('is_student', models.BooleanField(default=False, verbose_name=account.models.Student)),
                ('is_faculty', models.BooleanField(default=False, verbose_name=account.models.Faculty)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('description', models.CharField(max_length=150)),
                ('abbrev', models.CharField(max_length=50)),
                ('grade', models.CharField(blank=True, default='', max_length=3, null=True)),
            ],
        ),
        # Faculty/Student are one-to-one profile extensions of User
        # (the user FK doubles as the primary key).
        migrations.CreateModel(
            name='Faculty',
            fields=[
                ('department', models.CharField(default='', max_length=50)),
                ('profile_completeness', models.IntegerField(default=0)),
                ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Student',
            fields=[
                ('major', models.CharField(default='', max_length=50)),
                ('GPA', models.FloatField(blank=True, default=0, null=True)),
                ('profile_completeness', models.IntegerField(default=0)),
                ('resume_pdf', models.FileField(blank=True, null=True, upload_to='pdf')),
                ('transcript', models.FileField(blank=True, null=True, upload_to='pdf')),
                ('user', models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(max_length=150)),
                # NOTE(review): verbose_name was frozen to the date the
                # migration was generated (2021-03-10); preserved as-is.
                ('posted_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),
                ('hourly_salary', models.FloatField(blank=True, default=10, max_length=10)),
                ('hours_per_week', models.IntegerField(default=10)),
                ('course_req', models.ManyToManyField(blank=True, default=0, to='account.Course')),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.CharField(max_length=1500)),
                ('course', models.ManyToManyField(blank=True, default=0, to='account.Course')),
            ],
        ),
        # Application is the through-model linking Student and Job.
        migrations.CreateModel(
            name='Application',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('application_date', models.DateField(verbose_name=datetime.date(2021, 3, 10))),
                ('applicant_score', models.IntegerField(default=0)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Job')),
            ],
        ),
        # StudentCourse is the through-model linking Student and Course.
        migrations.CreateModel(
            name='StudentCourse',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('grade', models.CharField(default='', max_length=50)),
                ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Course')),
                ('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student')),
            ],
        ),
        # Deferred M2M/FK fields, added after all models exist.
        migrations.AddField(
            model_name='student',
            name='applications',
            field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Job'),
        ),
        migrations.AddField(
            model_name='student',
            name='comments_recv',
            field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),
        ),
        migrations.AddField(
            model_name='student',
            name='course_taken',
            field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Course'),
        ),
        migrations.AddField(
            model_name='job',
            name='applications',
            field=models.ManyToManyField(blank=True, default=0, through='account.Application', to='account.Student'),
        ),
        migrations.AddField(
            model_name='job',
            name='poster',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),
        ),
        migrations.AddField(
            model_name='faculty',
            name='comments_made',
            field=models.ManyToManyField(blank=True, default=0, to='account.Comment'),
        ),
        migrations.AddField(
            model_name='faculty',
            name='courses_taught',
            field=models.ManyToManyField(blank=True, default=0, to='account.Course'),
        ),
        migrations.AddField(
            model_name='course',
            name='students',
            field=models.ManyToManyField(blank=True, default=0, through='account.StudentCourse', to='account.Student'),
        ),
        migrations.AddField(
            model_name='comment',
            name='commenter',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='account.Faculty'),
        ),
        migrations.AddField(
            model_name='application',
            name='student',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.Student'),
        ),
    ]
| 50.03352
| 329
| 0.61054
| 924
| 8,956
| 5.786797
| 0.214286
| 0.049373
| 0.035908
| 0.061717
| 0.602581
| 0.580886
| 0.500655
| 0.441743
| 0.421545
| 0.404526
| 0
| 0.013874
| 0.251563
| 8,956
| 178
| 330
| 50.314607
| 0.783828
| 0.005136
| 0
| 0.490566
| 1
| 0
| 0.186932
| 0.007971
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.006289
| 0.050314
| 0
| 0.075472
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b065798f8f3175be2995f3dc86fae9e7dc987b7
| 1,249
|
py
|
Python
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2018-10-05T17:03:01.000Z
|
2018-10-05T17:03:01.000Z
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2017-01-06T19:20:32.000Z
|
2017-01-06T19:20:32.000Z
|
tests/ozpcenter_model_access/test_contact_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 7
|
2016-12-16T15:42:05.000Z
|
2020-09-05T01:11:27.000Z
|
import pytest
from django.test import TestCase
from django.test import override_settings
import ozpcenter.api.contact_type.model_access as model_access
from ozpcenter.models import ContactType
from tests.cases.factories import ContactTypeFactory
@pytest.mark.model_access
@override_settings(ES_ENABLED=False)
class ContactTypeTest(TestCase):
    """Unit tests for the contact_type model-access helpers."""

    @classmethod
    def setUpTestData(cls):
        # Five persisted contact types shared by every test in the class.
        cls.contact_types = ContactTypeFactory.create_batch(5)

    def setUp(self):
        pass

    def test__get_all_contact_types(self):
        fetched = model_access.get_all_contact_types().order_by("id")
        self.assertListEqual(list(fetched), self.contact_types)

    def test__get_contact_type_by_name(self):
        target = self.contact_types[0]
        found = model_access.get_contact_type_by_name(target.name)
        self.assertEqual(found, target)

    def test__get_contact_type_by_name__not_found(self):
        # Second argument False suppresses the DoesNotExist and returns None.
        missing = model_access.get_contact_type_by_name('Not Existent', False)
        self.assertIsNone(missing)

    def test__get_contact_type_by_name__not_found_raises_error(self):
        with self.assertRaises(ContactType.DoesNotExist):
            model_access.get_contact_type_by_name('Not Existent')
| 30.463415
| 83
| 0.767814
| 164
| 1,249
| 5.457317
| 0.365854
| 0.110615
| 0.093855
| 0.107263
| 0.236872
| 0.236872
| 0.236872
| 0.172067
| 0.172067
| 0
| 0
| 0.001905
| 0.159327
| 1,249
| 40
| 84
| 31.225
| 0.850476
| 0
| 0
| 0
| 0
| 0
| 0.020817
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.222222
| false
| 0.037037
| 0.222222
| 0
| 0.481481
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b096109d1a756991d2981702ec6615bda617d75
| 3,314
|
py
|
Python
|
emoji-list.unicode.crawler.py
|
SHITianhao/emoji-dataset
|
41812649f518f69472722c56d4aa77faeb9bbe8a
|
[
"MIT"
] | 2
|
2017-12-19T06:44:59.000Z
|
2020-01-17T20:06:53.000Z
|
emoji-list.unicode.crawler.py
|
SHITianhao/emoji-dataset
|
41812649f518f69472722c56d4aa77faeb9bbe8a
|
[
"MIT"
] | null | null | null |
emoji-list.unicode.crawler.py
|
SHITianhao/emoji-dataset
|
41812649f518f69472722c56d4aa77faeb9bbe8a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This module is used to crawler emoji unicode from http://www.unicode.org/ """
import urllib
import json
import base64
import os
from bs4 import BeautifulSoup
# Emoji chart pages to scrape, one per Unicode emoji version.
__EMOJI_V4_URL = "http://www.unicode.org/emoji/charts/emoji-list.html"
__EMOJI_V5_URL = "http://www.unicode.org/emoji/charts-beta/emoji-list.html"
# Prefix for the per-version image output directories.
__IMG_FOLDER_NAME = "emoji_imgs"
# Python 2 built-in file(); JSON read with a BOM-tolerant decode.
# Presumably maps emoji -> canonical short name; verify against the file.
emoji_file = file("emoji_inverse.json", "r")
emojis = json.loads(emoji_file.read().decode("utf-8-sig"))
print "emoji_inverse.json loaded"
def decode_base64(data):
    """Decode base64, padding being optional.

    :param data: Base64 data as an ASCII byte string
    :returns: The decoded byte string.
    """
    # Fixed: the original computed `4 - len(data) % 4`, which is 4 (not 0)
    # for correctly padded input, appending four spurious '=' characters.
    missing_padding = -len(data) % 4
    if missing_padding:
        data += b'=' * missing_padding
    # b64decode replaces the deprecated base64.decodestring alias.
    return base64.b64decode(data)
def unicodes_str_to_emoji(unicodes):
if isinstance(unicodes, unicode):
unicodes = unicodes.encode("utf8")
else:
print "not a string"
return
list_unicode = unicodes.split(' ')
emoji = ''
for code in list_unicode:
code = code[2:]
pending_size = 8 - len(code)
for _ in range(pending_size):
code = '0' + code
code = '\U' + code
emoji += code
return unicode(emoji, "unicode_escape").encode("utf8")
def crawler_emojis(version):
print "get version: " + version
# create folder
dir_path = __IMG_FOLDER_NAME + '_' + version
if not os.path.exists(dir_path):
os.makedirs(dir_path)
print "folder created"
URL = ''
if version == 'V4':
URL = __EMOJI_V4_URL
elif version == 'V5':
URL = __EMOJI_V5_URL
__PAGE = urllib.urlopen(__EMOJI_V4_URL)
__HTML = __PAGE.read()
__PAGE.close()
__SOUP = BeautifulSoup(__HTML, 'html.parser')
print "Get Page"
_code_list = []
_img_list = []
_name_list = []
for td in __SOUP.find_all("td"):
_class_name = td.get("class")[0]
if _class_name == "code":
_code_list.append(td.a.get_text())
elif _class_name == "andr":
_img_list.append(td.a.img.get("src"))
elif _class_name == "name":
_name_list.append(td.get_text())
_json_list = []
for i in range(len(_code_list)):
# encode img
img_base64 = _img_list[i]
img_data = decode_base64(img_base64[21:])
code = _code_list[i]
emoji = unicodes_str_to_emoji(code)
name_to_save = code + ".png"
# save img to disk
with open(dir_path + "/" + name_to_save, "wb") as f:
f.write(img_data)
f.close()
# write data in json form
if emoji.decode('utf-8') in emojis:
name = emojis[emoji.decode('utf-8')]
else:
name = ''
data = {
"unicode": code,
"name": name,
"description": _name_list[i].encode('utf-8'),
"img": name_to_save,
"emoji": emoji
}
_json_list.append(data)
data_file_name = version + '_data.json'
with open(data_file_name, 'w') as outfile:
json.dump(_json_list, outfile, indent=4, sort_keys=True, ensure_ascii=False)
print "Done version " + version + "\n"
# Crawl both published chart versions.
crawler_emojis('V4')
crawler_emojis('V5')
| 28.568966
| 84
| 0.60169
| 436
| 3,314
| 4.284404
| 0.302752
| 0.010707
| 0.022484
| 0.027302
| 0.033191
| 0.033191
| 0.033191
| 0
| 0
| 0
| 0
| 0.017348
| 0.269463
| 3,314
| 115
| 85
| 28.817391
| 0.754234
| 0.032589
| 0
| 0.022989
| 0
| 0
| 0.122189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.057471
| null | null | 0.068966
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b13fbf54481cade8e8734d48b08412beb1ed9cd
| 4,009
|
py
|
Python
|
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | 1
|
2020-02-03T18:31:20.000Z
|
2020-02-03T18:31:20.000Z
|
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | null | null | null |
tests/io/export/voc/test_create_annotation.py
|
wbknez/breakdb
|
f783820425c8cb70d8caedc6f5839a72de7c945e
|
[
"Apache-2.0"
] | null | null | null |
"""
Contains unit tests to ensure single database items are created correctly in a
Pascal VOC compatible format.
"""
import os
from xml.etree.ElementTree import Element, SubElement
import numpy as np
from breakdb.io.export.voc import create_annotation
from tests.helpers.dataset import create_random_string
from tests.helpers.xml import match
class TestCreateAnnotation:
    """
    Test suite for :function: 'create_annotation'.
    """
    def test_create_annotation_does_not_create_annotation_if_empty(self):
        """With no coordinate lists, the annotation has no <object> element."""
        width = np.random.randint(100, 1920)
        height = np.random.randint(100, 1200)
        depth = np.random.choice([1, 3], 1)[0]
        # NOTE(review): x and y are computed but unused in this test.
        x = np.random.randint(0, width, 5)
        y = np.random.randint(0, height, 5)
        random_paths = [create_random_string(10) for _ in range(5)]
        file_path = os.path.join(*random_paths) + ".png"
        xml = create_annotation(file_path, width, height, depth, [])
        # Build the expected XML tree by hand; SubElement creation order
        # determines document order, which match() compares against.
        expected = Element("annotation")
        folder = SubElement(expected, 'folder')
        filename = SubElement(expected, 'filename')
        path = SubElement(expected, 'path')
        source = SubElement(expected, 'source')
        size = SubElement(expected, 'size')
        segmented = SubElement(expected, 'segmented')
        database = SubElement(source, 'database')
        width_tag = SubElement(size, 'width')
        height_tag = SubElement(size, 'height')
        depth_tag = SubElement(size, 'depth')
        # Populate the text content of each node.
        folder.text = os.path.basename(os.path.dirname(file_path))
        filename.text = os.path.basename(file_path)
        path.text = file_path
        segmented.text = "0"
        database.text = "Unknown"
        width_tag.text = str(width)
        height_tag.text = str(height)
        depth_tag.text = str(depth)
        match(xml, expected)
    def test_create_annotation_creates_well_formed_xml(self):
        """With one coordinate list, the annotation carries an <object>
        whose bounding box spans the min/max of the x and y samples."""
        width = np.random.randint(100, 1920)
        height = np.random.randint(100, 1200)
        depth = np.random.choice([1, 3], 1)[0]
        x = np.random.randint(0, width, 5)
        y = np.random.randint(0, height, 5)
        # Interleave into the flat [x0, y0, x1, y1, ...] form expected.
        coords = [coord for coords in zip(x, y) for coord in coords]
        random_paths = [create_random_string(10) for _ in range(5)]
        file_path = os.path.join(*random_paths) + ".png"
        xml = create_annotation(file_path, width, height, depth, [coords])
        # Expected tree; element creation order is load-bearing (see above).
        expected = Element("annotation")
        folder = SubElement(expected, 'folder')
        filename = SubElement(expected, 'filename')
        path = SubElement(expected, 'path')
        source = SubElement(expected, 'source')
        size = SubElement(expected, 'size')
        segmented = SubElement(expected, 'segmented')
        obj = SubElement(expected, 'object')
        database = SubElement(source, 'database')
        width_tag = SubElement(size, 'width')
        height_tag = SubElement(size, 'height')
        depth_tag = SubElement(size, 'depth')
        name = SubElement(obj, "name")
        pose = SubElement(obj, "pose")
        truncated = SubElement(obj, "truncated")
        difficult = SubElement(obj, "difficult")
        bndbox = SubElement(obj, "bndbox")
        x_min = SubElement(bndbox, "xmin")
        y_min = SubElement(bndbox, "ymin")
        x_max = SubElement(bndbox, "xmax")
        y_max = SubElement(bndbox, "ymax")
        folder.text = os.path.basename(os.path.dirname(file_path))
        filename.text = os.path.basename(file_path)
        path.text = file_path
        segmented.text = "0"
        database.text = "Unknown"
        width_tag.text = str(width)
        height_tag.text = str(height)
        depth_tag.text = str(depth)
        # Object name is "<file stem>-1" (first object in the image).
        name.text = f"{os.path.basename(os.path.splitext(file_path)[0])}-1"
        pose.text = "Unspecified"
        truncated.text = "0"
        difficult.text = "0"
        x_min.text = str(np.min(x))
        y_min.text = str(np.min(y))
        x_max.text = str(np.max(x))
        y_max.text = str(np.max(y))
        match(xml, expected)
| 32.860656
| 78
| 0.626091
| 489
| 4,009
| 5.00818
| 0.214724
| 0.095549
| 0.049
| 0.0294
| 0.651695
| 0.619028
| 0.619028
| 0.619028
| 0.619028
| 0.619028
| 0
| 0.018629
| 0.250187
| 4,009
| 121
| 79
| 33.132231
| 0.796075
| 0.038663
| 0
| 0.642857
| 0
| 0
| 0.074393
| 0.013573
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.071429
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b17b051f3187df2daa4e97e42b6ba22e41b2320
| 322
|
py
|
Python
|
base/models/provider.py
|
musicmash/notify
|
0f1c72207979e812c6485238da32ca7f5b463859
|
[
"MIT"
] | null | null | null |
base/models/provider.py
|
musicmash/notify
|
0f1c72207979e812c6485238da32ca7f5b463859
|
[
"MIT"
] | 86
|
2020-07-13T11:14:24.000Z
|
2022-03-25T01:10:30.000Z
|
base/models/provider.py
|
musicmash/notify
|
0f1c72207979e812c6485238da32ca7f5b463859
|
[
"MIT"
] | null | null | null |
from django.db import models
from .base import BaseModel
class Provider(BaseModel):
    """A notification provider, identified by its name."""
    # The provider name doubles as the primary key.
    name = models.CharField(max_length=50, primary_key=True)
    class Meta:
        db_table = "providers"
        verbose_name = "Provider"
        verbose_name_plural = "Providers"
    def __str__(self):
        # Display providers by name (admin, shell, logs).
        return self.name
| 18.941176
| 60
| 0.673913
| 39
| 322
| 5.307692
| 0.666667
| 0.10628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00823
| 0.245342
| 322
| 16
| 61
| 20.125
| 0.843621
| 0
| 0
| 0
| 0
| 0
| 0.080745
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.2
| 0.1
| 0.7
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
4b1fa47c925f46978fe64a19c7b80b111b447a75
| 2,798
|
py
|
Python
|
gopredict/modelo.py
|
ajalba/gopredict
|
bfcb1c4c10b6787da10c7515ae2adf65252bb8c6
|
[
"MIT"
] | null | null | null |
gopredict/modelo.py
|
ajalba/gopredict
|
bfcb1c4c10b6787da10c7515ae2adf65252bb8c6
|
[
"MIT"
] | 39
|
2021-10-31T16:51:39.000Z
|
2021-11-22T09:56:04.000Z
|
gopredict/modelo.py
|
ajalba/gopredict
|
bfcb1c4c10b6787da10c7515ae2adf65252bb8c6
|
[
"MIT"
] | null | null | null |
"""
Clase para representar a los diferentes modelos y su comportamiento
atributos(de momento)
df=dataframe de entrenamiento proviniente del conjunto de datos de entrenamiento del usuario
x_train,x_test,y_train,y_test, particiones de df para entrenar el modelo
El resto de métodos son autoexplicativos
"""
from numpy import array
from pandas.core.frame import DataFrame
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
class Modelo:
    """Wraps a logistic-regression model together with the train/test
    partitions derived from the user's training dataframe.

    Attributes: df (source dataframe), X_train/X_test/y_train/y_test
    (partitions), y_pred (last test-set prediction), modelo (the estimator).
    """

    # Initialize a model from its training data.
    def __init__(self, data):
        self.df = data
        self.X_train = None
        self.X_test = None
        self.y_train = None
        self.y_test = None
        self.y_pred = None
        self.modelo = LogisticRegression()

    def realizar_particion(self, cols_atributos: array):
        """Return a deep copy of the dataframe restricted to the given columns."""
        aux = self.df.copy(deep=True)
        return aux[cols_atributos]

    def particion_train_test(self, X: DataFrame, y: DataFrame, test_porcentaje: int):
        """Split X/y into train and test partitions; return True on success."""
        try:
            self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(
                X, y, test_size=test_porcentaje, random_state=0)
            return True
        except Exception:  # narrowed from a bare except (kept best-effort)
            return False

    def entrenar(self):
        """Fit the model on the training partition; return True on success."""
        try:
            self.modelo.fit(self.X_train, self.y_train)
            return True
        except Exception as e:
            print(e)
            return False

    def predecir_entrenamiento(self):
        """Predict on the test partition, storing y_pred; return True on success."""
        try:
            self.y_pred = self.modelo.predict(self.X_test)
            return True
        except Exception:  # narrowed from a bare except
            return False

    def get_metricas_rendimiento(self):
        """Return [accuracy, precision, recall, f1] for the last prediction."""
        accuracy = metrics.accuracy_score(self.y_test, self.y_pred)
        precision = metrics.precision_score(self.y_test, self.y_pred, zero_division=0)
        recall = metrics.recall_score(self.y_test, self.y_pred)
        f1 = metrics.f1_score(self.y_test, self.y_pred)
        return [accuracy, precision, recall, f1]

    def get_metricas_matriz_confusion(self):
        """Return the confusion matrix for the last prediction."""
        return metrics.confusion_matrix(self.y_test, self.y_pred)

    def get_metricas_roc(self):
        """Return a dataframe with the ROC curve (TPR/FPR) on the test set."""
        y_pred_proba = self.modelo.predict_proba(self.X_test)[::, 1]
        # Fixed: the original called roc_curve twice with identical
        # arguments, discarding the first result.
        fpr, tpr, _ = metrics.roc_curve(self.y_test, y_pred_proba)
        roc_data = pd.DataFrame([])
        roc_data['True Positive'] = tpr
        roc_data['False Positive'] = fpr
        return roc_data
| 36.337662
| 92
| 0.686919
| 387
| 2,798
| 4.764858
| 0.29199
| 0.05423
| 0.043926
| 0.035249
| 0.132321
| 0.103037
| 0.093275
| 0.043384
| 0.043384
| 0.043384
| 0
| 0.002824
| 0.240529
| 2,798
| 76
| 93
| 36.815789
| 0.864941
| 0.225161
| 0
| 0.240741
| 0
| 0
| 0.012535
| 0
| 0
| 0
| 0
| 0.013158
| 0
| 1
| 0.148148
| false
| 0
| 0.111111
| 0.018519
| 0.462963
| 0.018519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
4b26f2f9d05f6e347a28ccd82f8bc4ee81785946
| 808
|
py
|
Python
|
essEcommerce/views.py
|
AymanTareq/cit_ecommerce
|
7a000f9f9ed76af99ec3c5a5faa1dbde8b988370
|
[
"CC0-1.0"
] | null | null | null |
essEcommerce/views.py
|
AymanTareq/cit_ecommerce
|
7a000f9f9ed76af99ec3c5a5faa1dbde8b988370
|
[
"CC0-1.0"
] | null | null | null |
essEcommerce/views.py
|
AymanTareq/cit_ecommerce
|
7a000f9f9ed76af99ec3c5a5faa1dbde8b988370
|
[
"CC0-1.0"
] | null | null | null |
from django.shortcuts import render
from .models import *
def all_product(request):
    """Render the product listing page with every Product in the database."""
    context = {'products': Product.objects.all()}
    return render(request, 'essEcommerce/all_product.html', context)
def cart(request):
    """Render the shopping-cart page.

    Authenticated users get (or create) their open Order (status=False)
    and its items; anonymous visitors get an empty item list and a dict
    stub for the order (keys presumably mirror Order properties the
    template reads — TODO confirm against the Order model).
    """
    if request.user.is_authenticated:
        customer = request.user.customer
        # get_or_create returns (obj, created); the created flag is unused,
        # so mark it as such instead of binding a misleading name.
        order, _created = Order.objects.get_or_create(customer=customer, status=False)
        items = order.orderitem_set.all()
    else:
        items = []
        order = {
            'get_cart_total': 0,
            'get_cart_total_price': 0
        }
    context = {
        'order': order,
        'items': items,
    }
    return render(request, 'essEcommerce/cart.html', context)
def check_out(request):
    """Render the checkout page; no extra context is needed."""
    template_name = 'essEcommerce/checkout.html'
    return render(request, template_name)
| 26.064516
| 85
| 0.634901
| 89
| 808
| 5.629213
| 0.426966
| 0.071856
| 0.113772
| 0.185629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003306
| 0.251238
| 808
| 30
| 86
| 26.933333
| 0.824793
| 0
| 0
| 0.076923
| 0
| 0
| 0.159653
| 0.095297
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115385
| false
| 0
| 0.076923
| 0.038462
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.