hexsha (string, 40) | size (int64, 3–1.03M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, 3–972) | max_stars_repo_name (string, 6–130) | max_stars_repo_head_hexsha (string, 40–78) | max_stars_repo_licenses (list, 1–10) | max_stars_count (int64, 1–191k, nullable) | max_stars_repo_stars_event_min_datetime (string, 24, nullable) | max_stars_repo_stars_event_max_datetime (string, 24, nullable) | max_issues_repo_path (string, 3–972) | max_issues_repo_name (string, 6–130) | max_issues_repo_head_hexsha (string, 40–78) | max_issues_repo_licenses (list, 1–10) | max_issues_count (int64, 1–116k, nullable) | max_issues_repo_issues_event_min_datetime (string, 24, nullable) | max_issues_repo_issues_event_max_datetime (string, 24, nullable) | max_forks_repo_path (string, 3–972) | max_forks_repo_name (string, 6–130) | max_forks_repo_head_hexsha (string, 40–78) | max_forks_repo_licenses (list, 1–10) | max_forks_count (int64, 1–105k, nullable) | max_forks_repo_forks_event_min_datetime (string, 24, nullable) | max_forks_repo_forks_event_max_datetime (string, 24, nullable) | content (string, 3–1.03M) | avg_line_length (float64, 1.13–941k) | max_line_length (int64, 2–941k) | alphanum_fraction (float64, 0–1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
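The header above is the dataset schema: one row per source file, with parallel star/issue/fork metadata for the repositories it appears in. A minimal sketch of iterating such rows, assuming they have been exported as JSON lines keyed by the column names; the filename is hypothetical:

```python
import json

# Hypothetical JSON-lines export of the rows below, one object per row,
# keyed by the column names in the schema header above.
with open("code_dataset_sample.jsonl") as f:
    for line in f:
        row = json.loads(line)
        print(row["max_stars_repo_name"], row["size"], row["alphanum_fraction"])
```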
f179aeade8cd665162c97b36f563c6fb746edcec | 2,125 | py | Python | tests/unit/resolution_resolvelib/conftest.py | PreVeil/pip | 286cb388bfbc7df6b4b05277c85f6e49e3e291d3 | ["MIT"] | 1 | 2020-04-21T08:58:10.000Z | 2020-04-21T08:58:10.000Z | tests/unit/resolution_resolvelib/conftest.py | PreVeil/pip | 286cb388bfbc7df6b4b05277c85f6e49e3e291d3 | ["MIT"] | null | null | null | tests/unit/resolution_resolvelib/conftest.py | PreVeil/pip | 286cb388bfbc7df6b4b05277c85f6e49e3e291d3 | ["MIT"] | 1 | 2020-05-03T01:11:08.000Z | 2020-05-03T01:11:08.000Z |
import pytest
from pip._internal.cli.req_command import RequirementCommand
from pip._internal.commands.install import InstallCommand
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
# from pip._internal.models.index import PyPI
from pip._internal.models.search_scope import SearchScope
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
from pip._internal.req.constructors import install_req_from_line
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.resolution.resolvelib.factory import Factory
from pip._internal.resolution.resolvelib.provider import PipProvider
from pip._internal.utils.temp_dir import TempDirectory, global_tempdir_manager
@pytest.fixture
def finder(data):
session = PipSession()
scope = SearchScope([str(data.packages)], [])
collector = LinkCollector(session, scope)
prefs = SelectionPreferences(allow_yanked=False)
finder = PackageFinder.create(collector, prefs)
yield finder
@pytest.fixture
def preparer(finder):
session = PipSession()
rc = InstallCommand("x", "y")
o = rc.parse_args([])
with global_tempdir_manager():
with TempDirectory() as tmp:
with get_requirement_tracker() as tracker:
preparer = RequirementCommand.make_requirement_preparer(
tmp,
options=o[0],
req_tracker=tracker,
session=session,
finder=finder,
use_user_site=False
)
yield preparer
@pytest.fixture
def factory(finder, preparer):
yield Factory(
finder=finder,
preparer=preparer,
make_install_req=install_req_from_line,
force_reinstall=False,
ignore_installed=False,
ignore_requires_python=False,
py_version_info=None,
)
@pytest.fixture
def provider(factory):
yield PipProvider(
factory=factory,
ignore_dependencies=False,
)
| 31.25 | 78 | 0.709176 |
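The conftest above wires pip's resolution machinery together through chained pytest fixtures: `finder` feeds `preparer`, both feed `factory`, which feeds `provider`. A minimal self-contained sketch of that fixture-chaining pattern (the names here are invented for illustration, not part of pip):

```python
import pytest

@pytest.fixture
def engine():
    # Base fixture, analogous to `finder` above.
    yield "engine"

@pytest.fixture
def car(engine):
    # Depends on `engine` the same way `preparer` and `factory` depend on `finder`.
    yield f"car with {engine}"

def test_car(car):
    # pytest resolves the whole chain before the test body runs.
    assert car == "car with engine"
```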
8e3b2794bef6d625e9a6960fefc6b6e961906776 | 13,698 | py | Python | sharpy/solvers/staticcoupledrbm.py | ostodieck/sharpy | b85aa1c001a0ec851af4eb259cce7c01dfa68b9e | ["BSD-3-Clause"] | 1 | 2020-07-27T05:15:35.000Z | 2020-07-27T05:15:35.000Z | sharpy/solvers/staticcoupledrbm.py | briandesilva/sharpy | aed86428ff88fd14d36cabd91cf7e04b5fc9a39a | ["BSD-3-Clause"] | null | null | null | sharpy/solvers/staticcoupledrbm.py | briandesilva/sharpy | aed86428ff88fd14d36cabd91cf7e04b5fc9a39a | ["BSD-3-Clause"] | null | null | null |
import ctypes as ct
import numpy as np
import sharpy.aero.utils.mapping as mapping
import sharpy.utils.cout_utils as cout
import sharpy.utils.solver_interface as solver_interface
from sharpy.utils.solver_interface import solver, BaseSolver
import sharpy.utils.settings as settings
import sharpy.utils.algebra as algebra
@solver
class StaticCoupledRBM(BaseSolver):
"""
Steady coupled solver including rigid body motions
"""
solver_id = 'StaticCoupledRBM'
solver_classification = 'coupled'
settings_types = dict()
settings_default = dict()
settings_description = dict()
settings_types['print_info'] = 'bool'
settings_default['print_info'] = True
settings_description['print_info'] = 'Output run-time information'
settings_types['structural_solver'] = 'str'
settings_default['structural_solver'] = None
settings_description['structural_solver'] = 'Name of the structural solver used in the computation'
settings_types['structural_solver_settings'] = 'dict'
settings_default['structural_solver_settings'] = None
settings_description['structural_solver_settings'] = 'Dictionary of settings needed by the structural solver'
settings_types['aero_solver'] = 'str'
settings_default['aero_solver'] = None
settings_description['aero_solver'] = 'Name of the aerodynamic solver used in the computation'
settings_types['aero_solver_settings'] = 'dict'
settings_default['aero_solver_settings'] = None
settings_description['aero_solver_settings'] = 'Dictionary of settings needed by the aerodynamic solver'
settings_types['max_iter'] = 'int'
settings_default['max_iter'] = 100
settings_description['max_iter'] = 'Maximum number of FSI iterations'
settings_types['n_load_steps'] = 'int'
settings_default['n_load_steps'] = 1
settings_description['n_load_steps'] = 'Number of steps to ramp up the application of loads'
settings_types['tolerance'] = 'float'
settings_default['tolerance'] = 1e-5
settings_description['tolerance'] = 'FSI tolerance'
settings_types['relaxation_factor'] = 'float'
settings_default['relaxation_factor'] = 0.
settings_description['relaxation_factor'] = 'Relaxation factor'
settings_table = settings.SettingsTable()
__doc__ += settings_table.generate(settings_types, settings_default, settings_description)
def __init__(self):
self.data = None
self.settings = None
self.structural_solver = None
self.aero_solver = None
self.previous_force = None
def initialise(self, data, input_dict=None):
self.data = data
if input_dict is None:
self.settings = data.settings[self.solver_id]
else:
self.settings = input_dict
settings.to_custom_types(self.settings, self.settings_types, self.settings_default)
self.structural_solver = solver_interface.initialise_solver(self.settings['structural_solver'])
self.structural_solver.initialise(self.data, self.settings['structural_solver_settings'])
self.aero_solver = solver_interface.initialise_solver(self.settings['aero_solver'])
self.aero_solver.initialise(self.structural_solver.data, self.settings['aero_solver_settings'])
self.data = self.aero_solver.data
# load info from dyn dictionary
self.data.structure.add_unsteady_information(self.data.structure.dyn_dict, 1)
def increase_ts(self):
self.data.ts += 1
self.structural_solver.next_step()
self.aero_solver.next_step()
def cleanup_timestep_info(self):
if max(len(self.data.aero.timestep_info), len(self.data.structure.timestep_info)) > 1:
# copy last info to first
self.data.aero.timestep_info[0] = self.data.aero.timestep_info[-1].copy()
self.data.structure.timestep_info[0] = self.data.structure.timestep_info[-1].copy()
# delete all the rest
while len(self.data.aero.timestep_info) - 1:
del self.data.aero.timestep_info[-1]
while len(self.data.structure.timestep_info) - 1:
del self.data.structure.timestep_info[-1]
self.data.ts = 0
def run(self):
# Include the rbm
# print("ts", self.data.ts)
self.data.structure.timestep_info[-1].for_vel = self.data.structure.dynamic_input[0]['for_vel']
for i_step in range(self.settings['n_load_steps'].value + 1):
if (i_step == self.settings['n_load_steps'].value and
self.settings['n_load_steps'].value > 0):
break
# load step coefficient
if not self.settings['n_load_steps'].value == 0:
load_step_multiplier = (i_step + 1.0)/self.settings['n_load_steps'].value
else:
load_step_multiplier = 1.0
# new storage every load step
if i_step > 0:
self.increase_ts()
for i_iter in range(self.settings['max_iter'].value):
if self.settings['print_info'].value:
cout.cout_wrap('i_step: %u, i_iter: %u' % (i_step, i_iter))
# run aero
self.data = self.aero_solver.run()
# map force
struct_forces = mapping.aero2struct_force_mapping(
self.data.aero.timestep_info[self.data.ts].forces,
self.data.aero.struct2aero_mapping,
self.data.aero.timestep_info[self.data.ts].zeta,
self.data.structure.timestep_info[self.data.ts].pos,
self.data.structure.timestep_info[self.data.ts].psi,
self.data.structure.node_master_elem,
self.data.structure.connectivities,
self.data.structure.timestep_info[self.data.ts].cag(),
self.data.aero.aero_dict)
if not self.settings['relaxation_factor'].value == 0.:
if i_iter == 0:
self.previous_force = struct_forces.copy()
temp = struct_forces.copy()
struct_forces = ((1.0 - self.settings['relaxation_factor'].value)*struct_forces +
self.settings['relaxation_factor'].value*self.previous_force)
self.previous_force = temp
# copy force in beam
with_gravity_setting = True
try:
old_g = self.structural_solver.settings['gravity'].value
self.structural_solver.settings['gravity'] = old_g*load_step_multiplier
except KeyError:
with_gravity_setting = False
temp1 = load_step_multiplier*(struct_forces + self.data.structure.ini_info.steady_applied_forces)
self.data.structure.timestep_info[self.data.ts].steady_applied_forces[:] = temp1
# run beam
prev_quat = self.data.structure.timestep_info[self.data.ts].quat.copy()
self.data = self.structural_solver.run()
# The following line removes the rbm
self.data.structure.timestep_info[self.data.ts].quat = prev_quat.copy()
if with_gravity_setting:
self.structural_solver.settings['gravity'] = ct.c_double(old_g)
# update grid
self.aero_solver.update_step()
# print("psi[-1]", self.data.structure.timestep_info[-1].psi[-1,1,:])
# convergence
if self.convergence(i_iter):
# create q and dqdt vectors
self.structural_solver.update(self.data.structure.timestep_info[self.data.ts])
self.data = self.aero_solver.run()
self.cleanup_timestep_info()
break
if self.settings['print_info']:
resultants = self.extract_resultants()
cout.cout_wrap('Resultant forces and moments: ' + str(resultants))
return self.data
def convergence(self, i_iter):
if i_iter == self.settings['max_iter'].value - 1:
cout.cout_wrap('StaticCoupled did not converge!', 0)
# quit(-1)
if i_iter == 0:
self.initial_pos = self.data.structure.timestep_info[self.data.ts].pos.copy()
self.initial_psi = self.data.structure.timestep_info[self.data.ts].psi.copy()
self.prev_pos = self.initial_pos.copy()
self.prev_psi = self.initial_psi.copy()
for i,j in np.ndindex(self.initial_pos.shape):
if np.abs(self.initial_pos[i,j]) < 1.:
self.initial_pos[i,j] = 1.
for i,j,k in np.ndindex(self.initial_psi.shape):
if np.abs(self.initial_psi[i,j,k]) < 1.:
self.initial_psi[i,j,k] = 1.
return False
res_pos = np.amax(np.abs((self.data.structure.timestep_info[self.data.ts].pos - self.prev_pos)/self.initial_pos))
res_psi = np.amax(np.abs((self.data.structure.timestep_info[self.data.ts].psi - self.prev_psi)/self.initial_psi))
res_pos_dot = np.amax(np.abs(self.data.structure.timestep_info[self.data.ts].pos_dot))
res_psi_dot = np.amax(np.abs(self.data.structure.timestep_info[self.data.ts].psi_dot))
self.prev_pos = self.data.structure.timestep_info[self.data.ts].pos.copy()
self.prev_psi = self.data.structure.timestep_info[self.data.ts].psi.copy()
if self.settings['print_info'].value:
cout.cout_wrap('Pos res = %8e. Psi res = %8e.' % (res_pos, res_psi), 2)
cout.cout_wrap('Pos_dot res = %8e. Psi_dot res = %8e.' % (res_pos_dot, res_psi_dot), 2)
if res_pos < self.settings['tolerance'].value:
if res_psi < self.settings['tolerance'].value:
if res_pos_dot < self.settings['tolerance'].value:
if res_psi_dot < self.settings['tolerance'].value:
return True
return False
# return_value = None
# if i_iter == 0:
# self.initial_residual = np.linalg.norm(self.data.structure.timestep_info[self.data.ts].pos)
# self.previous_residual = self.initial_residual
# self.current_residual = self.initial_residual
# return False
#
# self.current_residual = np.linalg.norm(self.data.structure.timestep_info[self.data.ts].pos)
# if self.settings['print_info'].value:
# cout.cout_wrap('Res = %8e' % (np.abs(self.current_residual - self.previous_residual)/self.previous_residual), 2)
#
# if return_value is None:
# if np.abs(self.current_residual - self.previous_residual)/self.initial_residual < self.settings['tolerance'].value:
# return_value = True
# else:
# self.previous_residual = self.current_residual
# return_value = False
#
# if return_value is None:
# return_value = False
#
# return return_value
def change_trim(self, alpha, thrust, thrust_nodes, tail_deflection, tail_cs_index):
# self.cleanup_timestep_info()
self.data.structure.timestep_info = []
self.data.structure.timestep_info.append(self.data.structure.ini_info.copy())
aero_copy = self.data.aero.timestep_info[-1]
self.data.aero.timestep_info = []
self.data.aero.timestep_info.append(aero_copy)
self.data.ts = 0
# alpha
orientation_quat = algebra.euler2quat(np.array([0.0, alpha, 0.0]))
self.data.structure.timestep_info[0].quat[:] = orientation_quat[:]
try:
self.force_orientation
except AttributeError:
self.force_orientation = np.zeros((len(thrust_nodes), 3))
for i_node, node in enumerate(thrust_nodes):
self.force_orientation[i_node, :] = (
algebra.unit_vector(self.data.structure.ini_info.steady_applied_forces[node, 0:3]))
# print(self.force_orientation)
# thrust
# thrust is scaled so that the direction of the forces is conserved
# in all nodes.
# the `thrust` parameter is the force PER node.
# if there are two or more nodes in thrust_nodes, the total forces
# is n_nodes_in_thrust_nodes*thrust
# thrust forces have to be indicated in structure.ini_info
# print(algebra.unit_vector(self.data.structure.ini_info.steady_applied_forces[0, 0:3])*thrust)
for i_node, node in enumerate(thrust_nodes):
# self.data.structure.ini_info.steady_applied_forces[i_node, 0:3] = (
# algebra.unit_vector(self.data.structure.ini_info.steady_applied_forces[i_node, 0:3])*thrust)
self.data.structure.ini_info.steady_applied_forces[node, 0:3] = (
self.force_orientation[i_node, :]*thrust)
self.data.structure.timestep_info[0].steady_applied_forces[node, 0:3] = (
self.force_orientation[i_node, :]*thrust)
# tail deflection
try:
self.data.aero.aero_dict['control_surface_deflection'][tail_cs_index] = tail_deflection
except KeyError:
raise Exception('This model has no control surfaces')
except IndexError:
raise Exception('The tail control surface index > number of surfaces')
# update grid
self.aero_solver.update_step()
def extract_resultants(self, tstep=None):
return self.structural_solver.extract_resultants(tstep)
| 45.059211 | 129 | 0.632063 |
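`StaticCoupledRBM.run` above blends each new aerodynamic force with the previous FSI iterate whenever `relaxation_factor` is nonzero. A standalone sketch of that under-relaxation update, with illustrative numbers:

```python
import numpy as np

def relax(f_new, f_prev, omega):
    # Under-relaxed fixed-point update: omega = 0 returns f_new unchanged;
    # larger omega damps oscillations between FSI iterations.
    return (1.0 - omega) * f_new + omega * f_prev

f_prev = np.array([100.0, 0.0, -50.0])
f_new = np.array([120.0, 5.0, -60.0])
print(relax(f_new, f_prev, 0.3))  # [114.   3.5 -57. ]
```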
4de6a160fd3067912c5b297d9ef663ee7e735321 | 1,081 | py | Python | app/grandchallenge/evaluation/migrations/0030_auto_20200702_0845.py | njmhendrix/grand-challenge.org | 9bc36f5e26561a78bd405e8ea5e4c0f86c95f011 | ["Apache-2.0"] | null | null | null | app/grandchallenge/evaluation/migrations/0030_auto_20200702_0845.py | njmhendrix/grand-challenge.org | 9bc36f5e26561a78bd405e8ea5e4c0f86c95f011 | ["Apache-2.0"] | null | null | null | app/grandchallenge/evaluation/migrations/0030_auto_20200702_0845.py | njmhendrix/grand-challenge.org | 9bc36f5e26561a78bd405e8ea5e4c0f86c95f011 | ["Apache-2.0"] | null | null | null |
# Generated by Django 3.0.8 on 2020-07-02 08:45
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("evaluation", "0029_auto_20200622_1306"),
]
operations = [
migrations.AddField(
model_name="job",
name="published",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="job",
name="rank",
field=models.PositiveIntegerField(
default=0,
help_text="The position of this result on the leaderboard. If the value is zero, then the result is unranked.",
),
),
migrations.AddField(
model_name="job",
name="rank_per_metric",
field=django.contrib.postgres.fields.jsonb.JSONField(default=dict),
),
migrations.AddField(
model_name="job",
name="rank_score",
field=models.FloatField(default=0.0),
),
]
| 29.216216 | 127 | 0.572618 |
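The migration above adds four columns to the `Job` model. A standalone sketch of the resulting field declarations (the `settings.configure` scaffolding and the `app_label` are illustrative only, and the postgres `JSONField` is omitted because it needs the postgres backend):

```python
import django
from django.conf import settings

settings.configure(INSTALLED_APPS=[])  # minimal scaffolding so this runs standalone
django.setup()

from django.db import models

class Job(models.Model):
    published = models.BooleanField(default=True)
    # Zero means the result is unranked, matching the migration's help_text.
    rank = models.PositiveIntegerField(default=0)
    rank_score = models.FloatField(default=0.0)

    class Meta:
        app_label = "evaluation"  # hypothetical label for this sketch

print(sorted(f.name for f in Job._meta.get_fields()))
```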
4e5110b4a298daed7101ea045848346bc98b09de | 3,522 | py | Python | practises/best-trading-by-year.py | m860/data-analysis-with-python | e96a5ae79913d7805383ea3dfb8dcc731c60681d | ["MIT"] | null | null | null | practises/best-trading-by-year.py | m860/data-analysis-with-python | e96a5ae79913d7805383ea3dfb8dcc731c60681d | ["MIT"] | null | null | null | practises/best-trading-by-year.py | m860/data-analysis-with-python | e96a5ae79913d7805383ea3dfb8dcc731c60681d | ["MIT"] | null | null | null |
from classes import Stock, format
import numpy as np
import os
from datetime import date, timedelta
from dateutil.parser import parse
import sys
from multiprocessing import Pool
import time
import json
stock = Stock()
raterange = np.arange(0.02, 0.5, 0.001)[::-1]
def getFiles(dirname):
files = None
for dirpath, dirnames, filenames in os.walk(dirname):
files = [os.path.join(dirpath, p) for p in filenames if p.endswith('.json')]
return files
files = getFiles('formated')
def getDates(arr):
return [' - '.join([d['date'], str(d['close'])]) for d in arr]
def reformatItem(item, index):
item['index'] = index
item['odate'] = parse(item['date']).date()
return item
def test(rate, nextdate=date(2016, 1, 1), amount=float(10000)):
total = len(stock.items)
durs = []
turnings = [stock.items[i] for i in range(1, total - 1) if
stock.isFirstTurningByMACD(stock.items[i], stock.items[i - 1])]
for d in turnings:
if d['odate'] < nextdate:
continue
if d['odate'] > nextdate:
nextdate = d['odate']
# in
num = int(amount / d['close'])
for dd in [j for j in stock.items if j['odate'] > nextdate]:
diff = dd['high'] - d['close']
diffp = diff / d['close']
if diffp >= rate:
# out
out = d['close'] * (1 + rate)
diffout = out - d['close']
dur = parse(dd['date']) - parse(d['date'])
durs.extend([dur])
# print('%s - %-5s | %s - %-7s open=%-5s,high=%-5s,close=%-5s | %-6s:%s | %s' % (
# d['date'], d['close'], dd['date'], out, dd['open'], dd['high'],
# dd['close'], diffout, num, dur))
amount += diffout * num
nextdate = dd['odate']
break
lendurs = len(durs)
avg = 0
if lendurs > 0:
avg = sum(durs, timedelta()) / lendurs
return {
'tradingTimes': lendurs,
'amount': amount,
'avgTradingTimes': str(avg),
'rate': rate
}
def testWrapper(args):
return test(*args)
def displayResult(arr):
for d in arr:
print('%-20s | %-10s' % (d['avgTradingTimes'], d['amount']))
def fmtitem(item):
item['odate'] = parse(item['date']).date()
return item
def run(begindate=date(2016, 1, 1), enddate=date(2017, 1, 1)):
result = []
i = 1
l = len(files)
start_time = time.time()
for fp in files:
stock.load(fp)
stock.items = [fmtitem(item) for item in stock.items if
parse(item['date']).date() >= begindate and parse(item['date']).date() < enddate]
pool = Pool(10)
p = pool.map(test, raterange)
if len(p) > 0:
sp = sorted(p, key=lambda d: (d['tradingTimes'], d['amount']), reverse=True)
fir = sp[0]
fir['code'] = stock.code
result.extend([fir])
pool.close()
sys.stdout.write("\r %s/%s cost %s seconds" % (i, l, (time.time() - start_time)))
start_time = time.time()
i += 1
sr = sorted(result, key=lambda d: (d['tradingTimes'], d['amount']), reverse=True)
# save to file
with open('output/{}.json'.format(begindate.year), 'w+') as f:
json.dump(sr, f)
run()
run(begindate=date(2015, 1, 1), enddate=date(2016, 1, 1))
run(begindate=date(2014, 1, 1), enddate=date(2015, 1, 1))
run(begindate=date(2013, 1, 1), enddate=date(2014, 1, 1))
| 28.868852 | 104 | 0.536343 |
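`test` above enters at the close of a MACD turning point and exits on the first later day whose high clears the entry close by at least `rate`, booking the exit at exactly `close * (1 + rate)`. The same exit rule in isolation, with made-up prices:

```python
buy_close = 10.0   # entry at a turning point's close (hypothetical)
rate = 0.05        # 5% take-profit target
for day_high in [10.2, 10.4, 10.6]:
    if (day_high - buy_close) / buy_close >= rate:
        sell = buy_close * (1 + rate)  # book the target price, not the day's high
        print("sold at", sell, "profit per share", sell - buy_close)
        break
```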
1c997f153ba0303369de0741fff1d8b21ab7630a | 5,790 | py | Python | test/functional/wallet_txn_doublespend.py | Bits-Coin/bits-coin | dd8220018f5582e76d43e8c52bd323524e495d8c | ["MIT"] | 2 | 2021-11-17T23:05:13.000Z | 2021-11-17T23:05:32.000Z | test/functional/wallet_txn_doublespend.py | Bits-Coin/bits-coin | dd8220018f5582e76d43e8c52bd323524e495d8c | ["MIT"] | null | null | null | test/functional/wallet_txn_doublespend.py | Bits-Coin/bits-coin | dd8220018f5582e76d43e8c52bd323524e495d8c | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The BitsCoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there is a double-spend conflict."""
from decimal import Decimal
from test_framework.test_framework import BitsCoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
disconnect_nodes,
find_output,
sync_blocks,
)
class TxnMallTest(BitsCoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 DGB:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar addresses:
node0_address_foo = self.nodes[0].getnewaddress()
fund_foo_txid = self.nodes[0].sendtoaddress(node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress()
fund_bar_txid = self.nodes[0].sendtoaddress(node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(),
starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# First: use raw transaction API to send 1240 DGB to node1_address,
# but don't broadcast:
doublespend_fee = Decimal('-.02')
rawtx_input_0 = {}
rawtx_input_0["txid"] = fund_foo_txid
rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219)
rawtx_input_1 = {}
rawtx_input_1["txid"] = fund_bar_txid
rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29)
inputs = [rawtx_input_0, rawtx_input_1]
change_address = self.nodes[0].getnewaddress()
outputs = {}
outputs[node1_address] = 1240
outputs[change_address] = 1248 - 1240 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends using one 50 DGB coin each
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50DGB for another
# matured block, minus 40, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block:
expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance(), starting_balance - tx1["amount"] - tx2["amount"])
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Both transactions should be conflicted
assert_equal(tx1["confirmations"], -2)
assert_equal(tx2["confirmations"], -2)
# Node0's total balance should be starting balance, plus 100DGB for
# two more matured blocks, minus 1240 for the double-spend, plus fees (which are
# negative):
expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee
assert_equal(self.nodes[0].getbalance(), expected)
# Node1's balance should be its initial balance (1250 for 25 block rewards) plus the doublespend:
assert_equal(self.nodes[1].getbalance(), 1250 + 1240)
if __name__ == '__main__':
TxnMallTest().main()
| 42.262774 | 111 | 0.658895 |
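The final assertions above hinge on one balance identity: node0 ends with its starting balance, plus two matured block rewards, minus the 1240 sent in the double-spend, plus the (negative) fees. The same arithmetic standalone, with placeholder fees; the real values come from `gettransaction`:

```python
from decimal import Decimal

starting_balance = Decimal(1250)
fund_foo_fee = Decimal("-0.01")   # placeholder; the test reads this from gettransaction
fund_bar_fee = Decimal("-0.01")   # placeholder
doublespend_fee = Decimal("-0.02")

expected = starting_balance + 100 - 1240 + fund_foo_fee + fund_bar_fee + doublespend_fee
print(expected)  # 109.96 with these placeholder fees
```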
36e7e59438f994e3c995a2ba654333f716a2241c | 6,207 | py | Python | ppcls/modeling/architectures/resnet_vc.py | cq2019git/PaddleClas | 10326f0d0b89dc5aaf711fd0b9f45f6557007208 | ["Apache-2.0"] | 13 | 2020-09-09T12:23:36.000Z | 2022-03-16T09:42:07.000Z | ppcls/modeling/architectures/resnet_vc.py | cq2019git/PaddleClas | 10326f0d0b89dc5aaf711fd0b9f45f6557007208 | ["Apache-2.0"] | null | null | null | ppcls/modeling/architectures/resnet_vc.py | cq2019git/PaddleClas | 10326f0d0b89dc5aaf711fd0b9f45f6557007208 | ["Apache-2.0"] | 5 | 2020-09-14T07:35:39.000Z | 2021-12-22T02:03:31.000Z |
#copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import paddle
import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr
__all__ = ["ResNet", "ResNet50_vc", "ResNet101_vc", "ResNet152_vc"]
train_parameters = {
"input_size": [3, 224, 224],
"input_mean": [0.485, 0.456, 0.406],
"input_std": [0.229, 0.224, 0.225],
"learning_strategy": {
"name": "piecewise_decay",
"batch_size": 256,
"epochs": [30, 60, 90],
"steps": [0.1, 0.01, 0.001, 0.0001]
}
}
class ResNet():
def __init__(self, layers=50):
self.params = train_parameters
self.layers = layers
def net(self, input, class_dim=1000):
layers = self.layers
supported_layers = [50, 101, 152]
assert layers in supported_layers, \
"supported layers are {} but input layer is {}".format(supported_layers, layers)
if layers == 50:
depth = [3, 4, 6, 3]
elif layers == 101:
depth = [3, 4, 23, 3]
elif layers == 152:
depth = [3, 8, 36, 3]
num_filters = [64, 128, 256, 512]
conv = self.conv_bn_layer(
input=input,
num_filters=32,
filter_size=3,
stride=2,
act='relu',
name='conv1_1')
conv = self.conv_bn_layer(
input=conv,
num_filters=32,
filter_size=3,
stride=1,
act='relu',
name='conv1_2')
conv = self.conv_bn_layer(
input=conv,
num_filters=64,
filter_size=3,
stride=1,
act='relu',
name='conv1_3')
conv = fluid.layers.pool2d(
input=conv,
pool_size=3,
pool_stride=2,
pool_padding=1,
pool_type='max')
for block in range(len(depth)):
for i in range(depth[block]):
if layers in [101, 152] and block == 2:
if i == 0:
conv_name = "res" + str(block + 2) + "a"
else:
conv_name = "res" + str(block + 2) + "b" + str(i)
else:
conv_name = "res" + str(block + 2) + chr(97 + i)
conv = self.bottleneck_block(
input=conv,
num_filters=num_filters[block],
stride=2 if i == 0 and block != 0 else 1,
name=conv_name)
pool = fluid.layers.pool2d(
input=conv, pool_type='avg', global_pooling=True)
stdv = 1.0 / math.sqrt(pool.shape[1] * 1.0)
out = fluid.layers.fc(input=pool,
size=class_dim,
param_attr=fluid.param_attr.ParamAttr(
name="fc_0.w_0",
initializer=fluid.initializer.Uniform(-stdv,
stdv)),
bias_attr=ParamAttr(name="fc_0.b_0"))
return out
def conv_bn_layer(self,
input,
num_filters,
filter_size,
stride=1,
groups=1,
act=None,
name=None):
conv = fluid.layers.conv2d(
input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=(filter_size - 1) // 2,
groups=groups,
act=None,
param_attr=ParamAttr(name=name + "_weights"),
bias_attr=False,
name=name + '.conv2d.output.1')
if name == "conv1":
bn_name = "bn_" + name
else:
bn_name = "bn" + name[3:]
return fluid.layers.batch_norm(
input=conv,
act=act,
name=bn_name + '.output.1',
param_attr=ParamAttr(name=bn_name + '_scale'),
bias_attr=ParamAttr(bn_name + '_offset'),
moving_mean_name=bn_name + '_mean',
moving_variance_name=bn_name + '_variance', )
def shortcut(self, input, ch_out, stride, name):
ch_in = input.shape[1]
if ch_in != ch_out or stride != 1:
return self.conv_bn_layer(input, ch_out, 1, stride, name=name)
else:
return input
def bottleneck_block(self, input, num_filters, stride, name):
conv0 = self.conv_bn_layer(
input=input,
num_filters=num_filters,
filter_size=1,
act='relu',
name=name + "_branch2a")
conv1 = self.conv_bn_layer(
input=conv0,
num_filters=num_filters,
filter_size=3,
stride=stride,
act='relu',
name=name + "_branch2b")
conv2 = self.conv_bn_layer(
input=conv1,
num_filters=num_filters * 4,
filter_size=1,
act=None,
name=name + "_branch2c")
short = self.shortcut(
input, num_filters * 4, stride, name=name + "_branch1")
return fluid.layers.elementwise_add(
x=short, y=conv2, act='relu', name=name + ".add.output.5")
def ResNet50_vc():
model = ResNet(layers=50)
return model
def ResNet101_vc():
model = ResNet(layers=101)
return model
def ResNet152_vc():
model = ResNet(layers=152)
return model
| 31.830769 | 92 | 0.520864 |
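`ResNet.net` above names bottleneck blocks in the Caffe style: `res<stage><letter>` normally, switching to `res<stage>b<i>` inside the long third stage of ResNet-101/152. The naming rule extracted standalone:

```python
def conv_name(layers, block, i):
    # Mirrors the branch in ResNet.net above; stage numbering starts at res2.
    if layers in (101, 152) and block == 2:
        return "res" + str(block + 2) + ("a" if i == 0 else "b" + str(i))
    return "res" + str(block + 2) + chr(97 + i)

print(conv_name(50, 0, 1))   # res2b
print(conv_name(101, 2, 5))  # res4b5
```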
a822bb57f58673b08ead1160f00bca91ecce3710 | 42,953 | py | Python | testing/test_debugging.py | aminalaee/pytest | e01231c215e0d4150cea8d08f57bd65f1a88b964 | ["MIT"] | 9,225 | 2015-06-15T21:56:14.000Z | 2022-03-31T20:47:38.000Z | testing/test_debugging.py | aminalaee/pytest | e01231c215e0d4150cea8d08f57bd65f1a88b964 | ["MIT"] | 7,794 | 2015-06-15T21:06:34.000Z | 2022-03-31T10:56:54.000Z | testing/test_debugging.py | aminalaee/pytest | e01231c215e0d4150cea8d08f57bd65f1a88b964 | ["MIT"] | 2,598 | 2015-06-15T21:42:39.000Z | 2022-03-29T13:48:22.000Z |
import os
import sys
from typing import List
import _pytest._code
import pytest
from _pytest.debugging import _validate_usepdb_cls
from _pytest.monkeypatch import MonkeyPatch
from _pytest.pytester import Pytester
try:
# Type ignored for Python <= 3.6.
breakpoint # type: ignore
except NameError:
SUPPORTS_BREAKPOINT_BUILTIN = False
else:
SUPPORTS_BREAKPOINT_BUILTIN = True
_ENVIRON_PYTHONBREAKPOINT = os.environ.get("PYTHONBREAKPOINT", "")
@pytest.fixture(autouse=True)
def pdb_env(request):
if "pytester" in request.fixturenames:
# Disable pdb++ with inner tests.
pytester = request.getfixturevalue("pytester")
pytester._monkeypatch.setenv("PDBPP_HIJACK_PDB", "0")
def runpdb_and_get_report(pytester: Pytester, source: str):
p = pytester.makepyfile(source)
result = pytester.runpytest_inprocess("--pdb", p)
reports = result.reprec.getreports("pytest_runtest_logreport") # type: ignore[attr-defined]
assert len(reports) == 3, reports # setup/call/teardown
return reports[1]
@pytest.fixture
def custom_pdb_calls() -> List[str]:
called = []
# install dummy debugger class and track which methods were called on it
class _CustomPdb:
quitting = False
def __init__(self, *args, **kwargs):
called.append("init")
def reset(self):
called.append("reset")
def interaction(self, *args):
called.append("interaction")
_pytest._CustomPdb = _CustomPdb # type: ignore
return called
@pytest.fixture
def custom_debugger_hook():
called = []
# install dummy debugger class and track which methods were called on it
class _CustomDebugger:
def __init__(self, *args, **kwargs):
called.append("init")
def reset(self):
called.append("reset")
def interaction(self, *args):
called.append("interaction")
def set_trace(self, frame):
print("**CustomDebugger**")
called.append("set_trace")
_pytest._CustomDebugger = _CustomDebugger # type: ignore
yield called
del _pytest._CustomDebugger # type: ignore
class TestPDB:
@pytest.fixture
def pdblist(self, request):
monkeypatch = request.getfixturevalue("monkeypatch")
pdblist = []
def mypdb(*args):
pdblist.append(args)
plugin = request.config.pluginmanager.getplugin("debugging")
monkeypatch.setattr(plugin, "post_mortem", mypdb)
return pdblist
def test_pdb_on_fail(self, pytester: Pytester, pdblist) -> None:
rep = runpdb_and_get_report(
pytester,
"""
def test_func():
assert 0
""",
)
assert rep.failed
assert len(pdblist) == 1
tb = _pytest._code.Traceback(pdblist[0][0])
assert tb[-1].name == "test_func"
def test_pdb_on_xfail(self, pytester: Pytester, pdblist) -> None:
rep = runpdb_and_get_report(
pytester,
"""
import pytest
@pytest.mark.xfail
def test_func():
assert 0
""",
)
assert "xfail" in rep.keywords
assert not pdblist
def test_pdb_on_skip(self, pytester, pdblist) -> None:
rep = runpdb_and_get_report(
pytester,
"""
import pytest
def test_func():
pytest.skip("hello")
""",
)
assert rep.skipped
assert len(pdblist) == 0
def test_pdb_on_BdbQuit(self, pytester, pdblist) -> None:
rep = runpdb_and_get_report(
pytester,
"""
import bdb
def test_func():
raise bdb.BdbQuit
""",
)
assert rep.failed
assert len(pdblist) == 0
def test_pdb_on_KeyboardInterrupt(self, pytester, pdblist) -> None:
rep = runpdb_and_get_report(
pytester,
"""
def test_func():
raise KeyboardInterrupt
""",
)
assert rep.failed
assert len(pdblist) == 1
@staticmethod
def flush(child):
if child.isalive():
# Read if the test has not (e.g. test_pdb_unittest_skip).
child.read()
child.wait()
assert not child.isalive()
def test_pdb_unittest_postmortem(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import unittest
class Blub(unittest.TestCase):
def tearDown(self):
self.filename = None
def test_false(self):
self.filename = 'debug' + '.me'
assert 0
"""
)
child = pytester.spawn_pytest(f"--pdb {p1}")
child.expect("Pdb")
child.sendline("p self.filename")
child.sendeof()
rest = child.read().decode("utf8")
assert "debug.me" in rest
self.flush(child)
def test_pdb_unittest_skip(self, pytester: Pytester) -> None:
"""Test for issue #2137"""
p1 = pytester.makepyfile(
"""
import unittest
@unittest.skipIf(True, 'Skipping also with pdb active')
class MyTestCase(unittest.TestCase):
def test_one(self):
assert 0
"""
)
child = pytester.spawn_pytest(f"-rs --pdb {p1}")
child.expect("Skipping also with pdb active")
child.expect_exact("= 1 skipped in")
child.sendeof()
self.flush(child)
def test_pdb_print_captured_stdout_and_stderr(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
import sys
sys.stderr.write("get\\x20rekt")
print("get\\x20rekt")
assert False
def test_not_called_due_to_quit():
pass
"""
)
child = pytester.spawn_pytest("--pdb %s" % p1)
child.expect("captured stdout")
child.expect("get rekt")
child.expect("captured stderr")
child.expect("get rekt")
child.expect("traceback")
child.expect("def test_1")
child.expect("Pdb")
child.sendeof()
rest = child.read().decode("utf8")
assert "Exit: Quitting debugger" in rest
assert "= 1 failed in" in rest
assert "def test_1" not in rest
assert "get rekt" not in rest
self.flush(child)
def test_pdb_dont_print_empty_captured_stdout_and_stderr(
self, pytester: Pytester
) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
assert False
"""
)
child = pytester.spawn_pytest("--pdb %s" % p1)
child.expect("Pdb")
output = child.before.decode("utf8")
child.sendeof()
assert "captured stdout" not in output
assert "captured stderr" not in output
self.flush(child)
@pytest.mark.parametrize("showcapture", ["all", "no", "log"])
def test_pdb_print_captured_logs(self, pytester, showcapture: str) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
import logging
logging.warn("get " + "rekt")
assert False
"""
)
child = pytester.spawn_pytest(f"--show-capture={showcapture} --pdb {p1}")
if showcapture in ("all", "log"):
child.expect("captured log")
child.expect("get rekt")
child.expect("Pdb")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
self.flush(child)
def test_pdb_print_captured_logs_nologging(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
import logging
logging.warn("get " + "rekt")
assert False
"""
)
child = pytester.spawn_pytest("--show-capture=all --pdb -p no:logging %s" % p1)
child.expect("get rekt")
output = child.before.decode("utf8")
assert "captured log" not in output
child.expect("Pdb")
child.sendeof()
rest = child.read().decode("utf8")
assert "1 failed" in rest
self.flush(child)
def test_pdb_interaction_exception(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def globalfunc():
pass
def test_1():
pytest.raises(ValueError, globalfunc)
"""
)
child = pytester.spawn_pytest("--pdb %s" % p1)
child.expect(".*def test_1")
child.expect(".*pytest.raises.*globalfunc")
child.expect("Pdb")
child.sendline("globalfunc")
child.expect(".*function")
child.sendeof()
child.expect("1 failed")
self.flush(child)
def test_pdb_interaction_on_collection_issue181(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
xxx
"""
)
child = pytester.spawn_pytest("--pdb %s" % p1)
# child.expect(".*import pytest.*")
child.expect("Pdb")
child.sendline("c")
child.expect("1 error")
self.flush(child)
def test_pdb_interaction_on_internal_error(self, pytester: Pytester) -> None:
pytester.makeconftest(
"""
def pytest_runtest_protocol():
0/0
"""
)
p1 = pytester.makepyfile("def test_func(): pass")
child = pytester.spawn_pytest("--pdb %s" % p1)
child.expect("Pdb")
# INTERNALERROR is only displayed once via terminal reporter.
assert (
len(
[
x
for x in child.before.decode().splitlines()
if x.startswith("INTERNALERROR> Traceback")
]
)
== 1
)
child.sendeof()
self.flush(child)
def test_pdb_prevent_ConftestImportFailure_hiding_exception(
self, pytester: Pytester
) -> None:
pytester.makepyfile("def test_func(): pass")
sub_dir = pytester.path.joinpath("ns")
sub_dir.mkdir()
sub_dir.joinpath("conftest").with_suffix(".py").write_text(
"import unknown", "utf-8"
)
sub_dir.joinpath("test_file").with_suffix(".py").write_text(
"def test_func(): pass", "utf-8"
)
result = pytester.runpytest_subprocess("--pdb", ".")
result.stdout.fnmatch_lines(["-> import unknown"])
def test_pdb_interaction_capturing_simple(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def test_1():
i = 0
print("hello17")
pytest.set_trace()
i == 1
assert 0
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect(r"test_1\(\)")
child.expect("i == 1")
child.expect("Pdb")
child.sendline("c")
rest = child.read().decode("utf-8")
assert "AssertionError" in rest
assert "1 failed" in rest
assert "def test_1" in rest
assert "hello17" in rest # out is captured
self.flush(child)
def test_pdb_set_trace_kwargs(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def test_1():
i = 0
print("hello17")
pytest.set_trace(header="== my_header ==")
x = 3
assert 0
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("== my_header ==")
assert "PDB set_trace" not in child.before.decode()
child.expect("Pdb")
child.sendline("c")
rest = child.read().decode("utf-8")
assert "1 failed" in rest
assert "def test_1" in rest
assert "hello17" in rest # out is captured
self.flush(child)
def test_pdb_set_trace_interception(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pdb
def test_1():
pdb.set_trace()
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("Pdb")
child.sendline("q")
rest = child.read().decode("utf8")
assert "no tests ran" in rest
assert "reading from stdin while output" not in rest
assert "BdbQuit" not in rest
self.flush(child)
def test_pdb_and_capsys(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def test_1(capsys):
print("hello1")
pytest.set_trace()
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("test_1")
child.send("capsys.readouterr()\n")
child.expect("hello1")
child.sendeof()
child.read()
self.flush(child)
def test_pdb_with_caplog_on_pdb_invocation(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test_1(capsys, caplog):
import logging
logging.getLogger(__name__).warning("some_warning")
assert 0
"""
)
child = pytester.spawn_pytest("--pdb %s" % str(p1))
child.send("caplog.record_tuples\n")
child.expect_exact(
"[('test_pdb_with_caplog_on_pdb_invocation', 30, 'some_warning')]"
)
child.sendeof()
child.read()
self.flush(child)
def test_set_trace_capturing_afterwards(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pdb
def test_1():
pdb.set_trace()
def test_2():
print("hello")
assert 0
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("test_1")
child.send("c\n")
child.expect("test_2")
child.expect("Captured")
child.expect("hello")
child.sendeof()
child.read()
self.flush(child)
def test_pdb_interaction_doctest(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def function_1():
'''
>>> i = 0
>>> assert i == 1
'''
"""
)
child = pytester.spawn_pytest("--doctest-modules --pdb %s" % p1)
child.expect("Pdb")
assert "UNEXPECTED EXCEPTION: AssertionError()" in child.before.decode("utf8")
child.sendline("'i=%i.' % i")
child.expect("Pdb")
assert "\r\n'i=0.'\r\n" in child.before.decode("utf8")
child.sendeof()
rest = child.read().decode("utf8")
assert "! _pytest.outcomes.Exit: Quitting debugger !" in rest
assert "BdbQuit" not in rest
assert "1 failed" in rest
self.flush(child)
def test_doctest_set_trace_quit(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def function_1():
'''
>>> __import__('pdb').set_trace()
'''
"""
)
# NOTE: does not use pytest.set_trace, but Python's patched pdb,
# therefore "-s" is required.
child = pytester.spawn_pytest("--doctest-modules --pdb -s %s" % p1)
child.expect("Pdb")
child.sendline("q")
rest = child.read().decode("utf8")
assert "! _pytest.outcomes.Exit: Quitting debugger !" in rest
assert "= no tests ran in" in rest
assert "BdbQuit" not in rest
assert "UNEXPECTED EXCEPTION" not in rest
def test_pdb_interaction_capturing_twice(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def test_1():
i = 0
print("hello17")
pytest.set_trace()
x = 3
print("hello18")
pytest.set_trace()
x = 4
assert 0
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect(r"PDB set_trace \(IO-capturing turned off\)")
child.expect("test_1")
child.expect("x = 3")
child.expect("Pdb")
child.sendline("c")
child.expect(r"PDB continue \(IO-capturing resumed\)")
child.expect(r"PDB set_trace \(IO-capturing turned off\)")
child.expect("x = 4")
child.expect("Pdb")
child.sendline("c")
child.expect("_ test_1 _")
child.expect("def test_1")
rest = child.read().decode("utf8")
assert "Captured stdout call" in rest
assert "hello17" in rest # out is captured
assert "hello18" in rest # out is captured
assert "1 failed" in rest
self.flush(child)
def test_pdb_with_injected_do_debug(self, pytester: Pytester) -> None:
"""Simulates pdbpp, which injects Pdb into do_debug, and uses
self.__class__ in do_continue.
"""
p1 = pytester.makepyfile(
mytest="""
import pdb
import pytest
count_continue = 0
class CustomPdb(pdb.Pdb, object):
def do_debug(self, arg):
import sys
import types
do_debug_func = pdb.Pdb.do_debug
newglobals = do_debug_func.__globals__.copy()
newglobals['Pdb'] = self.__class__
orig_do_debug = types.FunctionType(
do_debug_func.__code__, newglobals,
do_debug_func.__name__, do_debug_func.__defaults__,
)
return orig_do_debug(self, arg)
do_debug.__doc__ = pdb.Pdb.do_debug.__doc__
def do_continue(self, *args, **kwargs):
global count_continue
count_continue += 1
return super(CustomPdb, self).do_continue(*args, **kwargs)
def foo():
print("print_from_foo")
def test_1():
i = 0
print("hello17")
pytest.set_trace()
x = 3
print("hello18")
assert count_continue == 2, "unexpected_failure: %d != 2" % count_continue
pytest.fail("expected_failure")
"""
)
child = pytester.spawn_pytest("--pdbcls=mytest:CustomPdb %s" % str(p1))
child.expect(r"PDB set_trace \(IO-capturing turned off\)")
child.expect(r"\n\(Pdb")
child.sendline("debug foo()")
child.expect("ENTERING RECURSIVE DEBUGGER")
child.expect(r"\n\(\(Pdb")
child.sendline("c")
child.expect("LEAVING RECURSIVE DEBUGGER")
assert b"PDB continue" not in child.before
# No extra newline.
assert child.before.endswith(b"c\r\nprint_from_foo\r\n")
# set_debug should not raise outcomes. Exit, if used recursively.
child.sendline("debug 42")
child.sendline("q")
child.expect("LEAVING RECURSIVE DEBUGGER")
assert b"ENTERING RECURSIVE DEBUGGER" in child.before
assert b"Quitting debugger" not in child.before
child.sendline("c")
child.expect(r"PDB continue \(IO-capturing resumed\)")
rest = child.read().decode("utf8")
assert "hello17" in rest # out is captured
assert "hello18" in rest # out is captured
assert "1 failed" in rest
assert "Failed: expected_failure" in rest
assert "AssertionError: unexpected_failure" not in rest
self.flush(child)
def test_pdb_without_capture(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def test_1():
pytest.set_trace()
"""
)
child = pytester.spawn_pytest("-s %s" % p1)
child.expect(r">>> PDB set_trace >>>")
child.expect("Pdb")
child.sendline("c")
child.expect(r">>> PDB continue >>>")
child.expect("1 passed")
self.flush(child)
@pytest.mark.parametrize("capture_arg", ("", "-s", "-p no:capture"))
def test_pdb_continue_with_recursive_debug(
self, capture_arg, pytester: Pytester
) -> None:
"""Full coverage for do_debug without capturing.
This is very similar to test_pdb_interaction_continue_recursive in general,
but mocks out ``pdb.set_trace`` for providing more coverage.
"""
p1 = pytester.makepyfile(
"""
try:
input = raw_input
except NameError:
pass
def set_trace():
__import__('pdb').set_trace()
def test_1(monkeypatch):
import _pytest.debugging
class pytestPDBTest(_pytest.debugging.pytestPDB):
@classmethod
def set_trace(cls, *args, **kwargs):
# Init PytestPdbWrapper to handle capturing.
_pdb = cls._init_pdb("set_trace", *args, **kwargs)
# Mock out pdb.Pdb.do_continue.
import pdb
pdb.Pdb.do_continue = lambda self, arg: None
print("===" + " SET_TRACE ===")
assert input() == "debug set_trace()"
# Simulate PytestPdbWrapper.do_debug
cls._recursive_debug += 1
print("ENTERING RECURSIVE DEBUGGER")
print("===" + " SET_TRACE_2 ===")
assert input() == "c"
_pdb.do_continue("")
print("===" + " SET_TRACE_3 ===")
# Simulate PytestPdbWrapper.do_debug
print("LEAVING RECURSIVE DEBUGGER")
cls._recursive_debug -= 1
print("===" + " SET_TRACE_4 ===")
assert input() == "c"
_pdb.do_continue("")
def do_continue(self, arg):
print("=== do_continue")
monkeypatch.setattr(_pytest.debugging, "pytestPDB", pytestPDBTest)
import pdb
monkeypatch.setattr(pdb, "set_trace", pytestPDBTest.set_trace)
set_trace()
"""
)
child = pytester.spawn_pytest(f"--tb=short {p1} {capture_arg}")
child.expect("=== SET_TRACE ===")
before = child.before.decode("utf8")
if not capture_arg:
assert ">>> PDB set_trace (IO-capturing turned off) >>>" in before
else:
assert ">>> PDB set_trace >>>" in before
child.sendline("debug set_trace()")
child.expect("=== SET_TRACE_2 ===")
before = child.before.decode("utf8")
assert "\r\nENTERING RECURSIVE DEBUGGER\r\n" in before
child.sendline("c")
child.expect("=== SET_TRACE_3 ===")
# No continue message with recursive debugging.
before = child.before.decode("utf8")
assert ">>> PDB continue " not in before
child.sendline("c")
child.expect("=== SET_TRACE_4 ===")
before = child.before.decode("utf8")
assert "\r\nLEAVING RECURSIVE DEBUGGER\r\n" in before
child.sendline("c")
rest = child.read().decode("utf8")
if not capture_arg:
assert "> PDB continue (IO-capturing resumed) >" in rest
else:
assert "> PDB continue >" in rest
assert "= 1 passed in" in rest
def test_pdb_used_outside_test(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
pytest.set_trace()
x = 5
"""
)
child = pytester.spawn(f"{sys.executable} {p1}")
child.expect("x = 5")
child.expect("Pdb")
child.sendeof()
self.flush(child)
def test_pdb_used_in_generate_tests(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pytest
def pytest_generate_tests(metafunc):
pytest.set_trace()
x = 5
def test_foo(a):
pass
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("x = 5")
child.expect("Pdb")
child.sendeof()
self.flush(child)
def test_pdb_collection_failure_is_shown(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile("xxx")
result = pytester.runpytest_subprocess("--pdb", p1)
result.stdout.fnmatch_lines(
["E NameError: *xxx*", "*! *Exit: Quitting debugger !*"] # due to EOF
)
@pytest.mark.parametrize("post_mortem", (False, True))
def test_enter_leave_pdb_hooks_are_called(
self, post_mortem, pytester: Pytester
) -> None:
pytester.makeconftest(
"""
mypdb = None
def pytest_configure(config):
config.testing_verification = 'configured'
def pytest_enter_pdb(config, pdb):
assert config.testing_verification == 'configured'
print('enter_pdb_hook')
global mypdb
mypdb = pdb
mypdb.set_attribute = "bar"
def pytest_leave_pdb(config, pdb):
assert config.testing_verification == 'configured'
print('leave_pdb_hook')
global mypdb
assert mypdb is pdb
assert mypdb.set_attribute == "bar"
"""
)
p1 = pytester.makepyfile(
"""
import pytest
def test_set_trace():
pytest.set_trace()
assert 0
def test_post_mortem():
assert 0
"""
)
if post_mortem:
child = pytester.spawn_pytest(str(p1) + " --pdb -s -k test_post_mortem")
else:
child = pytester.spawn_pytest(str(p1) + " -k test_set_trace")
child.expect("enter_pdb_hook")
child.sendline("c")
if post_mortem:
child.expect(r"PDB continue")
else:
child.expect(r"PDB continue \(IO-capturing resumed\)")
child.expect("Captured stdout call")
rest = child.read().decode("utf8")
assert "leave_pdb_hook" in rest
assert "1 failed" in rest
self.flush(child)
def test_pdb_custom_cls(
self, pytester: Pytester, custom_pdb_calls: List[str]
) -> None:
p1 = pytester.makepyfile("""xxx """)
result = pytester.runpytest_inprocess(
"--pdb", "--pdbcls=_pytest:_CustomPdb", p1
)
result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
assert custom_pdb_calls == ["init", "reset", "interaction"]
def test_pdb_custom_cls_invalid(self, pytester: Pytester) -> None:
result = pytester.runpytest_inprocess("--pdbcls=invalid")
result.stderr.fnmatch_lines(
[
"*: error: argument --pdbcls: 'invalid' is not in the format 'modname:classname'"
]
)
def test_pdb_validate_usepdb_cls(self):
assert _validate_usepdb_cls("os.path:dirname.__name__") == (
"os.path",
"dirname.__name__",
)
assert _validate_usepdb_cls("pdb:DoesNotExist") == ("pdb", "DoesNotExist")
def test_pdb_custom_cls_without_pdb(
self, pytester: Pytester, custom_pdb_calls: List[str]
) -> None:
p1 = pytester.makepyfile("""xxx """)
result = pytester.runpytest_inprocess("--pdbcls=_pytest:_CustomPdb", p1)
result.stdout.fnmatch_lines(["*NameError*xxx*", "*1 error*"])
assert custom_pdb_calls == []
def test_pdb_custom_cls_with_set_trace(
self,
pytester: Pytester,
monkeypatch: MonkeyPatch,
) -> None:
pytester.makepyfile(
custom_pdb="""
class CustomPdb(object):
def __init__(self, *args, **kwargs):
skip = kwargs.pop("skip")
assert skip == ["foo.*"]
print("__init__")
super(CustomPdb, self).__init__(*args, **kwargs)
def set_trace(*args, **kwargs):
print('custom set_trace>')
"""
)
p1 = pytester.makepyfile(
"""
import pytest
def test_foo():
pytest.set_trace(skip=['foo.*'])
"""
)
monkeypatch.setenv("PYTHONPATH", str(pytester.path))
child = pytester.spawn_pytest("--pdbcls=custom_pdb:CustomPdb %s" % str(p1))
child.expect("__init__")
child.expect("custom set_trace>")
self.flush(child)
class TestDebuggingBreakpoints:
def test_supports_breakpoint_module_global(self) -> None:
"""Test that supports breakpoint global marks on Python 3.7+."""
if sys.version_info >= (3, 7):
assert SUPPORTS_BREAKPOINT_BUILTIN is True
@pytest.mark.skipif(
not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
)
@pytest.mark.parametrize("arg", ["--pdb", ""])
def test_sys_breakpointhook_configure_and_unconfigure(
self, pytester: Pytester, arg: str
) -> None:
"""
Test that sys.breakpointhook is set to the custom Pdb class once configured, test that
hook is reset to system value once pytest has been unconfigured
"""
pytester.makeconftest(
"""
import sys
from pytest import hookimpl
from _pytest.debugging import pytestPDB
def pytest_configure(config):
config.add_cleanup(check_restored)
def check_restored():
assert sys.breakpointhook == sys.__breakpointhook__
def test_check():
assert sys.breakpointhook == pytestPDB.set_trace
"""
)
pytester.makepyfile(
"""
def test_nothing(): pass
"""
)
args = (arg,) if arg else ()
result = pytester.runpytest_subprocess(*args)
result.stdout.fnmatch_lines(["*1 passed in *"])
@pytest.mark.skipif(
not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
)
def test_pdb_custom_cls(self, pytester: Pytester, custom_debugger_hook) -> None:
p1 = pytester.makepyfile(
"""
def test_nothing():
breakpoint()
"""
)
result = pytester.runpytest_inprocess(
"--pdb", "--pdbcls=_pytest:_CustomDebugger", p1
)
result.stdout.fnmatch_lines(["*CustomDebugger*", "*1 passed*"])
assert custom_debugger_hook == ["init", "set_trace"]
@pytest.mark.parametrize("arg", ["--pdb", ""])
@pytest.mark.skipif(
not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
)
def test_environ_custom_class(
self, pytester: Pytester, custom_debugger_hook, arg: str
) -> None:
pytester.makeconftest(
"""
import os
import sys
os.environ['PYTHONBREAKPOINT'] = '_pytest._CustomDebugger.set_trace'
def pytest_configure(config):
config.add_cleanup(check_restored)
def check_restored():
assert sys.breakpointhook == sys.__breakpointhook__
def test_check():
import _pytest
assert sys.breakpointhook is _pytest._CustomDebugger.set_trace
"""
)
pytester.makepyfile(
"""
def test_nothing(): pass
"""
)
args = (arg,) if arg else ()
result = pytester.runpytest_subprocess(*args)
result.stdout.fnmatch_lines(["*1 passed in *"])
@pytest.mark.skipif(
not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
)
@pytest.mark.skipif(
not _ENVIRON_PYTHONBREAKPOINT == "",
reason="Requires breakpoint() default value",
)
def test_sys_breakpoint_interception(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
breakpoint()
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("Pdb")
child.sendline("quit")
rest = child.read().decode("utf8")
assert "Quitting debugger" in rest
assert "reading from stdin while output" not in rest
TestPDB.flush(child)
@pytest.mark.skipif(
not SUPPORTS_BREAKPOINT_BUILTIN, reason="Requires breakpoint() builtin"
)
def test_pdb_not_altered(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
import pdb
def test_1():
pdb.set_trace()
assert 0
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("test_1")
child.expect("Pdb")
child.sendline("c")
rest = child.read().decode("utf8")
assert "1 failed" in rest
assert "reading from stdin while output" not in rest
TestPDB.flush(child)
class TestTraceOption:
def test_trace_sets_breakpoint(self, pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test_1():
assert True
def test_2():
pass
def test_3():
pass
"""
)
child = pytester.spawn_pytest("--trace " + str(p1))
child.expect("test_1")
child.expect("Pdb")
child.sendline("c")
child.expect("test_2")
child.expect("Pdb")
child.sendline("c")
child.expect("test_3")
child.expect("Pdb")
child.sendline("q")
child.expect_exact("Exit: Quitting debugger")
rest = child.read().decode("utf8")
assert "= 2 passed in" in rest
assert "reading from stdin while output" not in rest
# Only printed once - not on stderr.
assert "Exit: Quitting debugger" not in child.before.decode("utf8")
TestPDB.flush(child)
def test_trace_with_parametrize_handles_shared_fixtureinfo(
self, pytester: Pytester
) -> None:
p1 = pytester.makepyfile(
"""
import pytest
@pytest.mark.parametrize('myparam', [1,2])
def test_1(myparam, request):
assert myparam in (1, 2)
assert request.function.__name__ == "test_1"
@pytest.mark.parametrize('func', [1,2])
def test_func(func, request):
assert func in (1, 2)
assert request.function.__name__ == "test_func"
@pytest.mark.parametrize('myparam', [1,2])
def test_func_kw(myparam, request, func="func_kw"):
assert myparam in (1, 2)
assert func == "func_kw"
assert request.function.__name__ == "test_func_kw"
"""
)
child = pytester.spawn_pytest("--trace " + str(p1))
for func, argname in [
("test_1", "myparam"),
("test_func", "func"),
("test_func_kw", "myparam"),
]:
child.expect_exact("> PDB runcall (IO-capturing turned off) >")
child.expect_exact(func)
child.expect_exact("Pdb")
child.sendline("args")
child.expect_exact(f"{argname} = 1\r\n")
child.expect_exact("Pdb")
child.sendline("c")
child.expect_exact("Pdb")
child.sendline("args")
child.expect_exact(f"{argname} = 2\r\n")
child.expect_exact("Pdb")
child.sendline("c")
child.expect_exact("> PDB continue (IO-capturing resumed) >")
rest = child.read().decode("utf8")
assert "= 6 passed in" in rest
assert "reading from stdin while output" not in rest
# Only printed once - not on stderr.
assert "Exit: Quitting debugger" not in child.before.decode("utf8")
TestPDB.flush(child)
def test_trace_after_runpytest(pytester: Pytester) -> None:
"""Test that debugging's pytest_configure is re-entrant."""
p1 = pytester.makepyfile(
"""
from _pytest.debugging import pytestPDB
def test_outer(pytester) -> None:
assert len(pytestPDB._saved) == 1
pytester.makepyfile(
\"""
from _pytest.debugging import pytestPDB
def test_inner():
assert len(pytestPDB._saved) == 2
print()
print("test_inner_" + "end")
\"""
)
result = pytester.runpytest("-s", "-k", "test_inner")
assert result.ret == 0
assert len(pytestPDB._saved) == 1
"""
)
result = pytester.runpytest_subprocess("-s", "-p", "pytester", str(p1))
result.stdout.fnmatch_lines(["test_inner_end"])
assert result.ret == 0
def test_quit_with_swallowed_SystemExit(pytester: Pytester) -> None:
"""Test that debugging's pytest_configure is re-entrant."""
p1 = pytester.makepyfile(
"""
def call_pdb_set_trace():
__import__('pdb').set_trace()
def test_1():
try:
call_pdb_set_trace()
except SystemExit:
pass
def test_2():
pass
"""
)
child = pytester.spawn_pytest(str(p1))
child.expect("Pdb")
child.sendline("q")
child.expect_exact("Exit: Quitting debugger")
rest = child.read().decode("utf8")
assert "no tests ran" in rest
TestPDB.flush(child)
@pytest.mark.parametrize("fixture", ("capfd", "capsys"))
def test_pdb_suspends_fixture_capturing(pytester: Pytester, fixture: str) -> None:
"""Using "-s" with pytest should suspend/resume fixture capturing."""
p1 = pytester.makepyfile(
"""
def test_inner({fixture}):
import sys
print("out_inner_before")
sys.stderr.write("err_inner_before\\n")
__import__("pdb").set_trace()
print("out_inner_after")
sys.stderr.write("err_inner_after\\n")
out, err = {fixture}.readouterr()
assert out =="out_inner_before\\nout_inner_after\\n"
assert err =="err_inner_before\\nerr_inner_after\\n"
""".format(
fixture=fixture
)
)
child = pytester.spawn_pytest(str(p1) + " -s")
child.expect("Pdb")
before = child.before.decode("utf8")
assert (
"> PDB set_trace (IO-capturing turned off for fixture %s) >" % (fixture)
in before
)
# Test that capturing is really suspended.
child.sendline("p 40 + 2")
child.expect("Pdb")
assert "\r\n42\r\n" in child.before.decode("utf8")
child.sendline("c")
rest = child.read().decode("utf8")
assert "out_inner" not in rest
assert "err_inner" not in rest
TestPDB.flush(child)
assert child.exitstatus == 0
assert "= 1 passed in" in rest
assert "> PDB continue (IO-capturing resumed for fixture %s) >" % (fixture) in rest
def test_pdbcls_via_local_module(pytester: Pytester) -> None:
"""It should be imported in pytest_configure or later only."""
p1 = pytester.makepyfile(
"""
def test():
print("before_set_trace")
__import__("pdb").set_trace()
""",
mypdb="""
class Wrapped:
class MyPdb:
def set_trace(self, *args):
print("set_trace_called", args)
def runcall(self, *args, **kwds):
print("runcall_called", args, kwds)
""",
)
result = pytester.runpytest(
str(p1), "--pdbcls=really.invalid:Value", syspathinsert=True
)
result.stdout.fnmatch_lines(
[
"*= FAILURES =*",
"E * --pdbcls: could not import 'really.invalid:Value': No module named *really*",
]
)
assert result.ret == 1
result = pytester.runpytest(
str(p1), "--pdbcls=mypdb:Wrapped.MyPdb", syspathinsert=True
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*set_trace_called*", "* 1 passed in *"])
# Ensure that it also works with --trace.
result = pytester.runpytest(
str(p1), "--pdbcls=mypdb:Wrapped.MyPdb", "--trace", syspathinsert=True
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*runcall_called*", "* 1 passed in *"])
def test_raises_bdbquit_with_eoferror(pytester: Pytester) -> None:
"""It is not guaranteed that DontReadFromInput's read is called."""
p1 = pytester.makepyfile(
"""
def input_without_read(*args, **kwargs):
raise EOFError()
def test(monkeypatch):
import builtins
monkeypatch.setattr(builtins, "input", input_without_read)
__import__('pdb').set_trace()
"""
)
result = pytester.runpytest(str(p1))
result.stdout.fnmatch_lines(["E *BdbQuit", "*= 1 failed in*"])
assert result.ret == 1
def test_pdb_wrapper_class_is_reused(pytester: Pytester) -> None:
p1 = pytester.makepyfile(
"""
def test():
__import__("pdb").set_trace()
__import__("pdb").set_trace()
import mypdb
instances = mypdb.instances
assert len(instances) == 2
assert instances[0].__class__ is instances[1].__class__
""",
mypdb="""
instances = []
class MyPdb:
def __init__(self, *args, **kwargs):
instances.append(self)
def set_trace(self, *args):
print("set_trace_called", args)
""",
)
result = pytester.runpytest(str(p1), "--pdbcls=mypdb:MyPdb", syspathinsert=True)
assert result.ret == 0
result.stdout.fnmatch_lines(
["*set_trace_called*", "*set_trace_called*", "* 1 passed in *"]
)
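# --- Added illustration (not part of the original test module) ---
# A minimal sketch of the pexpect-driven pattern used throughout these tests,
# assuming the `pytester` fixture and a pexpect-backed `spawn_pytest`:
#
#     p1 = pytester.makepyfile("def test(): __import__('pdb').set_trace()")
#     child = pytester.spawn_pytest(str(p1))
#     child.expect("Pdb")                     # wait for the debugger prompt
#     child.sendline("c")                     # continue past the breakpoint
#     rest = child.read().decode("utf8")      # collect the remaining output
#     TestPDB.flush(child)                    # reap the child process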
| 32.344127 | 97 | 0.544386 |
b93ddd4c659b7a31d8188c948e5216efcf0a4373
| 5,289 |
py
|
Python
|
lungmask/mask.py
|
tp61/lungmask
|
5d12abf2b9a500da1b6abeae6711628a8081b97c
|
[
"Apache-2.0"
] | null | null | null |
lungmask/mask.py
|
tp61/lungmask
|
5d12abf2b9a500da1b6abeae6711628a8081b97c
|
[
"Apache-2.0"
] | null | null | null |
lungmask/mask.py
|
tp61/lungmask
|
5d12abf2b9a500da1b6abeae6711628a8081b97c
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import torch
from lungmask import utils
import SimpleITK as sitk
from .resunet import UNet
import warnings
import sys
from tqdm import tqdm
import skimage
import logging
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
warnings.filterwarnings("ignore", category=UserWarning)
# stores urls and number of classes of the models
model_urls = {('unet', 'R231'): ('https://github.com/JoHof/lungmask/releases/download/v0.0/unet_r231-d5d2fc3d.pth', 3),
('unet', 'LTRCLobes'): (
'https://github.com/JoHof/lungmask/releases/download/v0.0/unet_ltrclobes-3a07043d.pth', 6),
('unet', 'R231CovidWeb'): (
'https://github.com/JoHof/lungmask/releases/download/v0.0/unet_r231covid-0de78a7e.pth', 3)}
def apply(image, model=None, force_cpu=False, batch_size=20, volume_postprocessing=True, noHU=False):
if model is None:
model = get_model('unet', 'R231')
numpy_mode = isinstance(image, np.ndarray)
if numpy_mode:
inimg_raw = image.copy()
else:
inimg_raw = sitk.GetArrayFromImage(image)
directions = np.asarray(image.GetDirection())
if len(directions) == 9:
inimg_raw = np.flip(inimg_raw, np.where(directions[[0,4,8]][::-1]<0)[0])
del image
if force_cpu:
device = torch.device('cpu')
else:
if torch.cuda.is_available():
device = torch.device('cuda')
else:
logging.info("No GPU support available, will use CPU. Note, that this is significantly slower!")
batch_size = 1
device = torch.device('cpu')
model.to(device)
if not noHU:
tvolslices, xnew_box = utils.preprocess(inimg_raw, resolution=[256, 256])
tvolslices[tvolslices > 600] = 600
tvolslices = np.divide((tvolslices + 1024), 1624)
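        # Added note (our reading of the two lines above): clipping at 600 HU
        # and computing (x + 1024) / 1624 rescales the window [-1024, 600] HU
        # onto [0, 1] before the slices are fed to the network.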
else:
# support for non HU images. This is just a hack. The models were not trained with this in mind
tvolslices = skimage.color.rgb2gray(inimg_raw)
tvolslices = skimage.transform.resize(tvolslices, [256, 256])
tvolslices = np.asarray([tvolslices*x for x in np.linspace(0.3,2,20)])
tvolslices[tvolslices>1] = 1
sanity = [(tvolslices[x]>0.6).sum()>25000 for x in range(len(tvolslices))]
tvolslices = tvolslices[sanity]
torch_ds_val = utils.LungLabelsDS_inf(tvolslices)
dataloader_val = torch.utils.data.DataLoader(torch_ds_val, batch_size=batch_size, shuffle=False, num_workers=1,
pin_memory=False)
timage_res = np.empty((np.append(0, tvolslices[0].shape)), dtype=np.uint8)
with torch.no_grad():
for X in tqdm(dataloader_val):
X = X.float().to(device)
prediction = model(X)
pls = torch.max(prediction, 1)[1].detach().cpu().numpy().astype(np.uint8)
timage_res = np.vstack((timage_res, pls))
# postprocessing includes removal of small connected components, hole filling and mapping of small components to
# neighbors
if volume_postprocessing:
outmask = utils.postrocessing(timage_res)
else:
outmask = timage_res
if noHU:
outmask = skimage.transform.resize(outmask[np.argmax((outmask==1).sum(axis=(1,2)))], inimg_raw.shape[:2], order=0, anti_aliasing=False, preserve_range=True)[None,:,:]
else:
outmask = np.asarray(
[utils.reshape_mask(outmask[i], xnew_box[i], inimg_raw.shape[1:]) for i in range(outmask.shape[0])],
dtype=np.uint8)
if not numpy_mode:
if len(directions) == 9:
outmask = np.flip(outmask, np.where(directions[[0,4,8]][::-1]<0)[0])
return outmask.astype(np.uint8)
def get_model(modeltype, modelname):
model_url, n_classes = model_urls[(modeltype, modelname)]
state_dict = torch.hub.load_state_dict_from_url(model_url, progress=True, map_location=torch.device('cpu'))
if modeltype == 'unet':
model = UNet(n_classes=n_classes, padding=True, depth=5, up_mode='upsample', batch_norm=True, residual=False)
elif modeltype == 'resunet':
model = UNet(n_classes=n_classes, padding=True, depth=5, up_mode='upsample', batch_norm=True, residual=True)
else:
logging.exception(f"Model {modelname} not known")
model.load_state_dict(state_dict)
model.eval()
return model
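# Added usage note (not in the original module): given the `model_urls` table
# above, typical lookups would be
#     model = get_model('unet', 'R231')        # 3-class lung model
#     model = get_model('unet', 'LTRCLobes')   # 6-class lobe model
# assuming the release assets are reachable for torch.hub to download.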
def apply_fused(image, basemodel = 'LTRCLobes', fillmodel = 'R231', force_cpu=False, batch_size=20, volume_postprocessing=True, noHU=False):
    '''Will apply basemodel and use fillmodel to mitigate false negatives'''
mdl_r = get_model('unet',fillmodel)
mdl_l = get_model('unet',basemodel)
logging.info("Apply: %s" % basemodel)
res_l = apply(image, mdl_l, force_cpu=force_cpu, batch_size=batch_size, volume_postprocessing=volume_postprocessing, noHU=noHU)
logging.info("Apply: %s" % fillmodel)
res_r = apply(image, mdl_r, force_cpu=force_cpu, batch_size=batch_size, volume_postprocessing=volume_postprocessing, noHU=noHU)
spare_value = res_l.max()+1
res_l[np.logical_and(res_l==0, res_r>0)] = spare_value
res_l[res_r==0] = 0
logging.info("Fusing results... this may take up to several minutes!")
return utils.postrocessing(res_l, spare=[spare_value])
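# --- Added usage sketch (not part of the original module) ---
# A minimal end-to-end example, assuming SimpleITK can read the scan and the
# weights can be downloaded; `ct_path` is a placeholder.
#
#     import SimpleITK as sitk
#     from lungmask import mask
#
#     image = sitk.ReadImage(ct_path)       # 3D CT volume in HU
#     segmentation = mask.apply(image)      # numpy array, one label per voxel
#     # or fuse LTRCLobes with R231 to reduce false negatives:
#     lobes = mask.apply_fused(image)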
| 43.352459 | 174 | 0.665532 |
e6d7ab2f52cc06d8408dd17501d0fee3a59e8ce6
| 30,197 |
py
|
Python
|
2_LUCIR_+DC/class_incremental_cosine_imagenet.py
|
lywang3081/MRDC
|
7f8ec5060495560d6e307d89341ccf5598dcb85b
|
[
"MIT"
] | 1 |
2022-03-18T04:46:01.000Z
|
2022-03-18T04:46:01.000Z
|
2_LUCIR_+DC/class_incremental_cosine_imagenet.py
|
lywang3081/MRDC
|
7f8ec5060495560d6e307d89341ccf5598dcb85b
|
[
"MIT"
] | null | null | null |
2_LUCIR_+DC/class_incremental_cosine_imagenet.py
|
lywang3081/MRDC
|
7f8ec5060495560d6e307d89341ccf5598dcb85b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim import lr_scheduler
import torchvision
from torchvision import datasets, models, transforms
from torch.autograd import Variable
import numpy as np
import time
import os
import sys
import copy
import argparse
from PIL import Image
try:
import cPickle as pickle
except:
import pickle
import math
import modified_resnet
import modified_linear
import utils_pytorch
from utils_imagenet.utils_dataset import split_images_labels
from utils_imagenet.utils_dataset import merge_images_labels
from utils_incremental.compute_features import compute_features
from utils_incremental.compute_accuracy import compute_accuracy
from utils_incremental.compute_confusion_matrix import compute_confusion_matrix
from utils_incremental.incremental_train_and_eval import incremental_train_and_eval
from utils_incremental.incremental_train_and_eval_MS import incremental_train_and_eval_MS
from utils_incremental.incremental_train_and_eval_LF import incremental_train_and_eval_LF
from utils_incremental.incremental_train_and_eval_MR_LF import incremental_train_and_eval_MR_LF
from utils_incremental.incremental_train_and_eval_AMR_LF import incremental_train_and_eval_AMR_LF
import time
######### Modifiable Settings ##########
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', default='seed_1993_subset_100_imagenet', type=str)
parser.add_argument('--num_classes', default=100, type=int)
parser.add_argument('--num_workers', default=16, type=int, \
help='the number of workers for loading data')
parser.add_argument('--nb_cl_fg', default=50, type=int, \
help='the number of classes in first group')
parser.add_argument('--nb_cl', default=10, type=int, \
help='Classes per group')
parser.add_argument('--nb_protos', default=20, type=int, \
help='Number of prototypes per class at the end')
parser.add_argument('--nb_runs', default=1, type=int, \
help='Number of runs (random ordering of classes at each run)')
parser.add_argument('--ckp_prefix', default=os.path.basename(sys.argv[0])[:-3], type=str, \
help='Checkpoint prefix')
parser.add_argument('--epochs', default=90, type=int, \
help='Epochs')
parser.add_argument('--T', default=2, type=float, \
                    help='Temperature for distillation')
parser.add_argument('--beta', default=0.25, type=float, \
                    help='Beta for distillation')
parser.add_argument('--resume', action='store_true', \
help='resume from checkpoint')
parser.add_argument('--fix_budget', action='store_true', \
help='fix budget')
########################################
parser.add_argument('--mimic_score', action='store_true', \
help='To mimic scores for cosine embedding')
parser.add_argument('--lw_ms', default=1, type=float, \
help='loss weight for mimicking score')
########################################
#improved class incremental learning
parser.add_argument('--rs_ratio', default=0, type=float, \
help='The ratio for resample')
parser.add_argument('--imprint_weights', action='store_true', \
help='Imprint the weights for novel classes')
parser.add_argument('--less_forget', action='store_true', \
help='Less forgetful')
parser.add_argument('--lamda', default=5, type=float, \
help='Lamda for LF')
parser.add_argument('--adapt_lamda', action='store_true', \
help='Adaptively change lamda')
parser.add_argument('--mr_loss', action='store_true', \
help='Margin ranking loss v1')
parser.add_argument('--amr_loss', action='store_true', \
help='Margin ranking loss v2')
parser.add_argument('--dist', default=0.5, type=float, \
help='Dist for MarginRankingLoss')
parser.add_argument('--K', default=2, type=int, \
help='K for MarginRankingLoss')
parser.add_argument('--lw_mr', default=1, type=float, \
help='loss weight for margin ranking loss')
########################################
parser.add_argument('--random_seed', default=1993, type=int, \
help='random seed')
parser.add_argument('--datadir', default='/home/username/data/ImageNet/seed_1993_subset_100_imagenet/', type=str)
parser.add_argument('--traindir_compression', default='/home/username/data/ImageNet/seed_1993_subset_100_imagenet_quality_5/train', type=str)
parser.add_argument('--quality', default=10, type=float)
args = parser.parse_args()
########################################
assert(args.nb_cl_fg % args.nb_cl == 0)
assert(args.nb_cl_fg >= args.nb_cl)
train_batch_size = 128 # Batch size for train
test_batch_size = 50 # Batch size for test
eval_batch_size = 128 # Batch size for eval
base_lr = 0.1 # Initial learning rate
lr_strat = [30, 60] # Epochs where learning rate gets decreased
lr_factor = 0.1 # Learning rate decrease factor
custom_weight_decay = 1e-4 # Weight Decay
custom_momentum = 0.9 # Momentum
args.ckp_prefix = '{}_nb_cl_fg_{}_nb_cl_{}_nb_protos_{}_quality_{}_seed_{}'.format(args.ckp_prefix, args.nb_cl_fg, args.nb_cl, args.nb_protos, args.quality, args.random_seed)
np.random.seed(args.random_seed) # Fix the random seed
print(args)
########################################
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
traindir = args.datadir + 'train'
valdir = args.datadir + 'val'
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
trainset = datasets.ImageFolder(
traindir,
transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
testset = datasets.ImageFolder(valdir, transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
]))
evalset = datasets.ImageFolder(valdir, transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
]))
# load compressed data
transform_train_compression = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,])
traindir_compression = args.traindir_compression
trainset_compression = datasets.ImageFolder(traindir_compression, transform_train_compression)
# Initialization
dictionary_size = 1500
top1_acc_list_cumul = np.zeros((int(args.num_classes/args.nb_cl),3,args.nb_runs))
top1_acc_list_ori = np.zeros((int(args.num_classes/args.nb_cl),3,args.nb_runs))
X_train_total, Y_train_total = split_images_labels(trainset.imgs)
X_valid_total, Y_valid_total = split_images_labels(testset.imgs)
X_train_total_compression, Y_train_total_compression = split_images_labels(trainset_compression.imgs)
start = time.perf_counter()  # time.clock() was removed in Python 3.8
# Launch the different runs
for iteration_total in range(args.nb_runs):
# Select the order for the class learning
order_name = "./checkpoint/seed_{}_{}_order_run_{}.pkl".format(args.random_seed, args.dataset, iteration_total)
print("Order name:{}".format(order_name))
if os.path.exists(order_name):
print("Loading orders")
order = utils_pytorch.unpickle(order_name)
else:
print("Generating orders")
order = np.arange(args.num_classes)
np.random.shuffle(order)
utils_pytorch.savepickle(order, order_name)
order_list = list(order)
print(order_list)
# Initialization of the variables for this run
X_valid_cumuls = []
X_protoset_cumuls = []
X_train_cumuls = []
Y_valid_cumuls = []
Y_protoset_cumuls = []
Y_train_cumuls = []
alpha_dr_herding = np.zeros((int(args.num_classes/args.nb_cl),dictionary_size,args.nb_cl),np.float32)
# The following contains all the training samples of the different classes
# because we want to compare our method with the theoretical case where all the training samples are stored
# prototypes = np.zeros((args.num_classes,dictionary_size,X_train_total.shape[1],X_train_total.shape[2],X_train_total.shape[3]))
prototypes = [[] for i in range(args.num_classes)]
for orde in range(args.num_classes):
prototypes[orde] = X_train_total_compression[np.where(Y_train_total_compression==order[orde])]
    prototypes = np.array(prototypes, dtype=object)  # ragged per-class lists need an object array on recent numpy
start_iter = int(args.nb_cl_fg/args.nb_cl)-1
for iteration in range(start_iter, int(args.num_classes/args.nb_cl)):
#init model
if iteration == start_iter:
############################################################
last_iter = 0
############################################################
tg_model = modified_resnet.resnet18(num_classes=args.nb_cl_fg)
in_features = tg_model.fc.in_features
out_features = tg_model.fc.out_features
print("in_features:", in_features, "out_features:", out_features)
ref_model = None
elif iteration == start_iter+1:
############################################################
last_iter = iteration
############################################################
#increment classes
ref_model = copy.deepcopy(tg_model)
in_features = tg_model.fc.in_features
out_features = tg_model.fc.out_features
print("in_features:", in_features, "out_features:", out_features)
new_fc = modified_linear.SplitCosineLinear(in_features, out_features, args.nb_cl)
new_fc.fc1.weight.data = tg_model.fc.weight.data
new_fc.sigma.data = tg_model.fc.sigma.data
tg_model.fc = new_fc
lamda_mult = out_features*1.0 / args.nb_cl
else:
############################################################
last_iter = iteration
############################################################
ref_model = copy.deepcopy(tg_model)
in_features = tg_model.fc.in_features
out_features1 = tg_model.fc.fc1.out_features
out_features2 = tg_model.fc.fc2.out_features
print("in_features:", in_features, "out_features1:", \
out_features1, "out_features2:", out_features2)
new_fc = modified_linear.SplitCosineLinear(in_features, out_features1+out_features2, args.nb_cl)
new_fc.fc1.weight.data[:out_features1] = tg_model.fc.fc1.weight.data
new_fc.fc1.weight.data[out_features1:] = tg_model.fc.fc2.weight.data
new_fc.sigma.data = tg_model.fc.sigma.data
tg_model.fc = new_fc
lamda_mult = (out_features1+out_features2)*1.0 / (args.nb_cl)
if iteration > start_iter and args.less_forget and args.adapt_lamda:
#cur_lamda = lamda_base * sqrt(num_old_classes/num_new_classes)
cur_lamda = args.lamda * math.sqrt(lamda_mult)
else:
cur_lamda = args.lamda
if iteration > start_iter and args.less_forget:
print("###############################")
print("Lamda for less forget is set to ", cur_lamda)
print("###############################")
# Prepare the training data for the current batch of classes
actual_cl = order[range(last_iter*args.nb_cl,(iteration+1)*args.nb_cl)]
indices_train_10 = np.array([i in order[range(last_iter*args.nb_cl,(iteration+1)*args.nb_cl)] for i in Y_train_total])
indices_test_10 = np.array([i in order[range(last_iter*args.nb_cl,(iteration+1)*args.nb_cl)] for i in Y_valid_total])
X_train = X_train_total[indices_train_10]
X_valid = X_valid_total[indices_test_10]
X_valid_cumuls.append(X_valid)
X_train_cumuls.append(X_train)
X_valid_cumul = np.concatenate(X_valid_cumuls)
X_train_cumul = np.concatenate(X_train_cumuls)
Y_train = Y_train_total[indices_train_10]
Y_valid = Y_valid_total[indices_test_10]
Y_valid_cumuls.append(Y_valid)
Y_train_cumuls.append(Y_train)
Y_valid_cumul = np.concatenate(Y_valid_cumuls)
Y_train_cumul = np.concatenate(Y_train_cumuls)
# Add the stored exemplars to the training data
if iteration == start_iter:
X_valid_ori = X_valid
Y_valid_ori = Y_valid
else:
X_protoset = np.concatenate(X_protoset_cumuls)
Y_protoset = np.concatenate(Y_protoset_cumuls)
if args.rs_ratio > 0:
#1/rs_ratio = (len(X_train)+len(X_protoset)*scale_factor)/(len(X_protoset)*scale_factor)
scale_factor = (len(X_train) * args.rs_ratio) / (len(X_protoset) * (1 - args.rs_ratio))
rs_sample_weights = np.concatenate((np.ones(len(X_train)), np.ones(len(X_protoset))*scale_factor))
#number of samples per epoch
#rs_num_samples = len(X_train) + len(X_protoset)
rs_num_samples = int(len(X_train) / (1 - args.rs_ratio))
print("X_train:{}, X_protoset:{}, rs_num_samples:{}".format(len(X_train), len(X_protoset), rs_num_samples))
X_train = np.concatenate((X_train,X_protoset),axis=0)
Y_train = np.concatenate((Y_train,Y_protoset))
# Launch the training loop
print('Batch of classes number {0} arrives ...'.format(iteration+1))
map_Y_train = np.array([order_list.index(i) for i in Y_train])
map_Y_valid_cumul = np.array([order_list.index(i) for i in Y_valid_cumul])
#imprint weights
if iteration > start_iter and args.imprint_weights:
#input: tg_model, X_train, map_Y_train
#class_start = iteration*nb_cl class_end = (iteration+1)*nb_cl
print("Imprint weights")
#########################################
#compute the average norm of old embdding
old_embedding_norm = tg_model.fc.fc1.weight.data.norm(dim=1, keepdim=True)
average_old_embedding_norm = torch.mean(old_embedding_norm, dim=0).to('cpu').type(torch.DoubleTensor)
#########################################
tg_feature_model = nn.Sequential(*list(tg_model.children())[:-1])
num_features = tg_model.fc.in_features
novel_embedding = torch.zeros((args.nb_cl, num_features))
for cls_idx in range(iteration*args.nb_cl, (iteration+1)*args.nb_cl):
cls_indices = np.array([i == cls_idx for i in map_Y_train])
assert(len(np.where(cls_indices==1)[0])<=dictionary_size)
#evalset.test_data = X_train[cls_indices].astype('uint8')
#evalset.test_labels = np.zeros(evalset.test_data.shape[0]) #zero labels
current_eval_set = merge_images_labels(X_train[cls_indices], np.zeros(len(X_train[cls_indices])))
evalset.imgs = evalset.samples = current_eval_set
evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
shuffle=False, num_workers=2)
num_samples = len(X_train[cls_indices])
cls_features = compute_features(tg_feature_model, evalloader, num_samples, num_features)
#cls_features = cls_features.T
#cls_features = cls_features / np.linalg.norm(cls_features,axis=0)
#cls_embedding = np.mean(cls_features, axis=1)
norm_features = F.normalize(torch.from_numpy(cls_features), p=2, dim=1)
cls_embedding = torch.mean(norm_features, dim=0)
#novel_embedding[cls_idx-iteration*args.nb_cl] = cls_embedding
novel_embedding[cls_idx-iteration*args.nb_cl] = F.normalize(cls_embedding, p=2, dim=0) * average_old_embedding_norm
tg_model.to(device)
#torch.save(tg_model, "tg_model_before_imprint_weights.pth")
tg_model.fc.fc2.weight.data = novel_embedding.to(device)
#torch.save(tg_model, "tg_model_after_imprint_weights.pth")
############################################################
#trainset.train_data = X_train.astype('uint8')
#trainset.train_labels = map_Y_train
current_train_imgs = merge_images_labels(X_train, map_Y_train)
trainset.imgs = trainset.samples = current_train_imgs
if iteration > start_iter and args.rs_ratio > 0 and scale_factor > 1:
print("Weights from sampling:", rs_sample_weights)
index1 = np.where(rs_sample_weights>1)[0]
index2 = np.where(map_Y_train<iteration*args.nb_cl)[0]
assert((index1==index2).all())
train_sampler = torch.utils.data.sampler.WeightedRandomSampler(rs_sample_weights, rs_num_samples)
#trainloader = torch.utils.data.DataLoader(trainset, batch_size=train_batch_size, \
# shuffle=False, sampler=train_sampler, num_workers=2)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=train_batch_size, \
shuffle=False, sampler=train_sampler, num_workers=args.num_workers, pin_memory=True)
else:
#trainloader = torch.utils.data.DataLoader(trainset, batch_size=train_batch_size,
# shuffle=True, num_workers=2)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=train_batch_size,
shuffle=True, num_workers=args.num_workers, pin_memory=True)
#testset.test_data = X_valid_cumul.astype('uint8')
#testset.test_labels = map_Y_valid_cumul
current_test_imgs = merge_images_labels(X_valid_cumul, map_Y_valid_cumul)
testset.imgs = testset.samples = current_test_imgs
testloader = torch.utils.data.DataLoader(testset, batch_size=test_batch_size, shuffle=False, num_workers=2)
        print('Min and Max of train labels: {}, {}'.format(min(map_Y_train), max(map_Y_train)))
        print('Min and Max of valid labels: {}, {}'.format(min(map_Y_valid_cumul), max(map_Y_valid_cumul)))
##############################################################
ckp_name = './checkpoint/{}_run_{}_iteration_{}_model.pth'.format(args.ckp_prefix, iteration_total, iteration)
print('ckp_name', ckp_name)
if args.resume and os.path.exists(ckp_name):
print("###############################")
print("Loading models from checkpoint")
tg_model = torch.load(ckp_name)
print("###############################")
else:
###############################
if iteration > start_iter and args.less_forget:
#fix the embedding of old classes
ignored_params = list(map(id, tg_model.fc.fc1.parameters()))
base_params = filter(lambda p: id(p) not in ignored_params, \
tg_model.parameters())
tg_params =[{'params': base_params, 'lr': base_lr, 'weight_decay': custom_weight_decay}, \
{'params': tg_model.fc.fc1.parameters(), 'lr': 0, 'weight_decay': 0}]
else:
tg_params = tg_model.parameters()
###############################
tg_model = tg_model.to(device)
if iteration > start_iter:
ref_model = ref_model.to(device)
tg_optimizer = optim.SGD(tg_params, lr=base_lr, momentum=custom_momentum, weight_decay=custom_weight_decay)
tg_lr_scheduler = lr_scheduler.MultiStepLR(tg_optimizer, milestones=lr_strat, gamma=lr_factor)
###############################
if args.less_forget and args.mr_loss:
print("incremental_train_and_eval_MR_LF")
tg_model = incremental_train_and_eval_MR_LF(args.epochs, tg_model, ref_model, tg_optimizer, tg_lr_scheduler, \
trainloader, testloader, \
iteration, start_iter, \
cur_lamda, \
args.dist, args.K, args.lw_mr)
elif args.less_forget and args.amr_loss:
print("incremental_train_and_eval_AMR_LF")
tg_model = incremental_train_and_eval_AMR_LF(args.epochs, tg_model, ref_model, tg_optimizer, tg_lr_scheduler, \
trainloader, testloader, \
iteration, start_iter, \
cur_lamda, \
args.dist, args.K, args.lw_mr)
else:
if args.less_forget:
print("incremental_train_and_eval_LF")
tg_model = incremental_train_and_eval_LF(args.epochs, tg_model, ref_model, tg_optimizer, tg_lr_scheduler, \
trainloader, testloader, \
iteration, start_iter, \
cur_lamda)
else:
if args.mimic_score:
print("incremental_train_and_eval_MS")
tg_model = incremental_train_and_eval_MS(args.epochs, tg_model, ref_model, tg_optimizer, tg_lr_scheduler, \
trainloader, testloader, \
iteration, start_iter,
args.lw_ms)
else:
print("incremental_train_and_eval")
tg_model = incremental_train_and_eval(args.epochs, tg_model, ref_model, tg_optimizer, tg_lr_scheduler, \
trainloader, testloader, \
iteration, start_iter,
args.T, args.beta)
if not os.path.isdir('checkpoint'):
os.mkdir('checkpoint')
torch.save(tg_model, ckp_name)
### Exemplars
if args.fix_budget:
nb_protos_cl = int(np.ceil(args.nb_protos*args.num_classes*1.0/args.nb_cl/(iteration+1)))
else:
nb_protos_cl = args.nb_protos
tg_feature_model = nn.Sequential(*list(tg_model.children())[:-1])
num_features = tg_model.fc.in_features
# Herding
print('Updating exemplar set...')
for iter_dico in range(last_iter*args.nb_cl, (iteration+1)*args.nb_cl):
# Possible exemplars in the feature space and projected on the L2 sphere
# evalset.test_data = prototypes[iter_dico].astype('uint8')
# evalset.test_labels = np.zeros(evalset.test_data.shape[0]) #zero labels
current_eval_set = merge_images_labels(prototypes[iter_dico], np.zeros(len(prototypes[iter_dico])))
evalset.imgs = evalset.samples = current_eval_set
evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
shuffle=False, num_workers=args.num_workers, pin_memory=True)
num_samples = len(prototypes[iter_dico])
mapped_prototypes = compute_features(tg_feature_model, evalloader, num_samples, num_features)
D = mapped_prototypes.T
D = D/np.linalg.norm(D,axis=0)
# Herding procedure : ranking of the potential exemplars
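            # Added explanatory note (our reading of the loop below): herding
            # greedily picks exemplars whose running mean approximates the
            # class mean mu; at step t it selects i* = argmax_i <w_t, d_i> and
            # updates w_{t+1} = w_t + mu - d_{i*}.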
mu = np.mean(D,axis=1)
index1 = int(iter_dico/args.nb_cl)
index2 = iter_dico % args.nb_cl
alpha_dr_herding[index1,:,index2] = alpha_dr_herding[index1,:,index2]*0
w_t = mu
iter_herding = 0
iter_herding_eff = 0
while not(np.sum(alpha_dr_herding[index1,:,index2]!=0)==min(nb_protos_cl,500)) and iter_herding_eff<1000:
tmp_t = np.dot(w_t,D)
ind_max = np.argmax(tmp_t)
iter_herding_eff += 1
if alpha_dr_herding[index1,ind_max,index2] == 0:
alpha_dr_herding[index1,ind_max,index2] = 1+iter_herding
iter_herding += 1
w_t = w_t+mu-D[:,ind_max]
# Prepare the protoset
X_protoset_cumuls = []
Y_protoset_cumuls = []
# Class means for iCaRL and NCM + Storing the selected exemplars in the protoset
        print('Computing mean-of-exemplars and theoretical mean...')
# class_means = np.zeros((64,100,2))
class_means = np.zeros((num_features, args.num_classes, 2))
for iteration2 in range(iteration+1):
for iter_dico in range(args.nb_cl):
current_cl = order[range(iteration2*args.nb_cl,(iteration2+1)*args.nb_cl)]
# Collect data in the feature space for each class
# evalset.test_data = prototypes[iteration2*args.nb_cl+iter_dico].astype('uint8')
# evalset.test_labels = np.zeros(evalset.test_data.shape[0]) #zero labels
current_eval_set = merge_images_labels(prototypes[iteration2*args.nb_cl+iter_dico], \
np.zeros(len(prototypes[iteration2*args.nb_cl+iter_dico])))
evalset.imgs = evalset.samples = current_eval_set
evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
shuffle=False, num_workers=args.num_workers, pin_memory=True)
num_samples = len(prototypes[iteration2*args.nb_cl+iter_dico])
mapped_prototypes = compute_features(tg_feature_model, evalloader, num_samples, num_features)
D = mapped_prototypes.T
D = D/np.linalg.norm(D,axis=0)
# Flipped version also
# evalset.test_data = prototypes[iteration2*args.nb_cl+iter_dico][:,:,:,::-1].astype('uint8')
# evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
# shuffle=False, num_workers=2)
# mapped_prototypes2 = compute_features(tg_feature_model, evalloader, num_samples, num_features)
# D2 = mapped_prototypes2.T
# D2 = D2/np.linalg.norm(D2,axis=0)
D2 = D
# iCaRL
alph = alpha_dr_herding[iteration2,:,iter_dico]
assert((alph[num_samples:]==0).all())
alph = alph[:num_samples]
alph = (alph>0)*(alph<nb_protos_cl+1)*1.
# X_protoset_cumuls.append(prototypes[iteration2*args.nb_cl+iter_dico,np.where(alph==1)[0]])
X_protoset_cumuls.append(prototypes[iteration2*args.nb_cl+iter_dico][np.where(alph==1)[0]])
Y_protoset_cumuls.append(order[iteration2*args.nb_cl+iter_dico]*np.ones(len(np.where(alph==1)[0])))
alph = alph/np.sum(alph)
class_means[:,current_cl[iter_dico],0] = (np.dot(D,alph)+np.dot(D2,alph))/2
class_means[:,current_cl[iter_dico],0] /= np.linalg.norm(class_means[:,current_cl[iter_dico],0])
# Normal NCM
# alph = np.ones(dictionary_size)/dictionary_size
alph = np.ones(num_samples)/num_samples
class_means[:,current_cl[iter_dico],1] = (np.dot(D,alph)+np.dot(D2,alph))/2
class_means[:,current_cl[iter_dico],1] /= np.linalg.norm(class_means[:,current_cl[iter_dico],1])
torch.save(class_means, './checkpoint/{}_run_{}_iteration_{}_class_means.pth'.format(args.ckp_prefix,iteration_total, iteration))
current_means = class_means[:, order[range(0,(iteration+1)*args.nb_cl)]]
##############################################################
# Calculate validation error of model on the first nb_cl classes:
map_Y_valid_ori = np.array([order_list.index(i) for i in Y_valid_ori])
print('Computing accuracy on the original batch of classes...')
# evalset.test_data = X_valid_ori.astype('uint8')
# evalset.test_labels = map_Y_valid_ori
current_eval_set = merge_images_labels(X_valid_ori, map_Y_valid_ori)
evalset.imgs = evalset.samples = current_eval_set
evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
shuffle=False, num_workers=args.num_workers, pin_memory=True)
ori_acc = compute_accuracy(tg_model, tg_feature_model, current_means, evalloader)
top1_acc_list_ori[iteration, :, iteration_total] = np.array(ori_acc).T
##############################################################
# Calculate validation error of model on the cumul of classes:
map_Y_valid_cumul = np.array([order_list.index(i) for i in Y_valid_cumul])
print('Computing cumulative accuracy...')
# evalset.test_data = X_valid_cumul.astype('uint8')
# evalset.test_labels = map_Y_valid_cumul
current_eval_set = merge_images_labels(X_valid_cumul, map_Y_valid_cumul)
evalset.imgs = evalset.samples = current_eval_set
evalloader = torch.utils.data.DataLoader(evalset, batch_size=eval_batch_size,
shuffle=False, num_workers=args.num_workers, pin_memory=True)
cumul_acc = compute_accuracy(tg_model, tg_feature_model, current_means, evalloader)
top1_acc_list_cumul[iteration, :, iteration_total] = np.array(cumul_acc).T
##############################################################
# Calculate confusion matrix
# print('Computing confusion matrix...')
# cm = compute_confusion_matrix(tg_model, tg_feature_model, current_means, evalloader)
# cm_name = './checkpoint/{}_run_{}_iteration_{}_confusion_matrix.pth'.format(args.ckp_prefix,iteration_total, iteration)
# with open(cm_name, 'wb') as f:
# pickle.dump(cm, f, 2) #for reading with Python 2
##############################################################
        end = time.perf_counter()
print("Current Task Run Time:", end - start)
# Final save of the data
torch.save(top1_acc_list_ori, './checkpoint/{}_run_{}_top1_acc_list_ori.pth'.format(args.ckp_prefix, iteration_total))
torch.save(top1_acc_list_cumul, './checkpoint/{}_run_{}_top1_acc_list_cumul.pth'.format(args.ckp_prefix, iteration_total))
end = time.perf_counter()
print("Total Run Time:", end-start)
| 54.70471 | 181 | 0.628076 |
dbe947e310ce4320f5ce181c46b6a5dc8dda8f83
| 2,299 |
py
|
Python
|
core/turing_machine.py
|
mjutzi/TikzTuringSimulator
|
2d0a54d5fc13bb1b80e903ad304809a49e8d99e7
|
[
"Apache-2.0"
] | 3 |
2017-07-02T12:45:46.000Z
|
2020-08-14T19:43:43.000Z
|
core/turing_machine.py
|
mjutzi/TikzTuringSimulator
|
2d0a54d5fc13bb1b80e903ad304809a49e8d99e7
|
[
"Apache-2.0"
] | null | null | null |
core/turing_machine.py
|
mjutzi/TikzTuringSimulator
|
2d0a54d5fc13bb1b80e903ad304809a49e8d99e7
|
[
"Apache-2.0"
] | null | null | null |
class InvalidCharacterException(Exception):
def __init__(self, chars):
super(InvalidCharacterException, self).__init__('\'{}\' are not in alphabet.'.format(chars))
class TuringMachine:
def __init__(self, num_of_tapes, initial_state, final_states, transition_graph, alphabet=None):
self.__num_of_tapes = num_of_tapes
self.initial_state = initial_state
self.final_states = final_states
self.transition_graph = transition_graph
self.alphabet = alphabet
def assert_charset(self, tape):
'''
        Checks that the tape contains only characters from the configured
        alphabet and sets the tape alphabet of the Tape object.
'''
tape.set_alphabet(self.alphabet)
non_alphabet_chars = tape.non_alphabet_chars()
if len(non_alphabet_chars) > 0:
raise InvalidCharacterException(non_alphabet_chars)
def as_iterator(self, tape):
'''
        Runs the program with the current tape as input. Each iteration yields
        an instance of TransitionEvent and one of TransitionTarget. The event
        describes a tuple of the current state and the characters that were
        read. TransitionTarget describes a tuple of the target state, the
        characters to write, and the head movement(s) on the tape or tapes.
        If the input is valid, the program terminates in one of the final
        states. Otherwise, target is None in some iteration and the iteration
        is aborted.
        :param tape: an instance of Tape or MultiTape
        :return: a generator that yields the state transitions.
'''
current_state = self.initial_state
while current_state and not current_state in self.final_states:
event, target = self.transition_graph.get_transition(current_state, tape.read())
if target:
tape.write(target.new_chars)
tape.move(target.move_directions)
current_state = target.new_state
else:
current_state = None
yield event, target
@property
def states(self):
return self.transition_graph.states
@property
def num_of_tapes(self):
return self.__num_of_tapes
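# --- Added usage sketch (not part of the original module) ---
# A minimal driver for the iterator protocol described in `as_iterator`,
# assuming `machine` is a configured TuringMachine and `tape` a Tape/MultiTape:
#
#     machine.assert_charset(tape)           # validate the input alphabet
#     for event, target in machine.as_iterator(tape):
#         if target is None:                 # no transition: input rejected
#             print('rejected at', event)
#             break
#     else:
#         print('halted in a final state')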
| 40.333333 | 123 | 0.68595 |
f1693080896fedda21bacc58a70edc998dfed039
| 6,015 |
py
|
Python
|
tests/test_transport.py
|
alexdz18/pymysensors
|
41d002b5c9f4b2594147b72178de2fc2293fdb89
|
[
"MIT"
] | 66 |
2015-05-29T16:15:29.000Z
|
2022-01-07T14:06:24.000Z
|
tests/test_transport.py
|
alexdz18/pymysensors
|
41d002b5c9f4b2594147b72178de2fc2293fdb89
|
[
"MIT"
] | 95 |
2015-04-07T17:46:25.000Z
|
2022-01-24T17:16:18.000Z
|
tests/test_transport.py
|
alexdz18/pymysensors
|
41d002b5c9f4b2594147b72178de2fc2293fdb89
|
[
"MIT"
] | 59 |
2015-04-03T02:06:05.000Z
|
2022-01-19T17:03:17.000Z
|
"""Test the gateway transport."""
from unittest import mock
import pytest
from mysensors import Gateway
from mysensors.task import SyncTasks
from mysensors.transport import BaseMySensorsProtocol, Transport
# pylint: disable=redefined-outer-name
@pytest.fixture
def connection_transport():
"""Return a mock connection transport."""
return mock.MagicMock()
@pytest.fixture
def reconnect_callback():
"""Return a mock reconnect callback."""
return mock.MagicMock()
@pytest.fixture
def gateway(connection_transport, reconnect_callback):
"""Return gateway instance."""
_gateway = Gateway()
protocol = BaseMySensorsProtocol(_gateway, reconnect_callback)
def connect():
"""Connect to device."""
protocol.connection_made(connection_transport)
    transport = Transport(_gateway, connect)
transport.connect = connect
transport.protocol = protocol
_gateway.tasks = SyncTasks(_gateway.const, False, None, _gateway.sensors, transport)
return _gateway
def test_connection_made(gateway, connection_transport):
"""Test connection is made."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
def test_connection_made_callback(gateway, connection_transport):
"""Test that callbacks are called when connection is made."""
conn_made = mock.MagicMock()
gateway.on_conn_made = conn_made
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
assert conn_made.call_count == 1
def test_handle_line(gateway):
"""Test handle line."""
line = "1;255;0;0;17;1.4.1\n"
gateway.tasks.transport.protocol.handle_line(line)
gateway.tasks.run_job()
assert 1 in gateway.sensors
def test_disconnect(gateway, connection_transport):
"""Test disconnect."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
gateway.tasks.transport.disconnect()
assert connection_transport.close.call_count == 1
assert gateway.tasks.transport.protocol is None
def test_disconnect_no_connection(gateway, connection_transport):
"""Test disconnect without active connection."""
assert gateway.tasks.transport.protocol is not None
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.disconnect()
assert connection_transport.close.call_count == 0
assert gateway.tasks.transport.protocol is None
def test_connection_lost(gateway, connection_transport, reconnect_callback):
"""Test connection is lost."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
gateway.tasks.transport.protocol.connection_lost("error")
assert connection_transport.serial.close.call_count == 1
assert reconnect_callback.call_count == 1
assert gateway.tasks.transport.protocol.transport is None
def test_connection_lost_callback(gateway, connection_transport, reconnect_callback):
"""Test connection is lost and that callbacks are called."""
conn_lost = mock.MagicMock()
gateway.on_conn_lost = conn_lost
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
gateway.tasks.transport.protocol.connection_lost("error")
assert connection_transport.serial.close.call_count == 1
assert conn_lost.call_count == 1
assert conn_lost.call_args == mock.call(gateway, "error")
assert reconnect_callback.call_count == 1
assert gateway.tasks.transport.protocol.transport is None
def test_send(gateway, connection_transport):
"""Test send."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
msg_string = "1;255;3;0;1;123456789\n"
gateway.tasks.transport.send(msg_string)
assert connection_transport.write.call_count == 1
assert connection_transport.write.call_args == mock.call(msg_string.encode())
def test_send_no_message(gateway, connection_transport):
"""Test send with falsy message."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
msg_string = ""
gateway.tasks.transport.send(msg_string)
assert connection_transport.write.call_count == 0
def test_send_no_protocol(gateway, connection_transport):
"""Test send with no protocol."""
gateway.tasks.transport.protocol = None
msg_string = "1;255;3;0;1;123456789\n"
gateway.tasks.transport.send(msg_string)
assert connection_transport.write.call_count == 0
def test_send_no_transport(gateway, connection_transport):
"""Test send with no transport."""
assert gateway.tasks.transport.protocol.transport is None
msg_string = "1;255;3;0;1;123456789\n"
gateway.tasks.transport.send(msg_string)
assert connection_transport.write.call_count == 0
def test_send_error(gateway, connection_transport, reconnect_callback):
"""Test send raises OSError."""
assert gateway.tasks.transport.protocol.transport is None
gateway.tasks.transport.connect()
assert gateway.tasks.transport.protocol.transport is connection_transport
msg_string = "1;255;3;0;1;123456789\n"
connection_transport.write = mock.MagicMock(side_effect=OSError())
gateway.tasks.transport.send(msg_string)
assert connection_transport.write.call_count == 1
assert connection_transport.close.call_count == 1
assert reconnect_callback.call_count == 1
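# --- Added note (not part of the original test module) ---
# The lifecycle exercised by the tests above, roughly:
#
#     gateway.tasks.transport.connect()       # protocol.connection_made(...)
#     gateway.tasks.transport.send(msg)       # -> connection_transport.write(...)
#     protocol.connection_lost("error")       # closes and schedules reconnect
#     gateway.tasks.transport.disconnect()    # tears the protocol down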
| 37.59375 | 88 | 0.76542 |
179e82083888108d775213e1d8129d1a405d9ef9
| 7,688 |
py
|
Python
|
saleor/graphql/payment/types.py
|
VALKYR-Developers/saleor
|
adc189d429199b9e0203726102a50d5673bafece
|
[
"CC-BY-4.0"
] | 1 |
2022-02-21T07:17:08.000Z
|
2022-02-21T07:17:08.000Z
|
saleor/graphql/payment/types.py
|
VALKYR-Developers/saleor
|
adc189d429199b9e0203726102a50d5673bafece
|
[
"CC-BY-4.0"
] | 81 |
2021-10-11T04:26:07.000Z
|
2022-03-28T04:46:43.000Z
|
saleor/graphql/payment/types.py
|
VALKYR-Developers/saleor
|
adc189d429199b9e0203726102a50d5673bafece
|
[
"CC-BY-4.0"
] | null | null | null |
import graphene
from graphene import relay
from ...core.exceptions import PermissionDenied
from ...core.permissions import OrderPermissions
from ...core.tracing import traced_resolver
from ...payment import models
from ..checkout.dataloaders import CheckoutByTokenLoader
from ..core.connection import CountableConnection
from ..core.descriptions import ADDED_IN_31
from ..core.types import ModelObjectType, Money
from ..decorators import permission_required
from ..meta.permissions import public_payment_permissions
from ..meta.resolvers import resolve_metadata
from ..meta.types import MetadataItem, ObjectWithMetadata
from ..utils import get_user_or_app_from_context
from .enums import OrderAction, PaymentChargeStatusEnum, TransactionKindEnum
class Transaction(ModelObjectType):
id = graphene.GlobalID(required=True)
created = graphene.DateTime(required=True)
payment = graphene.Field(lambda: Payment, required=True)
token = graphene.String(required=True)
kind = TransactionKindEnum(required=True)
is_success = graphene.Boolean(required=True)
error = graphene.String()
gateway_response = graphene.JSONString(required=True)
amount = graphene.Field(Money, description="Total amount of the transaction.")
class Meta:
description = "An object representing a single payment."
interfaces = [relay.Node]
model = models.Transaction
@staticmethod
def resolve_amount(root: models.Transaction, _info):
return root.get_amount()
class CreditCard(graphene.ObjectType):
brand = graphene.String(description="Card brand.", required=True)
first_digits = graphene.String(
description="First 4 digits of the card number.", required=False
)
last_digits = graphene.String(
description="Last 4 digits of the card number.", required=True
)
exp_month = graphene.Int(
description=("Two-digit number representing the card’s expiration month."),
required=False,
)
exp_year = graphene.Int(
description=("Four-digit number representing the card’s expiration year."),
required=False,
)
class PaymentSource(graphene.ObjectType):
class Meta:
description = (
"Represents a payment source stored "
"for user in payment gateway, such as credit card."
)
gateway = graphene.String(description="Payment gateway name.", required=True)
payment_method_id = graphene.String(description="ID of stored payment method.")
credit_card_info = graphene.Field(
CreditCard, description="Stored credit card details if available."
)
metadata = graphene.List(
MetadataItem,
required=True,
description=(
f"{ADDED_IN_31} List of public metadata items. "
"Can be accessed without permissions."
),
)
class Payment(ModelObjectType):
id = graphene.GlobalID(required=True)
gateway = graphene.String(required=True)
is_active = graphene.Boolean(required=True)
created = graphene.DateTime(required=True)
modified = graphene.DateTime(required=True)
token = graphene.String(required=True)
checkout = graphene.Field("saleor.graphql.checkout.types.Checkout")
order = graphene.Field("saleor.graphql.order.types.Order")
payment_method_type = graphene.String(required=True)
customer_ip_address = graphene.String()
charge_status = PaymentChargeStatusEnum(
description="Internal payment status.", required=True
)
actions = graphene.List(
OrderAction,
description=(
"List of actions that can be performed in the current state of a payment."
),
required=True,
)
total = graphene.Field(Money, description="Total amount of the payment.")
captured_amount = graphene.Field(
Money, description="Total amount captured for this payment."
)
transactions = graphene.List(
Transaction, description="List of all transactions within this payment."
)
available_capture_amount = graphene.Field(
Money, description="Maximum amount of money that can be captured."
)
available_refund_amount = graphene.Field(
Money, description="Maximum amount of money that can be refunded."
)
credit_card = graphene.Field(
CreditCard, description="The details of the card used for this payment."
)
class Meta:
description = "Represents a payment of a given type."
interfaces = [relay.Node, ObjectWithMetadata]
model = models.Payment
@staticmethod
@permission_required(OrderPermissions.MANAGE_ORDERS)
def resolve_customer_ip_address(root: models.Payment, _info):
return root.customer_ip_address
@staticmethod
@permission_required(OrderPermissions.MANAGE_ORDERS)
def resolve_actions(root: models.Payment, _info):
actions = []
if root.can_capture():
actions.append(OrderAction.CAPTURE)
if root.can_refund():
actions.append(OrderAction.REFUND)
if root.can_void():
actions.append(OrderAction.VOID)
return actions
@staticmethod
@traced_resolver
def resolve_total(root: models.Payment, _info):
return root.get_total()
@staticmethod
def resolve_captured_amount(root: models.Payment, _info):
return root.get_captured_amount()
@staticmethod
@permission_required(OrderPermissions.MANAGE_ORDERS)
def resolve_transactions(root: models.Payment, _info):
return root.transactions.all()
@staticmethod
@permission_required(OrderPermissions.MANAGE_ORDERS)
def resolve_available_refund_amount(root: models.Payment, _info):
if not root.can_refund():
return None
return root.get_captured_amount()
@staticmethod
@permission_required(OrderPermissions.MANAGE_ORDERS)
def resolve_available_capture_amount(root: models.Payment, _info):
if not root.can_capture():
return None
return Money(amount=root.get_charge_amount(), currency=root.currency)
@staticmethod
def resolve_credit_card(root: models.Payment, _info):
data = {
"brand": root.cc_brand,
"exp_month": root.cc_exp_month,
"exp_year": root.cc_exp_year,
"first_digits": root.cc_first_digits,
"last_digits": root.cc_last_digits,
}
if not any(data.values()):
return None
return CreditCard(**data)
@staticmethod
def resolve_metadata(root: models.Payment, info):
permissions = public_payment_permissions(info, root.pk)
requester = get_user_or_app_from_context(info.context)
if not requester.has_perms(permissions):
raise PermissionDenied()
return resolve_metadata(root.metadata)
    @staticmethod
    def resolve_checkout(root: models.Payment, info):
if not root.checkout_id:
return None
return CheckoutByTokenLoader(info.context).load(root.checkout_id)
class PaymentCountableConnection(CountableConnection):
class Meta:
node = Payment
class PaymentInitialized(graphene.ObjectType):
class Meta:
description = (
"Server-side data generated by a payment gateway. Optional step when the "
"payment provider requires an additional action to initialize payment "
"session."
)
gateway = graphene.String(description="ID of a payment gateway.", required=True)
name = graphene.String(description="Payment gateway name.", required=True)
data = graphene.JSONString(
description="Initialized data by gateway.", required=False
)
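# --- Added illustration (not part of the original module) ---
# A hedged example of how the Payment type above might be queried; actual
# field availability depends on the caller's permissions (e.g. MANAGE_ORDERS):
#
#     query = """
#     {
#       payment(id: "UGF5bWVudDox") {
#         gateway
#         chargeStatus
#         total { amount currency }
#         creditCard { brand lastDigits }
#       }
#     }
#     """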
| 35.925234 | 86 | 0.69732 |
060153ef84c8610221f2a7f942344021755bfe92
| 365 |
py
|
Python
|
Numbers genenne.py
|
SixLeopard/Python-Random-NUmber-Generator
|
7c66d97f9a4332f26228685f575c170f9bd6e191
|
[
"MIT"
] | 2 |
2019-03-04T02:55:02.000Z
|
2019-05-03T01:47:03.000Z
|
Numbers genenne.py
|
SixLeopard/Python-Random-NUmber-Generator
|
7c66d97f9a4332f26228685f575c170f9bd6e191
|
[
"MIT"
] | null | null | null |
Numbers genenne.py
|
SixLeopard/Python-Random-NUmber-Generator
|
7c66d97f9a4332f26228685f575c170f9bd6e191
|
[
"MIT"
] | 2 |
2021-02-08T16:34:48.000Z
|
2021-02-22T21:38:07.000Z
|
import random

# Endlessly append huge random integers to a text file, one per line.
# ('Numbes.txt' is kept from the original; it looks like a typo for 'Numbers.txt'.)
with open('Numbes.txt', 'w') as f:
    while True:  # the original loop condition ("yes" == "yes") could never become false
        number = random.randint(100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000, 999999999999999999999999999999999999999999999999999999999990000000000000000000000000000000000000000000000000000000000)
        f.write(str(number) + '\n')  # newline so the numbers don't run together
| 45.625 | 267 | 0.838356 |
457231d003ea1a8b76943cf2f42c248da0a739cd
| 2,090 |
py
|
Python
|
vsts/vsts/service_endpoint/v4_1/models/service_endpoint_request.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/service_endpoint/v4_1/models/service_endpoint_request.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/service_endpoint/v4_1/models/service_endpoint_request.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceEndpointRequest(Model):
"""ServiceEndpointRequest.
:param data_source_details: Gets or sets the data source details for the service endpoint request.
:type data_source_details: :class:`DataSourceDetails <service-endpoint.v4_1.models.DataSourceDetails>`
:param result_transformation_details: Gets or sets the result transformation details for the service endpoint request.
:type result_transformation_details: :class:`ResultTransformationDetails <service-endpoint.v4_1.models.ResultTransformationDetails>`
:param service_endpoint_details: Gets or sets the service endpoint details for the service endpoint request.
:type service_endpoint_details: :class:`ServiceEndpointDetails <service-endpoint.v4_1.models.ServiceEndpointDetails>`
"""
_attribute_map = {
'data_source_details': {'key': 'dataSourceDetails', 'type': 'DataSourceDetails'},
'result_transformation_details': {'key': 'resultTransformationDetails', 'type': 'ResultTransformationDetails'},
'service_endpoint_details': {'key': 'serviceEndpointDetails', 'type': 'ServiceEndpointDetails'}
}
def __init__(self, data_source_details=None, result_transformation_details=None, service_endpoint_details=None):
super(ServiceEndpointRequest, self).__init__()
self.data_source_details = data_source_details
self.result_transformation_details = result_transformation_details
self.service_endpoint_details = service_endpoint_details
| 61.470588 | 136 | 0.677033 |
8dfa233ca37cab45453777290536b806d1e5cd3f
| 6,326 |
py
|
Python
|
rlalgos/tf/mf/dqn.py
|
vermouth1992/rl-util
|
4c06ab8f5c96a44e58f88cf30146bcb837057112
|
[
"Apache-2.0"
] | null | null | null |
rlalgos/tf/mf/dqn.py
|
vermouth1992/rl-util
|
4c06ab8f5c96a44e58f88cf30146bcb837057112
|
[
"Apache-2.0"
] | null | null | null |
rlalgos/tf/mf/dqn.py
|
vermouth1992/rl-util
|
4c06ab8f5c96a44e58f88cf30146bcb837057112
|
[
"Apache-2.0"
] | null | null | null |
"""
Deep Q Network for low-dimensional observation space
"""
import rlutils.tf as rlu
import tensorflow as tf
from rlutils.infra.runner import TFOffPolicyRunner, run_func_as_main
from rlutils.interface.agent import Agent
def gather_q_values(q_values, actions):
batch_size = tf.shape(actions)[0]
idx = tf.stack([tf.range(batch_size, dtype=actions.dtype), actions], axis=-1) # (None, 2)
q_values = tf.gather_nd(q_values, indices=idx)
return q_values
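# Example: with q_values = [[1., 2.], [3., 4.]] and actions = [1, 0],
# gather_q_values returns [2., 3.], i.e. q_values[i, actions[i]] for each row i.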
class DQN(Agent, tf.keras.Model):
def __init__(self,
obs_spec,
act_spec,
mlp_hidden=128,
double_q=True,
epsilon=0.1,
q_lr=1e-4,
gamma=0.99,
tau=5e-3,
huber_delta=None):
super(DQN, self).__init__()
self.obs_spec = obs_spec
self.act_spec = act_spec
obs_dim = obs_spec.shape[0]
act_dim = act_spec.n
self.q_network = rlu.nn.build_mlp(obs_dim, act_dim, mlp_hidden=mlp_hidden, num_layers=3)
self.target_q_network = rlu.nn.build_mlp(obs_dim, act_dim, mlp_hidden=mlp_hidden, num_layers=3)
        self.q_optimizer = tf.keras.optimizers.Adam(learning_rate=q_lr)
self.epsilon = tf.Variable(initial_value=epsilon, dtype=tf.float32, trainable=False)
self.act_dim = act_dim
self.double_q = double_q
self.huber_delta = huber_delta
self.gamma = gamma
self.tau = tau
reduction = tf.keras.losses.Reduction.NONE # Note: tensorflow uses reduce_mean at axis=-1 by default
if huber_delta is None:
self.loss_fn = tf.keras.losses.MeanSquaredError(reduction=reduction)
else:
self.loss_fn = tf.keras.losses.Huber(delta=huber_delta, reduction=reduction)
rlu.functional.hard_update(self.target_q_network, self.q_network)
def set_logger(self, logger):
self.logger = logger
def log_tabular(self):
self.logger.log_tabular('QVals', with_min_and_max=True)
self.logger.log_tabular('LossQ', average_only=True)
def set_epsilon(self, epsilon):
        assert 0. <= epsilon <= 1.
self.epsilon.assign(epsilon)
@tf.function
def update_target(self):
rlu.functional.soft_update(self.target_q_network, self.q_network, tau=self.tau)
@tf.function
def _update_nets(self, obs, act, next_obs, done, rew):
print('Tracing _update_nets')
# compute target Q values
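        # Bellman target: y = r + gamma * (1 - done) * Q_target(s', a*), where
        # a* = argmax_a Q_online(s', a) under double Q-learning (which reduces
        # overestimation bias), otherwise the argmax over the target network.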
target_q_values = self.target_q_network(next_obs)
if self.double_q:
# select action using Q network instead of target Q network
target_actions = tf.argmax(self.q_network(next_obs), axis=-1, output_type=self.act_spec.dtype)
target_q_values = gather_q_values(target_q_values, target_actions)
else:
target_q_values = tf.reduce_max(target_q_values, axis=-1)
target_q_values = rew + self.gamma * (1. - done) * target_q_values
with tf.GradientTape() as tape:
q_values = gather_q_values(self.q_network(obs), act) # (None,)
            loss = self.loss_fn(target_q_values, q_values)  # (None,); keras losses take (y_true, y_pred)
grad = tape.gradient(loss, self.q_network.trainable_variables)
self.q_optimizer.apply_gradients(zip(grad, self.q_network.trainable_variables))
info = dict(
QVals=q_values,
LossQ=loss
)
return info
@tf.function
def train_step(self, data):
obs = data['obs']
act = data['act']
next_obs = data['next_obs']
done = data['done']
rew = data['rew']
update_target = data['update_target']
info = self._update_nets(obs, act, next_obs, done, rew)
if update_target:
self.update_target()
return info
def train_on_batch(self, data, **kwargs):
info = self.train_step(data=data)
self.logger.store(**rlu.functional.to_numpy_or_python_type(info))
@tf.function
def act_batch_explore_tf(self, obs):
return self.act_batch(obs, deterministic=tf.convert_to_tensor(False))
@tf.function
def act_batch_test_tf(self, obs):
return self.act_batch(obs, deterministic=tf.convert_to_tensor(True))
def act_batch_test(self, obs):
return self.act_batch_test_tf(tf.convert_to_tensor(obs)).numpy()
def act_batch_explore(self, obs):
return self.act_batch_explore_tf(tf.convert_to_tensor(obs)).numpy()
@tf.function
def act_batch(self, obs, deterministic):
""" Implement epsilon-greedy here """
batch_size = tf.shape(obs)[0]
epsilon = tf.random.uniform(shape=(batch_size,), minval=0., maxval=1., dtype=tf.float32)
epsilon_indicator = tf.cast(epsilon > self.epsilon, dtype=tf.int32) # (None,)
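        # 0 picks the random action, 1 the greedy one; the [random, greedy]
        # pair stacked below is then indexed per row by reusing gather_q_values.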
random_actions = tf.random.uniform(shape=(batch_size,), minval=0, maxval=self.act_dim,
dtype=self.act_spec.dtype)
deterministic_actions = tf.argmax(self.q_network(obs), axis=-1, output_type=self.act_spec.dtype)
epsilon_greedy_actions = tf.stack([random_actions, deterministic_actions], axis=-1) # (None, 2)
epsilon_greedy_actions = gather_q_values(epsilon_greedy_actions, epsilon_indicator)
final_actions = tf.cond(deterministic, true_fn=lambda: deterministic_actions,
false_fn=lambda: epsilon_greedy_actions)
return final_actions
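# Minimal usage sketch (illustrative; `env`, `logger` and `batch` are
# assumptions, and the specs are expected to be gym-style Box/Discrete):
#   agent = DQN(obs_spec=env.observation_space, act_spec=env.action_space)
#   agent.set_logger(logger)                      # required before training
#   actions = agent.act_batch_explore(obs_batch)  # epsilon-greedy numpy actions
#   agent.train_on_batch(data=batch)  # dict with obs/act/next_obs/done/rew/update_target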
class Runner(TFOffPolicyRunner):
@classmethod
def main(cls,
env_name,
mlp_hidden=256,
double_q=True,
q_lr=1e-4,
gamma=0.99,
huber_delta: float = None,
tau=5e-3,
epsilon=0.1,
**kwargs
):
agent_kwargs = dict(
mlp_hidden=mlp_hidden,
double_q=double_q,
q_lr=q_lr,
gamma=gamma,
huber_delta=huber_delta,
tau=tau,
epsilon=epsilon
)
super(Runner, cls).main(env_name=env_name,
agent_cls=DQN,
agent_kwargs=agent_kwargs,
**kwargs
)
if __name__ == '__main__':
run_func_as_main(Runner.main)
| 37.654762 | 109 | 0.619823 |
8d03c89243f5e99160f9f82d96d06f87635f74b7
| 4,301 |
py
|
Python
|
openmdao.lib/src/openmdao/lib/drivers/newton_solver.py
|
mjfwest/OpenMDAO-Framework
|
a5521f47ad7686c25b203de74e1c7dff5fd7a52b
|
[
"Apache-2.0"
] | 69 |
2015-01-02T19:10:08.000Z
|
2021-11-14T04:42:28.000Z
|
openmdao.lib/src/openmdao/lib/drivers/newton_solver.py
|
jcchin/OpenMDAO-Framework
|
038e89b06da1c74f00918f4c6fbd8bd365e25657
|
[
"Apache-2.0"
] | 3 |
2015-01-15T23:08:18.000Z
|
2015-03-11T16:57:35.000Z
|
openmdao.lib/src/openmdao/lib/drivers/newton_solver.py
|
jcchin/OpenMDAO-Framework
|
038e89b06da1c74f00918f4c6fbd8bd365e25657
|
[
"Apache-2.0"
] | 31 |
2015-09-16T00:37:35.000Z
|
2022-01-10T06:27:55.000Z
|
"""
A Python Newton solver with line-search adaptation of the relaxation parameter.
"""
# pylint: disable=C0103
#public symbols
__all__ = ['NewtonSolver']
import numpy
from openmdao.main.mpiwrap import MPI, get_norm
# pylint: disable=E0611, F0401
from openmdao.main.case import Case
from openmdao.main.driver import Driver
from openmdao.main.datatypes.api import Float, Int, Enum
from openmdao.main.hasparameters import HasParameters
from openmdao.main.hasconstraints import HasEqConstraints
from openmdao.main.interfaces import IHasParameters, IHasEqConstraints, \
ISolver, implements
from openmdao.util.decorators import add_delegate
@add_delegate(HasParameters, HasEqConstraints)
class NewtonSolver(Driver):
    ''' A Newton-style nonlinear solver with a backtracking line search
    on the relaxation parameter.
    '''
implements(IHasParameters, IHasEqConstraints, ISolver)
# pylint: disable=E1101
atol = Float(1.0e-12, iotype='in', desc='Absolute convergence tolerance')
rtol = Float(1.0e-10, iotype='in', desc='Relative convergence tolerance')
max_iteration = Int(20, iotype='in', desc='Maximum number of iterations')
ls_atol = Float(1.0e-10, iotype='in',
desc='Absolute convergence tolerance for line search')
ls_rtol = Float(0.9, iotype='in',
desc='Relative convergence tolerance for line search')
ls_max_iteration = Int(10, iotype='in',
desc='Maximum number of line searches')
alpha = Float(1.0, iotype='in', low=0.0, high=1.0,
desc='Initial over-relaxation factor')
iprint = Enum(0, [0, 1, 2], iotype='in', desc='set to 1 to print '
'convergence. Set to 2 to get backtracking convergence '
'as well.')
def execute(self):
""" General Newton's method. """
if MPI:
if self.workflow._system.mpi.comm == MPI.COMM_NULL:
return
system = self.workflow._system
options = self.gradient_options
fvec = system.vec['f']
dfvec = system.vec['df']
uvec = system.vec['u']
iterbase = self.workflow._iterbase()
nstring = 'NEWTON'
# perform an initial run
system.evaluate(iterbase, case_uuid=Case.next_uuid())
f_norm = get_norm(fvec)
f_norm0 = f_norm
if self.iprint > 0:
self.print_norm(nstring, 0, f_norm, f_norm0)
itercount = 0
alpha = self.alpha
while itercount < self.max_iteration and f_norm > self.atol and \
f_norm/f_norm0 > self.rtol:
system.calc_newton_direction(options=options)
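            # dfvec is expected to hold the Newton search direction (roughly
            # -J^-1 f); the update u <- u + alpha*df is damped by the
            # backtracking loop below until the residual norm drops enough.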
#print "LS 1", uvec.array, '+', dfvec.array
uvec.array += alpha*dfvec.array
# Just evaluate the model with the new points
system.evaluate(iterbase, case_uuid=Case.next_uuid())
f_norm = get_norm(fvec)
if self.iprint > 0:
self.print_norm(nstring, itercount+1, f_norm, f_norm0)
itercount += 1
ls_itercount = 0
# Backtracking Line Search
while ls_itercount < self.ls_max_iteration and \
f_norm > self.ls_atol and \
f_norm/f_norm0 > self.ls_rtol:
alpha *= 0.5
uvec.array -= alpha*dfvec.array
# Just evaluate the model with the new points
system.evaluate(iterbase, case_uuid=Case.next_uuid())
f_norm = get_norm(fvec)
                if self.iprint > 1:
self.print_norm('BK_TKG', itercount+1,
f_norm, f_norm/f_norm0,
indent=1, solver='LS')
ls_itercount += 1
# Reset backtracking
alpha = self.alpha
# Need to make sure the whole workflow is executed at the final
# point, not just evaluated.
self.pre_iteration()
self.run_iteration()
self.post_iteration()
if self.iprint > 0:
self.print_norm(nstring, itercount, f_norm, f_norm0, msg='Converged')
def requires_derivs(self):
"""Newtonsolver always requires derivatives."""
return True
| 31.859259 | 81 | 0.601023 |
b0530fe355cdb6f77a3fb25eddafbab663c026c4
| 51,822 |
py
|
Python
|
docs.py
|
raccoongang/openprocurement.tender.openuadefense
|
66976b86ca0f82a731eebe7274cafd87f81d8f13
|
[
"Apache-2.0"
] | null | null | null |
docs.py
|
raccoongang/openprocurement.tender.openuadefense
|
66976b86ca0f82a731eebe7274cafd87f81d8f13
|
[
"Apache-2.0"
] | 40 |
2016-03-18T09:55:05.000Z
|
2018-03-19T15:23:20.000Z
|
docs.py
|
raccoongang/openprocurement.tender.openuadefense
|
66976b86ca0f82a731eebe7274cafd87f81d8f13
|
[
"Apache-2.0"
] | 17 |
2016-03-18T09:46:47.000Z
|
2018-05-25T07:50:55.000Z
|
# -*- coding: utf-8 -*-
import json
import os
from datetime import timedelta
import openprocurement.tender.openuadefense.tests.base as base_test
from openprocurement.api.models import get_now
from openprocurement.api.tests.base import PrefixedRequestClass
from openprocurement.tender.openuadefense.tests.tender import BaseTenderUAWebTest, test_tender_data
from webtest import TestApp
test_tender_ua_data = {
"tenderPeriod": {
"endDate": "2016-02-11T14:04:18.962451"
},
"title": "футляри до державних нагород",
"minimalStep": {
"currency": "UAH",
"amount": 35
},
"procurementMethodType": "aboveThresholdUA.defense",
"value": {
"currency": "UAH",
"amount": 500
},
"procuringEntity": {
"kind": "defense",
"address": {
"countryName": "Україна",
"locality": "м. Вінниця",
"postalCode": "21027",
"region": "м. Вінниця",
"streetAddress": "вул. Стахурського. 22"
},
"contactPoint": {
"name": "Куца Світлана Валентинівна",
"telephone": "+380 (432) 46-53-02",
"url": "http://sch10.edu.vn.ua/"
},
"identifier": {
"id": "21725150",
"legalName": "Заклад \"Загальноосвітня школа І-ІІІ ступенів № 10 Вінницької міської ради\"",
"scheme": "UA-EDR"
},
"name": "ЗОСШ #10 м.Вінниці"
},
"items": [
{
"unit": {
"code": "44617100-9",
"name": "item"
},
"additionalClassifications": [
{
"scheme": "ДКПП",
"id": "17.21.1",
"description": "Послуги шкільних їдалень"
}
],
"description": "Послуги шкільних їдалень",
"deliveryDate": {
"startDate": (get_now() + timedelta(days=20)).isoformat(),
"endDate": (get_now() + timedelta(days=50)).isoformat()
},
"deliveryAddress": {
"countryName": u"Україна",
"postalCode": "79000",
"region": u"м. Київ",
"locality": u"м. Київ",
"streetAddress": u"вул. Банкова 1"
},
"classification": {
"scheme": "ДК021",
"id": "37810000-9",
"description": "Test"
},
"quantity": 1
}
]
}
test_tender_ua_data["tenderPeriod"] = {
"endDate": (get_now() + timedelta(days=16)).isoformat()
}
bid = {
"data": {
"tenderers": [
{
"address": {
"countryName": "Україна",
"locality": "м. Вінниця",
"postalCode": "21100",
"region": "м. Вінниця",
"streetAddress": "вул. Островського, 33"
},
"contactPoint": {
"email": "[email protected]",
"name": "Сергій Олексюк",
"telephone": "+380 (432) 21-69-30"
},
"identifier": {
"scheme": u"UA-EDR",
"id": u"00137256",
"uri": u"http://www.sc.gov.ua/"
},
"name": "ДКП «Школяр»"
}
],
"subcontractingDetails": "ДКП «книга», Україна, м. Львів, вул. Островського, 33",
"value": {
"amount": 500
},
'selfEligible': True, 'selfQualified': True,
}
}
bid2 = {
"data": {
"tenderers": [
{
"address": {
"countryName": "Україна",
"locality": "м. Львів",
"postalCode": "79013",
"region": "м. Львів",
"streetAddress": "вул. Островського, 34"
},
"contactPoint": {
"email": "[email protected]",
"name": "Андрій Олексюк",
"telephone": "+380 (322) 91-69-30"
},
"identifier": {
"scheme": u"UA-EDR",
"id": u"00137226",
"uri": u"http://www.sc.gov.ua/"
},
"name": "ДКП «Книга»"
}
],
"value": {
"amount": 499
},
'selfEligible': True, 'selfQualified': True,
}
}
question = {
"data": {
"author": {
"address": {
"countryName": "Україна",
"locality": "м. Вінниця",
"postalCode": "21100",
"region": "м. Вінниця",
"streetAddress": "вул. Островського, 33"
},
"contactPoint": {
"email": "[email protected]",
"name": "Сергій Олексюк",
"telephone": "+380 (432) 21-69-30"
},
"identifier": {
"id": "00137226",
"legalName": "Державне комунальне підприємство громадського харчування «Школяр»",
"scheme": "UA-EDR",
"uri": "http://sch10.edu.vn.ua/"
},
"name": "ДКП «Школяр»"
},
"description": "Просимо додати таблицю потрібної калорійності харчування",
"title": "Калорійність"
}
}
answer = {
"data": {
"answer": "Таблицю додано в файлі \"Kalorijnist.xslx\""
}
}
cancellation = {
'data': {
'reason': 'cancellation reason'
}
}
complaint = {
"data": {
"author": {
"address": {
"countryName": "Україна",
"locality": "м. Вінниця",
"postalCode": "21100",
"region": "м. Вінниця",
"streetAddress": "вул. Островського, 33"
},
"contactPoint": {
"email": "[email protected]",
"name": "Сергій Олексюк",
"telephone": "+380 (432) 21-69-30"
},
"identifier": {
"id": "13313462",
"legalName": "Державне комунальне підприємство громадського харчування «Школяр»",
"scheme": "UA-EDR",
"uri": "http://sch10.edu.vn.ua/"
},
"name": "ДКП «Школяр»"
},
"description": "Умови виставлені замовником не містять достатньо інформації, щоб заявка мала сенс.",
"title": "Недостатньо інформації"
}
}
class DumpsTestAppwebtest(TestApp):
def do_request(self, req, status=None, expect_errors=None):
req.headers.environ["HTTP_HOST"] = "api-sandbox.openprocurement.org"
if hasattr(self, 'file_obj') and not self.file_obj.closed:
self.file_obj.write(req.as_bytes(True))
self.file_obj.write("\n")
if req.body:
try:
self.file_obj.write(
'DATA:\n' + json.dumps(json.loads(req.body), indent=2, ensure_ascii=False).encode('utf8'))
self.file_obj.write("\n")
                except Exception:
pass
self.file_obj.write("\n")
resp = super(DumpsTestAppwebtest, self).do_request(req, status=status, expect_errors=expect_errors)
if hasattr(self, 'file_obj') and not self.file_obj.closed:
headers = [(n.title(), v)
for n, v in resp.headerlist
if n.lower() != 'content-length']
headers.sort()
self.file_obj.write(str('Response: %s\n%s\n') % (
resp.status,
str('\n').join([str('%s: %s') % (n, v) for n, v in headers]),
))
if resp.testbody:
try:
self.file_obj.write(json.dumps(json.loads(resp.testbody), indent=2, ensure_ascii=False).encode('utf8'))
                except Exception:
pass
self.file_obj.write("\n\n")
return resp
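# Each test below points self.app.file_obj at a tutorial .http file; do_request()
# above then appends the raw request and response to that file, which is how the
# docs/source/tutorial/*.http fixtures for the documentation are generated.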
class TenderUAResourceTest(BaseTenderUAWebTest):
initial_data = test_tender_ua_data
def setUp(self):
self.app = DumpsTestAppwebtest(
"config:tests.ini", relative_to=os.path.dirname(base_test.__file__))
self.app.RequestClass = PrefixedRequestClass
self.app.authorization = ('Basic', ('broker', ''))
self.couchdb_server = self.app.app.registry.couchdb_server
self.db = self.app.app.registry.db
def test_docs(self):
request_path = '/tenders?opt_pretty=1'
#### Exploring basic rules
#
with open('docs/source/tutorial/tender-listing.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get(request_path)
self.assertEqual(response.status, '200 OK')
self.app.file_obj.write("\n")
with open('docs/source/tutorial/tender-post-attempt.http', 'w') as self.app.file_obj:
response = self.app.post(request_path, 'data', status=415)
self.assertEqual(response.status, '415 Unsupported Media Type')
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/tender-post-attempt-json.http', 'w') as self.app.file_obj:
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post(
request_path, 'data', content_type='application/json', status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
#### Creating tender
#
with open('docs/source/tutorial/tender-post-attempt-json-data.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders?opt_pretty=1', {"data": test_tender_ua_data})
self.assertEqual(response.status, '201 Created')
tender = response.json['data']
owner_token = response.json['access']['token']
with open('docs/source/tutorial/blank-tender-view.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}'.format(tender['id']))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/tender-listing-no-auth.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get(request_path)
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
#### Modifying tender
#
tenderPeriod_endDate = get_now() + timedelta(days=15, seconds=10)
with open('docs/source/tutorial/patch-items-value-periods.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token), {'data':
{
"tenderPeriod": {
"endDate": tenderPeriod_endDate.isoformat()
}
}
})
with open('docs/source/tutorial/tender-listing-after-patch.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get(request_path)
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
self.tender_id = tender['id']
# Setting Bid guarantee
#
with open('docs/source/tutorial/set-bid-guarantee.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(
self.tender_id, owner_token), {"data": {"guarantee": {"amount": 8, "currency": "USD"}}})
self.assertEqual(response.status, '200 OK')
self.assertIn('guarantee', response.json['data'])
#### Uploading documentation
#
with open('docs/source/tutorial/upload-tender-notice.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/documents?acc_token={}'.format(
self.tender_id, owner_token), upload_files=[('file', u'Notice.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
doc_id = response.json["data"]["id"]
with open('docs/source/tutorial/tender-documents.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/documents/{}?acc_token={}'.format(
self.tender_id, doc_id, owner_token))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/upload-award-criteria.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/documents?acc_token={}'.format(
self.tender_id, owner_token), upload_files=[('file', u'AwardCriteria.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
doc_id = response.json["data"]["id"]
with open('docs/source/tutorial/tender-documents-2.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/documents?acc_token={}'.format(
self.tender_id, owner_token))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/update-award-criteria.http', 'w') as self.app.file_obj:
response = self.app.put('/tenders/{}/documents/{}?acc_token={}'.format(
self.tender_id, doc_id, owner_token), upload_files=[('file', 'AwardCriteria-2.pdf', 'content2')])
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/tender-documents-3.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/documents'.format(
self.tender_id))
self.assertEqual(response.status, '200 OK')
#### Enquiries
#
with open('docs/source/tutorial/ask-question.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/questions'.format(
self.tender_id), question, status=201)
question_id = response.json['data']['id']
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/answer-question.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/questions/{}?acc_token={}'.format(
self.tender_id, question_id, owner_token), answer, status=200)
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/list-question.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/questions'.format(
self.tender_id))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/get-answer.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/questions/{}'.format(
self.tender_id, question_id))
self.assertEqual(response.status, '200 OK')
self.go_to_enquiryPeriod_end()
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/update-tender-after-enqiery.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
{'data': {"value": {'amount': 501.0}}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
with open('docs/source/tutorial/ask-question-after-enquiry-period.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/questions'.format(
self.tender_id), question, status=403)
self.assertEqual(response.status, '403 Forbidden')
with open('docs/source/tutorial/update-tender-after-enqiery-with-update-periods.http', 'w') as self.app.file_obj:
tenderPeriod_endDate = get_now() + timedelta(days=8)
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token), {'data':
{
"value": {
"amount": 501,
"currency": u"UAH"
},
"tenderPeriod": {
"endDate": tenderPeriod_endDate.isoformat()
}
}
})
self.assertEqual(response.status, '200 OK')
#### Registering bid
#
bids_access = {}
with open('docs/source/tutorial/register-bidder.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), bid)
bid1_id = response.json['data']['id']
bids_access[bid1_id] = response.json['access']['token']
self.assertEqual(response.status, '201 Created')
#### Proposal Uploading
#
with open('docs/source/tutorial/upload-bid-proposal.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/bids/{}/documents?acc_token={}'.format(
self.tender_id, bid1_id, bids_access[bid1_id]), upload_files=[('file', 'Proposal.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/bidder-documents.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/bids/{}/documents?acc_token={}'.format(
self.tender_id, bid1_id, bids_access[bid1_id]))
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}?acc_token={}'.format(tender['id'], owner_token),
{'data': {"value": {'amount': 501.0}}})
self.assertEqual(response.status, '200 OK')
#### Bid invalidation
#
with open('docs/source/tutorial/bidder-after-changing-tender.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/bids/{}?acc_token={}'.format(
self.tender_id, bid1_id, bids_access[bid1_id]))
self.assertEqual(response.status, '200 OK')
#### Bid confirmation
#
with open('docs/source/tutorial/bidder-activate-after-changing-tender.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/bids/{}?acc_token={}'.format(
self.tender_id, bid1_id, bids_access[bid1_id]), {'data': {"status": "active"}})
self.assertEqual(response.status, '200 OK')
# with open('docs/source/tutorial/bidder-after-activate-bid-tender.http', 'w') as self.app.file_obj:
# response = self.app.get('/tenders/{}/bids/{}?acc_token={}'.format(
# self.tender_id, bid1_id, bids_access[bid1_id]))
# self.assertEqual(response.status, '200 OK')
# tutorial/register-2nd-bidder.http
with open('docs/source/tutorial/register-2nd-bidder.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), bid2)
bid2_id = response.json['data']['id']
bids_access[bid2_id] = response.json['access']['token']
self.assertEqual(response.status, '201 Created')
#### Auction
#
self.set_status('active.auction')
self.app.authorization = ('Basic', ('auction', ''))
patch_data = {
'auctionUrl': u'http://auction-sandbox.openprocurement.org/tenders/{}'.format(self.tender_id),
'bids': [
{
"id": bid1_id,
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, bid1_id)
},
{
"id": bid2_id,
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, bid2_id)
}
]
}
response = self.app.patch_json('/tenders/{}/auction?acc_token={}'.format(self.tender_id, owner_token),
{'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/auction-url.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/bidder-participation-url.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/bids/{}?acc_token={}'.format(self.tender_id, bid1_id, bids_access[bid1_id]))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/bidder2-participation-url.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/bids/{}?acc_token={}'.format(self.tender_id, bid2_id, bids_access[bid2_id]))
self.assertEqual(response.status, '200 OK')
#### Confirming qualification
#
# self.set_status('active.qualification')
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
auction_bids_data = response.json['data']['bids']
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id),
{'data': {'bids': auction_bids_data}})
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(self.tender_id, owner_token))
# get pending award
award_id = [i['id'] for i in response.json['data'] if i['status'] == 'pending'][0]
with open('docs/source/tutorial/confirm-qualification.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(self.tender_id, award_id, owner_token), {"data": {"status": "active", "qualified": True, "eligible": True}})
self.assertEqual(response.status, '200 OK')
response = self.app.get('/tenders/{}/contracts?acc_token={}'.format(
self.tender_id, owner_token))
self.contract_id = response.json['data'][0]['id']
#### Set contract value
tender = self.db.get(self.tender_id)
for i in tender.get('awards', []):
i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
self.db.save(tender)
with open('docs/source/tutorial/tender-contract-set-contract-value.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
self.tender_id, self.contract_id, owner_token), {"data": {"value": {"amount": 238}}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.json['data']['value']['amount'], 238)
#### Setting contract signature date
#
with open('docs/source/tutorial/tender-contract-sign-date.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
self.tender_id, self.contract_id, owner_token), {'data': {"dateSigned": get_now().isoformat()} })
self.assertEqual(response.status, '200 OK')
#### Setting contract period
period_dates = {"period": {"startDate": (get_now()).isoformat(), "endDate": (get_now() + timedelta(days=365)).isoformat()}}
with open('docs/source/tutorial/tender-contract-period.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
self.tender_id, self.contract_id, owner_token), {'data': {'period': period_dates["period"]}})
self.assertEqual(response.status, '200 OK')
#### Uploading contract documentation
#
with open('docs/source/tutorial/tender-contract-upload-document.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/contracts/{}/documents?acc_token={}'.format(
self.tender_id, self.contract_id, owner_token), upload_files=[('file', 'contract_document.doc', 'content')])
self.assertEqual(response.status, '201 Created')
self.document_id = response.json['data']['id']
with open('docs/source/tutorial/tender-contract-get.http', 'w') as self.app.file_obj:
response = self.app.get('/tenders/{}/contracts/{}?acc_token={}'.format(
self.tender_id, self.contract_id, owner_token))
self.assertEqual(response.status, '200 OK')
#### Preparing the cancellation request
#
with open('docs/source/tutorial/prepare-cancellation.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(
self.tender_id, owner_token), cancellation)
self.assertEqual(response.status, '201 Created')
cancellation_id = response.json['data']['id']
with open('docs/source/tutorial/update-cancellation-reasonType.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/cancellations/{}?acc_token={}'.format(
self.tender_id, cancellation_id, owner_token), {"data":{'reasonType': 'unsuccessful'}})
self.assertEqual(response.status, '200 OK')
#### Filling cancellation with protocol and supplementary documentation
#
with open('docs/source/tutorial/upload-cancellation-doc.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/cancellations/{}/documents?acc_token={}'.format(
self.tender_id, cancellation_id, owner_token), upload_files=[('file', u'Notice.pdf', 'content')])
cancellation_doc_id = response.json['data']['id']
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/patch-cancellation.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/cancellations/{}/documents/{}?acc_token={}'.format(
self.tender_id, cancellation_id, cancellation_doc_id, owner_token), {'data': {"description": 'Changed description'}} )
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/update-cancellation-doc.http', 'w') as self.app.file_obj:
response = self.app.put('/tenders/{}/cancellations/{}/documents/{}?acc_token={}'.format(
self.tender_id, cancellation_id, cancellation_doc_id, owner_token), upload_files=[('file', 'Notice-2.pdf', 'content2')])
self.assertEqual(response.status, '200 OK')
#### Activating the request and cancelling tender
#
with open('docs/source/tutorial/active-cancellation.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/cancellations/{}?acc_token={}'.format(
self.tender_id, cancellation_id, owner_token), {"data":{"status":"active"}})
self.assertEqual(response.status, '200 OK')
#### Creating tender
#
with open('docs/source/tutorial/tender-post-attempt-json-data.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders?opt_pretty=1', {"data": test_tender_ua_data})
self.assertEqual(response.status, '201 Created')
def test_complaints(self):
response = self.app.post_json('/tenders?opt_pretty=1', {"data": test_tender_ua_data})
self.assertEqual(response.status, '201 Created')
tender = response.json['data']
owner_token = response.json['access']['token']
self.tender_id = tender['id']
with open('docs/source/tutorial/complaint-submission.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), complaint)
self.assertEqual(response.status, '201 Created')
complaint1_token = response.json['access']['token']
complaint1_id = response.json['data']['id']
with open('docs/source/tutorial/complaint-submission-upload.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/complaints/{}/documents?acc_token={}'.format(self.tender_id, complaint1_id, complaint1_token),
upload_files=[('file', u'Complaint_Attachement.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/complaint-claim.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint1_id, complaint1_token), {"data": {"status": "claim"}})
self.assertEqual(response.status, '200 OK')
claim = {'data': complaint['data'].copy()}
claim['data']['status'] = 'claim'
with open('docs/source/tutorial/complaint-submission-claim.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), claim)
self.assertEqual(response.status, '201 Created')
complaint2_token = response.json['access']['token']
complaint2_id = response.json['data']['id']
complaint_data = {'data': complaint['data'].copy()}
complaint_data['data']['status'] = 'pending'
with open('docs/source/tutorial/complaint-submission-complaint.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint3_id = response.json['data']['id']
complaint3_token = response.json['access']['token']
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), claim)
self.assertEqual(response.status, '201 Created')
complaint4_id = response.json['data']['id']
complaint4_token = response.json['access']['token']
with open('docs/source/tutorial/complaint-complaint.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint1_id, complaint1_token), {"data": {"status": "pending"}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-answer.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint2_id, owner_token), {"data": {
"status": "answered",
"resolutionType": "resolved",
"resolution": "Виправлено неконкурентні умови"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint4_id, owner_token), {"data": {
"status": "answered",
"resolutionType": "invalid",
"resolution": "Вимога не відповідає предмету закупівлі"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-satisfy.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint2_id, complaint2_token), {"data": {
"satisfied": True,
"status": "resolved"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-escalate.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint4_id, complaint4_token), {"data": {
"satisfied": False,
"status": "pending"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint5_id = response.json['data']['id']
complaint5_token = response.json['access']['token']
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint6_id = response.json['data']['id']
complaint6_token = response.json['access']['token']
self.app.authorization = ('Basic', ('reviewer', ''))
with open('docs/source/tutorial/complaint-reject.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint4_id), {"data": {
"status": "invalid"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-accept.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint1_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint3_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint5_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint6_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-resolution-upload.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/complaints/{}/documents'.format(self.tender_id, complaint1_id),
upload_files=[('file', u'ComplaintResolution.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/complaint-resolve.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint1_id), {"data": {
"status": "satisfied"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-decline.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint3_id), {"data": {
"status": "declined"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-accepted-stopped.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint5_id), {"data": {
"decision": "Тендер скасовується замовником",
"status": "stopped"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/complaint-resolved.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint1_id, owner_token), {"data": {
"tendererAction": "Умови виправлено",
"status": "resolved"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint-accepted-stopping.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint6_id, complaint6_token), {"data": {
"cancellationReason": "Тендер скасовується замовником",
"status": "stopping"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('reviewer', ''))
with open('docs/source/tutorial/complaint-stopping-stopped.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint6_id), {"data": {
"decision": "Тендер скасовується замовником",
"status": "stopped"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), complaint)
self.assertEqual(response.status, '201 Created')
complaint7_id = response.json['data']['id']
complaint7_token = response.json['access']['token']
with open('docs/source/tutorial/complaint-cancel.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint7_id, complaint7_token), {"data": {
"cancellationReason": "Умови виправлено",
"status": "cancelled"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaints-list.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get('/tenders/{}/complaints'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/complaint.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get('/tenders/{}/complaints/{}'.format(self.tender_id, complaint1_id))
self.assertEqual(response.status, '200 OK')
def test_award_complaints(self):
response = self.app.post_json('/tenders?opt_pretty=1', {"data": test_tender_ua_data})
self.assertEqual(response.status, '201 Created')
tender = response.json['data']
owner_token = response.json['access']['token']
self.tender_id = tender['id']
response = self.app.post_json('/tenders/{}/bids'.format(self.tender_id), bid)
bid_id = response.json['data']['id']
bid_token = response.json['access']['token']
# create second bid
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/bids'.format(self.tender_id), bid2)
# switch to active.auction
self.set_status('active.auction')
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
auction_bids_data = response.json['data']['bids']
self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': auction_bids_data}})
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.get('/tenders/{}/awards?acc_token={}'.format(self.tender_id, owner_token))
# get pending award
award_id = [i['id'] for i in response.json['data'] if i['status'] == 'pending'][0]
        response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(self.tender_id, award_id, owner_token), {"data": {"status": "active", "qualified": True, "eligible": True}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-submission.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint)
self.assertEqual(response.status, '201 Created')
complaint1_token = response.json['access']['token']
complaint1_id = response.json['data']['id']
with open('docs/source/tutorial/award-complaint-submission-upload.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/awards/{}/complaints/{}/documents?acc_token={}'.format(self.tender_id, award_id, complaint1_id, complaint1_token),
upload_files=[('file', u'Complaint_Attachement.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/award-complaint-complaint.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint1_id, complaint1_token), {"data": {"status": "pending"}})
self.assertEqual(response.status, '200 OK')
complaint_data = {'data': complaint['data'].copy()}
complaint_data['data']['status'] = 'pending'
with open('docs/source/tutorial/award-complaint-submission-complaint.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint2_token = response.json['access']['token']
complaint2_id = response.json['data']['id']
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint3_token = response.json['access']['token']
complaint3_id = response.json['data']['id']
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint4_token = response.json['access']['token']
complaint4_id = response.json['data']['id']
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint_data)
self.assertEqual(response.status, '201 Created')
complaint5_token = response.json['access']['token']
complaint5_id = response.json['data']['id']
claim = {'data': complaint['data'].copy()}
claim['data']['status'] = 'claim'
with open('docs/source/tutorial/award-complaint-submission-claim.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), claim)
self.assertEqual(response.status, '201 Created')
complaint6_token = response.json['access']['token']
complaint6_id = response.json['data']['id']
with open('docs/source/tutorial/award-complaint-answer.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint6_id, owner_token), {"data": {
"status": "answered",
"resolutionType": "resolved",
"resolution": "Умови виправлено, вибір переможня буде розгянуто повторно"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-satisfy.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint6_id, complaint6_token), {"data": {
"satisfied": True,
}})
self.assertEqual(response.status, '200 OK')
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), claim)
self.assertEqual(response.status, '201 Created')
complaint7_token = response.json['access']['token']
complaint7_id = response.json['data']['id']
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint7_id, owner_token), {"data": {
"status": "answered",
"resolutionType": "invalid",
"resolution": "Вимога не відповідає предмету закупівлі"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-unsatisfy.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint7_id, complaint7_token), {"data": {
"satisfied": False,
}})
self.assertEqual(response.status, '200 OK')
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint)
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/award-complaint-claim.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, response.json['data']['id'], response.json['access']['token']), {"data": {
"status": "claim"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('reviewer', ''))
with open('docs/source/tutorial/award-complaint-reject.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint2_id), {"data": {
"status": "invalid"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-accept.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint1_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint3_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint4_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint5_id), {"data": {
"status": "accepted"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-resolution-upload.http', 'w') as self.app.file_obj:
response = self.app.post('/tenders/{}/awards/{}/complaints/{}/documents'.format(self.tender_id, award_id, complaint1_id),
upload_files=[('file', u'ComplaintResolution.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/award-complaint-resolve.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint1_id), {"data": {
"status": "satisfied"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-decline.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint3_id), {"data": {
"status": "declined"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-accepted-stopped.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint5_id), {"data": {
"decision": "Тендер скасовується замовником",
"status": "stopped"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaints-list.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get('/tenders/{}/awards/{}/complaints'.format(self.tender_id, award_id))
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint.http', 'w') as self.app.file_obj:
self.app.authorization = None
response = self.app.get('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint1_id))
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/award-complaint-resolved.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint1_id, owner_token), {"data": {
"tendererAction": "Умови виправлено, вибір переможня буде розгянуто повторно",
"status": "resolved"
}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-accepted-stopping.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, complaint4_id, complaint4_token), {"data": {
"cancellationReason": "Тендер скасовується замовником",
"status": "stopping"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('reviewer', ''))
with open('docs/source/tutorial/award-complaint-stopping-stopped.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}'.format(self.tender_id, award_id, complaint4_id), {"data": {
"decision": "Тендер скасовується замовником",
"status": "stopped"
}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('broker', ''))
with open('docs/source/tutorial/award-complaint-satisfied-resolving.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(self.tender_id, award_id, owner_token), {"data": {
"status": "cancelled"
}})
self.assertEqual(response.status, '200 OK')
new_award_id = response.headers['Location'][-32:]
award_id = new_award_id
        response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(self.tender_id, award_id, owner_token), {"data": {"status": "active", "qualified": True, "eligible": True}})
self.assertEqual(response.status, '200 OK')
with open('docs/source/tutorial/award-complaint-submit.http', 'w') as self.app.file_obj:
response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, award_id, bid_token), complaint)
self.assertEqual(response.status, '201 Created')
with open('docs/source/tutorial/award-complaint-cancel.http', 'w') as self.app.file_obj:
response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, award_id, response.json['data']['id'], response.json['access']['token']), {"data": {
"cancellationReason": "Умови виправлено",
"status": "cancelled"
}})
self.assertEqual(response.status, '200 OK')
| 49.307326 | 201 | 0.590078 |
c814ca030daf686e625a783cb59ca914c9d0ef7e
| 4,325 |
py
|
Python
|
contrib/seeds/generate-seeds.py
|
StutzCoin/stutz
|
b7db01cff055e7dc4b07317732fc990a46161956
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
StutzCoin/stutz
|
b7db01cff055e7dc4b07317732fc990a46161956
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
StutzCoin/stutz
|
b7db01cff055e7dc4b07317732fc990a46161956
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
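# Example (illustrative): the input line "1.2.3.4:8333" is emitted as
#   {{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x01,0x02,0x03,0x04}, 8333},
# i.e. the IPv4 address mapped into the IPv4-in-IPv6 range followed by the port.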
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
    if len(addr) > 6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
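# Examples:
#   name_to_ipv6('1.2.3.4') -> pchIPv4 + bytearray([1, 2, 3, 4])
#   name_to_ipv6('::1')     -> bytearray of 15 zero bytes followed by 0x01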
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
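# Examples:
#   parse_spec('[2001:db8::1]:1848', 1848) -> (name_to_ipv6('2001:db8::1'), 1848)
#   parse_spec('5.6.7.8', 1848)            -> (name_to_ipv6('5.6.7.8'), 1848)  # default port applied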
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
    if len(sys.argv) < 2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the stutz network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 1848)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 19335)
g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| 31.34058 | 98 | 0.581272 |
a864fd51200177b1f4d2b2fd19f7730461cb094f
| 1,981 |
py
|
Python
|
examples/scan_ble.py
|
nobrin/omron-2jcie-bu01
|
7570eb55c4f3f371cce999fbff5bd3dfcd34e424
|
[
"MIT"
] | 1 |
2021-10-20T06:11:50.000Z
|
2021-10-20T06:11:50.000Z
|
examples/scan_ble.py
|
nobrin/omron-2jcie-bu01
|
7570eb55c4f3f371cce999fbff5bd3dfcd34e424
|
[
"MIT"
] | null | null | null |
examples/scan_ble.py
|
nobrin/omron-2jcie-bu01
|
7570eb55c4f3f371cce999fbff5bd3dfcd34e424
|
[
"MIT"
] | 1 |
2021-07-12T15:57:47.000Z
|
2021-07-12T15:57:47.000Z
|
#!/usr/bin/env python3
""" NOTE: Bluez 5.50 may not return all every received messages.
Data seems to be detected every 11 seconds.
In mode 0x03, ADV_IND and SCAN_RSP are not always aligned.
So it seems that complete data can only be obtained once in a while.
The acquisition intervals were between 44 and 374 seconds.
It seems random...
Test result on Raspbian GNU/Linux 10 (buster)
Linux raspberrypi 5.4.51+ #1333 Mon Aug 10 16:38:02 BST 2020 armv6l GNU/Linux
Package: bluetooth / Version: 5.50-1.2~deb10u1+rpt2
Python 3.7.3 / Bleak 0.7.1
$ ./scan_blt.py (scan 1200 seconds)
Adv_0x03(type=3, seq=27, temperature=Decimal('27.93'), ...
Adv_0x03(type=3, seq=13, temperature=Decimal('28.68'), ...
Adv_0x03(type=3, seq=112, temperature=Decimal('28.83'), ...
Adv_0x03(type=3, seq=178, temperature=Decimal('28.91'), ...
Adv_0x03(type=3, seq=222, temperature=Decimal('28.95'), ...
Adv_0x03(type=3, seq=241, temperature=Decimal('28.87'), ...
Adv_0x03(type=3, seq=103, temperature=Decimal('29.05'), ...
Intervals(seconds)
- 242
- 99
- 66
- 44
- 275
- 374
If you need scan on Linux, consider notify method.
---
On Windows, data can be detected at intervals of 1 second or less.
And complete data will be obtained about every 1 second.
"""
import sys
sys.path.insert(0, "../lib-ext")
sys.path.insert(0, "..")
from omron_2jcie_bu01 import Omron2JCIE_BU01
#s = Omron2JCIE_BU01.ble("AA:BB:CC:DD:EE:FF")
s = Omron2JCIE_BU01.ble()
# Scan
def on_scan(data):
print(data)
# Set Advertising mode: 0x03 (Active scan)
# The setting is retained even when the device is powered off.
s.advertise_setting(mode=0x03)
# Scanning needs to disconnect
# If connected, advertising packets will not be detected.
s.disconnect()
s.scan(on_scan, scantime=10, active=True)
# Mode: 0x01 (Passive scan)
#s.advertise_setting(mode=0x01)
#s.disconnect()
#s.scan(on_scan, scantime=10)
| 31.444444 | 81 | 0.684503 |
837e1e94f34b4c0c3baee5ecd3a45132fe97f995
| 1,088 |
py
|
Python
|
Game/Dados/dadosDoJogo.py
|
almerindopaixao/GameOfHeroes
|
46a20f236caa4a5eef1f7dfe58e4add6d1787e3c
|
[
"MIT"
] | 1 |
2020-04-14T04:16:02.000Z
|
2020-04-14T04:16:02.000Z
|
Game/Dados/dadosDoJogo.py
|
almerindopaixao/GameOfHeroes
|
46a20f236caa4a5eef1f7dfe58e4add6d1787e3c
|
[
"MIT"
] | null | null | null |
Game/Dados/dadosDoJogo.py
|
almerindopaixao/GameOfHeroes
|
46a20f236caa4a5eef1f7dfe58e4add6d1787e3c
|
[
"MIT"
] | null | null | null |
from time import sleep
import json
def carregarJogo(player):
"""
Função que carrega o jogo
"""
try:
jogoSalvo = open('jogoSalvo.json', 'rb')
jogoSalvo.close()
except IOError:
print('\033[1;31mNão foi encontrado nenhum jogo Salvo\033[m')
return
with open('jogoSalvo.json', 'rb') as jogoSalvo:
data = jogoSalvo.readline()
data.decode()
objeto = json.loads(data)
player.nome = objeto['nome']
player.For = objeto['For']
player.Def = objeto['Def']
player.HP = objeto['HP']
player.SP = objeto['SP']
player.inimigosMortos = objeto['inimigosMortos']
print('Carregando Jogo....')
sleep(3)
print('\033[1;32mJogo Carregado\033[m')
def salvarJogo(player):
"""
Função que salva o jogo
"""
with open('jogoSalvo.json', 'wb') as jogoSalvo:
data = player.__dict__
data_string = json.dumps(data)
jogoSalvo.write(data_string.encode())
print('Salvando Jogo..')
sleep(3)
print('\033[1;32mJogo Salvo\033[m')
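# Usage sketch (illustrative; assumes a player object whose __dict__ holds
# only the JSON-serializable fields read back by carregarJogo above):
#   salvarJogo(player)
#   carregarJogo(player)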
| 23.148936 | 69 | 0.589154 |
bc32c5f1d7965d118d526013de34b62b3276010b
| 20,129 |
py
|
Python
|
Installation/tests/test_spectrogram.py
|
pmatosevic/nnAudio
|
bf7a6392b32f11294d677cba1920886f5b321730
|
[
"MIT"
] | null | null | null |
Installation/tests/test_spectrogram.py
|
pmatosevic/nnAudio
|
bf7a6392b32f11294d677cba1920886f5b321730
|
[
"MIT"
] | null | null | null |
Installation/tests/test_spectrogram.py
|
pmatosevic/nnAudio
|
bf7a6392b32f11294d677cba1920886f5b321730
|
[
"MIT"
] | null | null | null |
import pytest
import librosa
import torch
from scipy.signal import chirp, sweep_poly
import sys
sys.path.insert(0, './')
from nnAudio.Spectrogram import *
from parameters import *
import warnings
gpu_idx=0 # Choose which GPU to use
# If a GPU is available, also test on GPU
if torch.cuda.is_available():
device_args = ['cpu', f'cuda:{gpu_idx}']
else:
    warnings.warn("GPU is not available, testing only on CPU")
device_args = ['cpu']
# librosa example audio for testing
example_y, example_sr = librosa.load(librosa.util.example_audio_file())
@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters)
@pytest.mark.parametrize("device", [*device_args])
def test_inverse2(n_fft, hop_length, window, device):
x = torch.tensor(example_y,device=device)
stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device)
istft = iSTFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device)
X = stft(x.unsqueeze(0), output_format="Complex")
x_recon = istft(X, length=x.shape[0], onesided=True).squeeze()
assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-5, atol=1e-3)
@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters)
@pytest.mark.parametrize("device", [*device_args])
def test_inverse(n_fft, hop_length, window, device):
x = torch.tensor(example_y, device=device)
stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, iSTFT=True).to(device)
X = stft(x.unsqueeze(0), output_format="Complex")
x_recon = stft.inverse(X, length=x.shape[0]).squeeze()
assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-3, atol=1)
# @pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters)
# def test_inverse_GPU(n_fft, hop_length, window):
# x = torch.tensor(example_y,device=f'cuda:{gpu_idx}')
# stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, device=f'cuda:{gpu_idx}')
# X = stft(x.unsqueeze(0), output_format="Complex")
# x_recon = stft.inverse(X, num_samples=x.shape[0]).squeeze()
# assert np.allclose(x.cpu(), x_recon.cpu(), rtol=1e-3, atol=1)
@pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters)
@pytest.mark.parametrize("device", [*device_args])
def test_stft_complex(n_fft, hop_length, window, device):
x = example_y
stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Complex")
X_real, X_imag = X[:, :, :, 0].squeeze(), X[:, :, :, 1].squeeze()
X_librosa = librosa.stft(x, n_fft=n_fft, hop_length=hop_length, window=window)
real_diff, imag_diff = np.allclose(X_real.cpu(), X_librosa.real, rtol=1e-3, atol=1e-3), \
np.allclose(X_imag.cpu(), X_librosa.imag, rtol=1e-3, atol=1e-3)
assert real_diff and imag_diff
# @pytest.mark.parametrize("n_fft, hop_length, window", stft_parameters)
# def test_stft_complex_GPU(n_fft, hop_length, window):
# x = example_y
# stft = STFT(n_fft=n_fft, hop_length=hop_length, window=window, device=f'cuda:{gpu_idx}')
# X = stft(torch.tensor(x,device=f'cuda:{gpu_idx}').unsqueeze(0), output_format="Complex")
# X_real, X_imag = X[:, :, :, 0].squeeze().detach().cpu(), X[:, :, :, 1].squeeze().detach().cpu()
# X_librosa = librosa.stft(x, n_fft=n_fft, hop_length=hop_length, window=window)
# real_diff, imag_diff = np.allclose(X_real, X_librosa.real, rtol=1e-3, atol=1e-3), \
# np.allclose(X_imag, X_librosa.imag, rtol=1e-3, atol=1e-3)
# assert real_diff and imag_diff
@pytest.mark.parametrize("n_fft, win_length, hop_length", stft_with_win_parameters)
@pytest.mark.parametrize("device", [*device_args])
def test_stft_complex_winlength(n_fft, win_length, hop_length, device):
x = example_y
stft = STFT(n_fft=n_fft, win_length=win_length, hop_length=hop_length).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Complex")
X_real, X_imag = X[:, :, :, 0].squeeze(), X[:, :, :, 1].squeeze()
X_librosa = librosa.stft(x, n_fft=n_fft, win_length=win_length, hop_length=hop_length)
real_diff, imag_diff = np.allclose(X_real.cpu(), X_librosa.real, rtol=1e-3, atol=1e-3), \
np.allclose(X_imag.cpu(), X_librosa.imag, rtol=1e-3, atol=1e-3)
assert real_diff and imag_diff
@pytest.mark.parametrize("device", [*device_args])
def test_stft_magnitude(device):
x = example_y
stft = STFT(n_fft=2048, hop_length=512).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Magnitude").squeeze()
X_librosa, _ = librosa.core.magphase(librosa.stft(x, n_fft=2048, hop_length=512))
assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_stft_phase(device):
x = example_y
stft = STFT(n_fft=2048, hop_length=512).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0), output_format="Phase")
X_real, X_imag = torch.cos(X).squeeze(), torch.sin(X).squeeze()
_, X_librosa = librosa.core.magphase(librosa.stft(x, n_fft=2048, hop_length=512))
real_diff, imag_diff = np.mean(np.abs(X_real.cpu().numpy() - X_librosa.real)), \
np.mean(np.abs(X_imag.cpu().numpy() - X_librosa.imag))
    # np.allclose is too strict when comparing phase against librosa's output,
    # so we use the average element-wise distance as the test metric instead.
assert real_diff < 2e-4 and imag_diff < 2e-4
@pytest.mark.parametrize("n_fft, win_length", mel_win_parameters)
@pytest.mark.parametrize("device", [*device_args])
def test_mel_spectrogram(n_fft, win_length, device):
x = example_y
melspec = MelSpectrogram(n_fft=n_fft, win_length=win_length, hop_length=512).to(device)
X = melspec(torch.tensor(x, device=device).unsqueeze(0)).squeeze()
X_librosa = librosa.feature.melspectrogram(x, n_fft=n_fft, win_length=win_length, hop_length=512)
assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_1992(device):
# Log sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='logarithmic')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT1992(sr=fs, fmin=220, output_format="Magnitude",
n_bins=80, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# Complex
stft = CQT1992(sr=fs, fmin=220, output_format="Complex",
n_bins=80, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# Phase
stft = CQT1992(sr=fs, fmin=220, output_format="Phase",
n_bins=160, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
assert True
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_2010(device):
# Log sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='logarithmic')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT2010(sr=fs, fmin=110, output_format="Magnitude",
n_bins=160, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# Complex
stft = CQT2010(sr=fs, fmin=110, output_format="Complex",
n_bins=160, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# Phase
stft = CQT2010(sr=fs, fmin=110, output_format="Phase",
n_bins=160, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
assert True
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_1992_v2_log(device):
# Log sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='logarithmic')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT1992v2(sr=fs, fmin=55, output_format="Magnitude",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-mag-ground-truth.npy")
X = torch.log(X + 1e-5)
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Complex
stft = CQT1992v2(sr=fs, fmin=55, output_format="Complex",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-complex-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Phase
stft = CQT1992v2(sr=fs, fmin=55, output_format="Phase",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/log-sweep-cqt-1992-phase-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_1992_v2_linear(device):
# Linear sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='linear')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT1992v2(sr=fs, fmin=55, output_format="Magnitude",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-mag-ground-truth.npy")
X = torch.log(X + 1e-5)
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Complex
stft = CQT1992v2(sr=fs, fmin=55, output_format="Complex",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-complex-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Phase
stft = CQT1992v2(sr=fs, fmin=55, output_format="Phase",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-1992-phase-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_2010_v2_log(device):
# Log sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='logarithmic')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT2010v2(sr=fs, fmin=55, output_format="Magnitude",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
X = torch.log(X + 1e-2)
# np.save("tests/ground-truths/log-sweep-cqt-2010-mag-ground-truth", X.cpu())
ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-mag-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Complex
stft = CQT2010v2(sr=fs, fmin=55, output_format="Complex",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# np.save("tests/ground-truths/log-sweep-cqt-2010-complex-ground-truth", X.cpu())
ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-complex-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# # Phase
# stft = CQT2010v2(sr=fs, fmin=55, device=device, output_format="Phase",
# n_bins=207, bins_per_octave=24)
# X = stft(torch.tensor(x, device=device).unsqueeze(0))
# # np.save("tests/ground-truths/log-sweep-cqt-2010-phase-ground-truth", X.cpu())
# ground_truth = np.load("tests/ground-truths/log-sweep-cqt-2010-phase-ground-truth.npy")
# assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_cqt_2010_v2_linear(device):
# Linear sweep case
fs = 44100
t = 1
f0 = 55
f1 = 22050
s = np.linspace(0, t, fs*t)
x = chirp(s, f0, 1, f1, method='linear')
x = x.astype(dtype=np.float32)
# Magnitude
stft = CQT2010v2(sr=fs, fmin=55, output_format="Magnitude",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
X = torch.log(X + 1e-2)
# np.save("tests/ground-truths/linear-sweep-cqt-2010-mag-ground-truth", X.cpu())
ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-mag-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Complex
stft = CQT2010v2(sr=fs, fmin=55, output_format="Complex",
n_bins=207, bins_per_octave=24).to(device)
X = stft(torch.tensor(x, device=device).unsqueeze(0))
# np.save("tests/ground-truths/linear-sweep-cqt-2010-complex-ground-truth", X.cpu())
ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-complex-ground-truth.npy")
assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
# Phase
# stft = CQT2010v2(sr=fs, fmin=55, device=device, output_format="Phase",
# n_bins=207, bins_per_octave=24)
# X = stft(torch.tensor(x, device=device).unsqueeze(0))
# # np.save("tests/ground-truths/linear-sweep-cqt-2010-phase-ground-truth", X.cpu())
# ground_truth = np.load("tests/ground-truths/linear-sweep-cqt-2010-phase-ground-truth.npy")
# assert np.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_mfcc(device):
x = example_y
mfcc = MFCC(sr=example_sr).to(device)
X = mfcc(torch.tensor(x, device=device).unsqueeze(0)).squeeze()
X_librosa = librosa.feature.mfcc(x, sr=example_sr)
assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [*device_args])
def test_cfp_original(device):
x = torch.tensor(example_y,device=device).unsqueeze(0)
cfp_layer = Combined_Frequency_Periodicity(fr=2,
fs=44100,
hop_length=320,
window_size=2049,
fc=80,
tc=0.001,
g=[0.24, 0.6, 1],
NumPerOct=48,).to(device)
X = cfp_layer(x)
ground_truth = torch.load("tests/ground-truths/cfp_original.pt")
for i, j in zip(X, ground_truth):
assert torch.allclose(i.cpu(), j, 1e-3, 1e-1)
@pytest.mark.parametrize("device", [*device_args])
def test_cfp_new(device):
x = torch.tensor(example_y,device=device).unsqueeze(0)
cfp_layer = CFP(fr=2,
fs=44100,
hop_length=320,
window_size=2049,
fc=80,
tc=0.001,
g=[0.24, 0.6, 1],
NumPerOct=48,).to(device)
X = cfp_layer(x)
ground_truth = torch.load("tests/ground-truths/cfp_new.pt")
assert torch.allclose(X.cpu(), ground_truth, rtol=1e-3, atol=1e-1)
@pytest.mark.parametrize("device", [*device_args])
def test_mfcc(device):
x = example_y
mfcc = MFCC(sr=example_sr).to(device)
X = mfcc(torch.tensor(x, device=device).unsqueeze(0)).squeeze()
X_librosa = librosa.feature.mfcc(x, sr=example_sr)
assert np.allclose(X.cpu(), X_librosa, rtol=1e-3, atol=1e-3)
# If a GPU is available, also test in parallel
if torch.cuda.is_available():
x = torch.randn((4,44100)).to(f'cuda:{gpu_idx}') # Create a batch of input for the following Data.Parallel test
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_STFT_Parallel(device):
spec_layer = STFT(hop_length=512, n_fft=2048, window='hann',
freq_scale='no',
output_format='Complex').to(device)
inverse_spec_layer = iSTFT(hop_length=512, n_fft=2048, window='hann',
freq_scale='no').to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
inverse_spec_layer_parallel = torch.nn.DataParallel(inverse_spec_layer)
spec = spec_layer_parallel(x)
x_recon = inverse_spec_layer_parallel(spec, onesided=True, length=x.shape[-1])
assert np.allclose(x_recon.detach().cpu(), x.detach().cpu(), rtol=1e-3, atol=1e-3)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_MelSpectrogram_Parallel(device):
spec_layer = MelSpectrogram(sr=22050, n_fft=2048, n_mels=128, hop_length=512,
window='hann', center=True, pad_mode='reflect',
power=2.0, htk=False, fmin=0.0, fmax=None, norm=1,
verbose=True).to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_MFCC_Parallel(device):
spec_layer = MFCC().to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_CQT1992_Parallel(device):
spec_layer = CQT1992(fmin=110, n_bins=60, bins_per_octave=12).to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_CQT1992v2_Parallel(device):
spec_layer = CQT1992v2().to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_CQT2010_Parallel(device):
spec_layer = CQT2010().to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_CQT2010v2_Parallel(device):
spec_layer = CQT2010v2().to(device)
spec_layer_parallel = torch.nn.DataParallel(spec_layer)
spec = spec_layer_parallel(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_cfp_original_Parallel(device):
cfp_layer = Combined_Frequency_Periodicity(fr=2,
fs=44100,
hop_length=320,
window_size=2049,
fc=80,
tc=0.001,
g=[0.24, 0.6, 1],
NumPerOct=48,).to(device)
cfp_layer = torch.nn.DataParallel(cfp_layer)
X = cfp_layer(x)
@pytest.mark.parametrize("device", [f'cuda:{gpu_idx}'])
def test_cfp_new_Parallel(device):
cfp_layer = CFP(fr=2,
fs=44100,
hop_length=320,
window_size=2049,
fc=80,
tc=0.001,
g=[0.24, 0.6, 1],
NumPerOct=48,).to(device)
X = cfp_layer(x.to(device))
| 44.23956 | 119 | 0.622386 |
655ecca9098cc69f81b455c4453a49e74ad86738
| 111 |
py
|
Python
|
CAIL2020/cocr/torchocr/networks/__init__.py
|
ShenDezhou/CAIL
|
c4cfa98ab4ecedbce34a7a5a186830486047540c
|
[
"Apache-2.0"
] | 71 |
2020-07-16T01:49:27.000Z
|
2022-03-27T16:55:00.000Z
|
CAIL2020/cocr/torchocr/networks/__init__.py
|
ShenDezhou/CAIL
|
c4cfa98ab4ecedbce34a7a5a186830486047540c
|
[
"Apache-2.0"
] | 11 |
2020-09-18T14:26:25.000Z
|
2022-02-09T23:49:33.000Z
|
CAIL2020/cocr/torchocr/networks/__init__.py
|
ShenDezhou/CAIL
|
c4cfa98ab4ecedbce34a7a5a186830486047540c
|
[
"Apache-2.0"
] | 16 |
2020-07-15T07:24:30.000Z
|
2022-03-19T05:41:11.000Z
|
from .architectures import build_model
from .losses import build_loss
__all__ = ['build_model', 'build_loss']
| 22.2 | 39 | 0.792793 |
ebf0c448e1090e2ba5bf8fcdd9dbc9de50e99700
| 2,331 |
py
|
Python
|
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/v7_3/aio/_configuration.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | 1 |
2022-02-01T18:50:12.000Z
|
2022-02-01T18:50:12.000Z
|
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/v7_3/aio/_configuration.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/v7_3/aio/_configuration.py
|
vincenttran-msft/azure-sdk-for-python
|
348b56f9f03eeb3f7b502eed51daf494ffff874d
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
VERSION = "unknown"
class KeyVaultClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for KeyVaultClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:keyword api_version: Api Version. Default value is "7.3". Note that overriding this default
value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
**kwargs: Any
) -> None:
super(KeyVaultClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop('api_version', "7.3") # type: str
self.api_version = api_version
kwargs.setdefault('sdk_moniker', 'keyvault/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
| 44.826923 | 108 | 0.677392 |
00695b11eaa0f54c472c58ec84f6ed5d4f600810
| 499 |
py
|
Python
|
plotly/validators/histogram2dcontour/stream/_token.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 2 |
2020-03-24T11:41:14.000Z
|
2021-01-14T07:59:43.000Z
|
plotly/validators/histogram2dcontour/stream/_token.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | null | null | null |
plotly/validators/histogram2dcontour/stream/_token.py
|
faezs/plotly.py
|
6009b5b9c746e5d2a2849ad255a4eb234b551ed7
|
[
"MIT"
] | 4 |
2019-06-03T14:49:12.000Z
|
2022-01-06T01:05:12.000Z
|
import _plotly_utils.basevalidators
class TokenValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='token',
parent_name='histogram2dcontour.stream',
**kwargs
):
super(TokenValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='calc',
no_blank=True,
role='info',
strict=True,
**kwargs
)
| 23.761905 | 67 | 0.581162 |
022a482c6b203eec3b9185820554348a0cfdad75
| 699 |
py
|
Python
|
graphics_in_the_admin_panel_project/graphics_in_the_admin_panel_application/parser.py
|
Jenek209/4a0f9bca3ce79bd9b0a5fa6a95e134d0
|
15295b50cad1e8fe574ba056486a28ae990e251a
|
[
"MIT"
] | null | null | null |
graphics_in_the_admin_panel_project/graphics_in_the_admin_panel_application/parser.py
|
Jenek209/4a0f9bca3ce79bd9b0a5fa6a95e134d0
|
15295b50cad1e8fe574ba056486a28ae990e251a
|
[
"MIT"
] | null | null | null |
graphics_in_the_admin_panel_project/graphics_in_the_admin_panel_application/parser.py
|
Jenek209/4a0f9bca3ce79bd9b0a5fa6a95e134d0
|
15295b50cad1e8fe574ba056486a28ae990e251a
|
[
"MIT"
] | null | null | null |
import ast
import operator as op
# supported operators
operators = {ast.Add: op.add, ast.Sub: op.sub, ast.Mult: op.mul,
ast.Div: op.truediv, ast.Pow: op.pow, ast.BitXor: op.xor,
ast.USub: op.neg}
def eval_expr(expr):
return eval_(ast.parse(expr, mode='eval').body)
def eval_(node):
if isinstance(node, ast.Num): # <number>
return node.n
elif isinstance(node, ast.BinOp): # <left> <operator> <right>
return operators[type(node.op)](eval_(node.left), eval_(node.right))
elif isinstance(node, ast.UnaryOp): # <operator> <operand> e.g., -1
return operators[type(node.op)](eval_(node.operand))
else:
raise TypeError(node)
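# Quick illustrative checks of the supported grammar (not part of the
# original module):
if __name__ == '__main__':
    assert eval_expr('2*(3+4)') == 14   # nested BinOp nodes recurse left/right
    assert eval_expr('-2**2') == -4     # unary minus applies after the power
    assert eval_expr('2^3') == 1        # ast.BitXor maps to XOR, not exponentiation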
| 31.772727 | 76 | 0.638054 |
f0c3ed812ee088a4ffb5301b93896a79122fa54c
| 3,453 |
py
|
Python
|
microquake/core/helpers/velocity.py
|
jeanphilippemercier/microquake
|
0b9d07be11eddd64619e46939c320487531602a3
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
microquake/core/helpers/velocity.py
|
jeanphilippemercier/microquake
|
0b9d07be11eddd64619e46939c320487531602a3
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
microquake/core/helpers/velocity.py
|
jeanphilippemercier/microquake
|
0b9d07be11eddd64619e46939c320487531602a3
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
from os import environ, path
import numpy as np
from microquake.core.data.grid import create, read_grid
from microquake.core.settings import settings
def get_current_velocity_model_id(phase='P'):
"""
Return the velocity model ID for a specificed phase
:param phase: phase (possible values 'P', 'S'
:return: resource_identifier
"""
if phase.upper() == 'P':
v_path = path.join(settings.common_dir,
settings.grids.velocities.vp) + '.rid'
elif phase.upper() == 'S':
v_path = path.join(settings.common_dir,
settings.grids.velocities.vs) + '.rid'
with open(v_path) as ris:
return ris.read()
def get_velocities():
"""
returns velocity models
"""
grids = settings.grids
if grids.velocities.homogeneous:
vp = create(**grids)
vp.data *= grids.velocities.vp
vp.resource_id = get_current_velocity_model_id('P')
vs = create(**grids)
vs.data *= grids.velocities.vs
vs.resource_id = get_current_velocity_model_id('S')
else:
if grids.velocities.source == 'local':
format = grids.velocities.format
vp_path = path.join(settings.common_dir,
grids.velocities.vp)
vp = read_grid(vp_path, format=format)
vp.resource_id = get_current_velocity_model_id('P')
vs_path = path.join(settings.common_dir,
grids.velocities.vs)
vs = read_grid(vs_path, format=format)
vs.resource_id = get_current_velocity_model_id('S')
elif settings['grids.velocities.local']:
# TODO: read the velocity grids from the server
pass
return vp, vs
def create_velocities():
    # Note: this function is a stopgap; new velocity models will be created
    # in the future, making it obsolete.
z = [1168, 459, -300]
Vp_z = [4533, 5337, 5836]
Vs_z = [2306, 2885, 3524]
vp = create(**settings.grids)
vs = create(**settings.grids)
origin = settings.grids.origin
zis = [int(vp.transform_to([origin[0], origin[1], z_])[2]) for z_ in z]
vp.data[:, :, zis[0]:] = Vp_z[0]
vs.data[:, :, zis[0]:] = Vs_z[0]
vp.data[:, :, zis[1]:zis[0]] = np.linspace(Vp_z[1], Vp_z[0], zis[0] - zis[1])
vs.data[:, :, zis[1]:zis[0]] = np.linspace(Vs_z[1], Vs_z[0], zis[0] - zis[1])
vp.data[:, :, zis[2]:zis[1]] = np.linspace(Vp_z[2], Vp_z[1], zis[1] - zis[2])
vs.data[:, :, zis[2]:zis[1]] = np.linspace(Vs_z[2], Vs_z[1], zis[1] - zis[2])
vp.data[:, :, :zis[2]] = Vp_z[2]
vs.data[:, :, :zis[2]] = Vs_z[2]
# TODO this block looks unused to me, should we use it?
(lx, ly, lz) = vp.shape
x = [vp.transform_from(np.array([x_, 0, 0]))[0] for x_ in range(0, lx)]
y = [vp.transform_from(np.array([0, y_, 0]))[1] for y_ in range(0, ly)]
z = [vp.transform_from(np.array([0, 0, z_]))[2] for z_ in range(0, lz)]
vp.write(path.join(settings.common_dir, 'velocities/vp'), format='NLLOC')
vs.write(path.join(settings.common_dir, 'velocities/vs'), format='NLLOC')
with open(path.join(settings.common_dir, 'velocities/vp.rid'), 'w') as vp:
vp.write('initial_1d_vp_velocity_model_2018_01')
with open(path.join(settings.common_dir, 'velocities/vs.rid'), 'w') as vs:
vs.write('initial_1d_vs_velocity_model_2018_01')
| 32.885714 | 114 | 0.601216 |
1380be3207ee175f852fa49395d105708969ec55
| 603 |
py
|
Python
|
ozpcenter/models/custom_field_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1 |
2018-10-05T17:03:01.000Z
|
2018-10-05T17:03:01.000Z
|
ozpcenter/models/custom_field_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1 |
2017-01-06T19:20:32.000Z
|
2017-01-06T19:20:32.000Z
|
ozpcenter/models/custom_field_type.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 7 |
2016-12-16T15:42:05.000Z
|
2020-09-05T01:11:27.000Z
|
from django.db import models
from .external_model import ExternalModel
class CustomFieldType(ExternalModel):
"""
Custom Field Type
    Defines what kind of field a Custom Field will represent
"""
class Meta:
db_table = 'custom_field_type'
name = models.CharField(max_length=50)
display_name = models.CharField(max_length=50)
media_type = models.CharField(max_length=255)
options = models.CharField(max_length=4000, null=True, blank=True)
def __repr__(self):
return self.display_name
def __str__(self):
return self.display_name
| 23.192308 | 70 | 0.708126 |
4c047db4ad3a12d4494173e2ef91fa044d2614a7
| 1,715 |
py
|
Python
|
Menchetti/NN_model/predict_one.py
|
LucaLand/SistemiDigitaliM20-21
|
e900327cc905d21caae72c42ec027f991b6ac2d9
|
[
"MIT"
] | 9 |
2021-02-07T22:53:34.000Z
|
2022-03-14T21:47:30.000Z
|
Menchetti/NN_model/predict_one.py
|
LucaLand/SistemiDigitaliM20-21
|
e900327cc905d21caae72c42ec027f991b6ac2d9
|
[
"MIT"
] | null | null | null |
Menchetti/NN_model/predict_one.py
|
LucaLand/SistemiDigitaliM20-21
|
e900327cc905d21caae72c42ec027f991b6ac2d9
|
[
"MIT"
] | 18 |
2021-02-07T18:30:47.000Z
|
2022-01-22T16:57:40.000Z
|
# TODO
import tensorflow as tf
import numpy as np
import pandas as pd
import cv2
import sys
sys.path.append('../../')
from utils.DBR_utils import get_dog_from_predictions, resize_square_image_keep_ratio_border, print_human_readable_dog_cli
from Network import DBR_Network
# -- Global variables --
# Paths
modelName = 'DBR1.9.8-8000'
trained_model_path = './trainedModel/'
csv_output_values = '../../_DBR_dataset_128px_v4/_breeds/unique_breed_translation.csv'
test_image_path = '../../_single_test/leonberg.jpg'
# Variables
img_size = (128, 128, 3) # dataset image size
output_values_it = pd.read_csv(csv_output_values)['IT']
def get_test_image(image_path):
image = cv2.imread(image_path)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
#RESIZE IMAGE FOR PREDICT
image = resize_square_image_keep_ratio_border(image, img_size[0])
image = image / 255.
return np.expand_dims(image,0)
# Predict one
if __name__ == '__main__':
trainedModel = trained_model_path + modelName
x = tf.placeholder(tf.float32,shape=[1,img_size[0],img_size[1],img_size[2]], name='input')
network = DBR_Network(x=x, is_training=False)
loader = tf.train.Saver()
config = tf.ConfigProto(allow_soft_placement=True)
with tf.Session(config=config) as session:
loader.restore(session, trainedModel)
image = get_test_image(test_image_path)
predicted_value, output_percentage = session.run([network.prediction, network.output_percentage], feed_dict={x:image})
dog = get_dog_from_predictions(output_percentage, output_values_it, test_image_path)
        print('\n- Result -')
res = print_human_readable_dog_cli(dog)
print(res)
| 32.358491 | 126 | 0.739942 |
6d3b8d6b83bfb2097173706f11fb4065eb0610c6
| 2,110 |
py
|
Python
|
irctest/server_tests/away_notify.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 16 |
2015-12-20T16:24:54.000Z
|
2021-06-03T18:00:03.000Z
|
irctest/server_tests/away_notify.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 66 |
2015-12-20T00:23:25.000Z
|
2021-08-14T09:57:04.000Z
|
irctest/server_tests/away_notify.py
|
FiskFan1999/ergochat_irctest
|
da005d7d2492bf31c4bdeb46108240766c69d0ad
|
[
"MIT"
] | 3 |
2021-12-04T21:18:41.000Z
|
2022-03-22T01:42:36.000Z
|
"""
<https://ircv3.net/specs/extensions/away-notify-3.1>
"""
from irctest import cases
class AwayNotifyTestCase(cases.BaseServerTestCase, cases.OptionalityHelper):
@cases.mark_capabilities("away-notify")
def testAwayNotify(self):
"""Basic away-notify test."""
self.connectClient("foo", capabilities=["away-notify"], skip_if_cap_nak=True)
self.getMessages(1)
self.joinChannel(1, "#chan")
self.connectClient("bar")
self.getMessages(2)
self.joinChannel(2, "#chan")
self.getMessages(2)
self.getMessages(1)
self.sendLine(2, "AWAY :i'm going away")
self.getMessages(2)
awayNotify = self.getMessage(1)
self.assertMessageMatch(awayNotify, command="AWAY", params=["i'm going away"])
self.assertTrue(
awayNotify.prefix.startswith("bar!"),
"Unexpected away-notify source: %s" % (awayNotify.prefix,),
)
@cases.mark_capabilities("away-notify")
def testAwayNotifyOnJoin(self):
"""The away-notify specification states:
"Clients will be sent an AWAY message [...] when a user joins
and has an away message set."
"""
self.connectClient("foo", capabilities=["away-notify"], skip_if_cap_nak=True)
self.getMessages(1)
self.joinChannel(1, "#chan")
self.connectClient("bar")
self.getMessages(2)
self.sendLine(2, "AWAY :i'm already away")
self.getMessages(2)
self.joinChannel(2, "#chan")
self.getMessages(2)
messages = [msg for msg in self.getMessages(1) if msg.command == "AWAY"]
self.assertEqual(
len(messages),
1,
"Someone away joined a channel, "
"but users in the channel did not get AWAY messages.",
)
awayNotify = messages[0]
self.assertMessageMatch(awayNotify, command="AWAY", params=["i'm already away"])
self.assertTrue(
awayNotify.prefix.startswith("bar!"),
"Unexpected away-notify source: %s" % (awayNotify.prefix,),
)
| 33.492063 | 88 | 0.609005 |
8aca6afefb6f90cafb3da7266eac8c7539659933
| 11,495 |
py
|
Python
|
varnish.py
|
360youlun/python-varnish
|
6dd614f3081e277e7ce23fa6a6fb1ca45c914e43
|
[
"BSD-3-Clause"
] | 1 |
2015-04-29T06:12:33.000Z
|
2015-04-29T06:12:33.000Z
|
varnish.py
|
360youlun/python-varnish
|
6dd614f3081e277e7ce23fa6a6fb1ca45c914e43
|
[
"BSD-3-Clause"
] | null | null | null |
varnish.py
|
360youlun/python-varnish
|
6dd614f3081e277e7ce23fa6a6fb1ca45c914e43
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Simple Python interface for the Varnish management port.
Tested against
Varnish v3.0.2
Varnish Cache CLI 1.0
Supports the following commands
help [command]
ping [timestamp]
auth response
quit
status
start
stop
vcl.load <configname> <filename>
vcl.inline <configname> <quoted_VCLstring>
vcl.use <configname>
vcl.discard <configname>
vcl.list
vcl.show <configname>
param.show [-l] [<param>]
param.set <param> <value>
ban.url <regexp>
ban <field> <operator> <arg> [&& <field> <oper> <arg>]...
ban.list
Also VarnishHandler.purge_url will do HTTP purges. See below for configuration details
https://www.varnish-cache.org/docs/3.0/tutorial/purging.html
"""
from telnetlib import Telnet
from threading import Thread
from httplib import HTTPConnection
from urlparse import urlparse
from hashlib import sha256
import logging
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(message)s',
)
def http_purge_url(url):
"""
Do an HTTP PURGE of the given asset.
The URL is run through urlparse and must point to the varnish instance not the varnishadm
"""
url = urlparse(url)
connection = HTTPConnection(url.hostname, url.port or 80)
connection.request('PURGE', '%s?%s' % (url.path or '/', url.query), '',
{'Host': url.hostname})
response = connection.getresponse()
if response.status != 200:
logging.error('Purge failed with status: %s' % response.status)
return response
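# Example (hedged: the hostname/port below are placeholders for a real
# Varnish instance with a PURGE-enabled VCL):
#   response = http_purge_url('http://localhost:6081/static/logo.png')
#   # response.status == 200 on a successful purge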
class VarnishHandler(Telnet):
def __init__(self, host_port_timeout, secret=None, **kwargs):
if isinstance(host_port_timeout, basestring):
host_port_timeout = host_port_timeout.split(':')
Telnet.__init__(self, *host_port_timeout)
(status, length), content = self._read()
if status == 107 and secret is not None:
self.auth(secret, content)
elif status != 200:
logging.error('Connecting failed with status: %i' % status)
def _read(self):
(status, length), content = map(int, self.read_until('\n').split()), ''
while len(content) < length:
content += self.read_some()
return (status, length), content[:-1]
def fetch(self, command):
"""
Run a command on the Varnish backend and return the result
return value is a tuple of ((status, length), content)
"""
logging.debug('SENT: %s: %s' % (self.host, command))
self.write('%s\n' % command)
while 1:
buffer = self.read_until('\n').strip()
if len(buffer):
break
status, length = map(int, buffer.split())
content = ''
assert status == 200, 'Bad response code: {status} {text} ({command})'.format(status=status,
text=self.read_until(
'\n').strip(),
command=command)
while len(content) < length:
content += self.read_until('\n')
logging.debug('RECV: %s: %dB %s' % (status, length, content[:30]))
self.read_eager()
return (status, length), content
# Service control methods
def start(self):
"""start Start the Varnish cache process if it is not already running."""
return self.fetch('start')
def stop(self):
"""stop Stop the Varnish cache process."""
return self.fetch('stop')
def quit(self):
"""quit Close the connection to the varnish admin port."""
return self.close()
def auth(self, secret, content):
challenge = content[:32]
response = sha256('%s\n%s\n%s\n' % (challenge, secret, challenge))
response_str = 'auth %s' % response.hexdigest()
self.fetch(response_str)
# Information methods
def ping(self, timestamp=None):
"""
ping [timestamp]
Ping the Varnish cache process, keeping the connection alive.
"""
cmd = 'ping'
if timestamp: cmd += ' %s' % timestamp
return tuple(map(float, self.fetch(cmd)[1].split()[1:]))
def status(self):
"""status Check the status of the Varnish cache process."""
return self.fetch('status')[1]
def help(self, command=None):
"""
help [command]
Display a list of available commands.
If the command is specified, display help for this command.
"""
cmd = 'help'
if command: cmd += ' %s' % command
return self.fetch(cmd)[1]
# VCL methods
def vcl_load(self, configname, filename):
"""
vcl.load configname filename
Create a new configuration named configname with the contents of the specified file.
"""
return self.fetch('vcl.load %s %s' % (configname, filename))
def vcl_inline(self, configname, vclcontent):
"""
vcl.inline configname vcl
Create a new configuration named configname with the VCL code specified by vcl, which must be a
quoted string.
"""
return self.fetch('vcl.inline %s %s' % (configname, vclcontent))
def vcl_show(self, configname):
"""
vcl.show configname
Display the source code for the specified configuration.
"""
return self.fetch('vcl.show %s' % configname)
def vcl_use(self, configname):
"""
vcl.use configname
Start using the configuration specified by configname for all new requests. Existing requests
        will continue using whichever configuration was in use when they arrived.
"""
return self.fetch('vcl.use %s' % configname)
def vcl_discard(self, configname):
"""
vcl.discard configname
Discard the configuration specified by configname. This will have no effect if the specified
configuration has a non-zero reference count.
"""
return self.fetch('vcl.discard %s' % configname)
def vcl_list(self):
"""
vcl.list
List available configurations and their respective reference counts. The active configuration
is indicated with an asterisk ("*").
"""
vcls = {}
for line in self.fetch('vcl.list')[1].splitlines():
a = line.split()
vcls[a[2]] = tuple(a[:-1])
return vcls
# Param methods
def param_show(self, param, l=False):
"""
param.show [-l] [param]
Display a list if run-time parameters and their values.
If the -l option is specified, the list includes a brief explanation of each parameter.
If a param is specified, display only the value and explanation for this parameter.
"""
cmd = 'param.show '
if l: cmd += '-l '
return self.fetch(cmd + param)
def param_set(self, param, value):
"""
param.set param value
Set the parameter specified by param to the specified value. See Run-Time Parameters for a list
        of parameters.
"""
self.fetch('param.set %s %s' % (param, value))
# Ban methods
def ban(self, expression):
"""
ban field operator argument [&& field operator argument [...]]
Immediately invalidate all documents matching the ban expression. See Ban Expressions for more
documentation and examples.
"""
return self.fetch('ban %s' % expression)[1]
def ban_url(self, regex):
"""
ban.url regexp
Immediately invalidate all documents whose URL matches the specified regular expression. Please
note that the Host part of the URL is ignored, so if you have several virtual hosts all of them
will be banned. Use ban to specify a complete ban if you need to narrow it down.
"""
return self.fetch('ban req.url ~ %s' % regex)[1]
def ban_list(self):
"""
ban.list
All requests for objects from the cache are matched against items on the ban list. If an object
in the cache is older than a matching ban list item, it is considered "banned", and will be
fetched from the backend instead.
When a ban expression is older than all the objects in the cache, it is removed from the list.
ban.list displays the ban list. The output looks something like this (broken into two lines):
0x7fea4fcb0580 1303835108.618863 131G req.http.host ~ www.myhost.com && req.url ~ /some/url
The first field is the address of the ban.
The second is the time of entry into the list, given as a high precision timestamp.
        The third field describes how many objects point to this ban. When an object is compared to a ban
        the object is marked with a reference to the newest ban it was tested against. This isn't really
        useful unless you're debugging.
        A "G" marks that the ban is "Gone", meaning it has been marked as a duplicate or it is no longer
        valid. It stays in the list for efficiency reasons.
        Then follows the actual ban itself.
"""
return self.fetch('ban.list')[1]
def purge_url(self, url):
"""
Wrapper for http_purge_url
"""
return http_purge_url(url)
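# Usage sketch for VarnishHandler (address and secret are placeholders):
#   handler = VarnishHandler('127.0.0.1:6082', secret='s3cr3t')
#   handler.ping()
#   print(handler.ban_list())
#   handler.quit()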
class ThreadedRunner(Thread):
"""
Runs commands on a particular varnish server in a separate thread
"""
def __init__(self, addr, *commands, **kwargs):
self.addr = addr
self.commands = commands
self.kwargs = kwargs
super(ThreadedRunner, self).__init__()
def run(self):
handler = VarnishHandler(self.addr, **self.kwargs)
for cmd in self.commands:
if isinstance(cmd, tuple) and len(cmd) > 1:
getattr(handler, cmd[0].replace('.', '_'))(*cmd[1:])
else:
getattr(handler, cmd.replace('.', '_'))()
handler.close()
def run(addr, *commands, **kwargs):
"""
Non-threaded batch command runner returning output results
"""
results = []
handler = VarnishHandler(addr, **kwargs)
for cmd in commands:
        if isinstance(cmd, tuple) and len(cmd) > 1:
            # A (command, arg, ...) tuple maps onto a single handler call.
            results.append(getattr(handler, cmd[0].replace('.', '_'))(*cmd[1:]))
else:
results.append(getattr(handler, cmd.replace('.', '_'))(*commands[1:]))
break
handler.close()
return results
class VarnishManager(object):
def __init__(self, servers):
if not len(servers):
logging.warn('No servers found, please declare some')
self.servers = servers
def run(self, *commands, **kwargs):
threaded = kwargs.pop('threaded', False)
        if threaded:
            for server in self.servers:
                ThreadedRunner(server, *commands, **kwargs).start()
        else:
            return [run(server, *commands, **kwargs)
                    for server in self.servers]
def help(self, *args):
return run(self.servers[0], *('help',) + args)[0]
def close(self):
self.run('close', threaded=True)
self.servers = ()
| 34.728097 | 110 | 0.590779 |
6f407ba37eb27fe2e3538737619ce69f3da960e8
| 10,542 |
py
|
Python
|
qiskit/opflow/gradients/derivative_base.py
|
Roshan-Thomas/qiskit-terra
|
77219b5c7b7146b1545c5e5190739b36f4064b2f
|
[
"Apache-2.0"
] | 1,599 |
2018-07-10T10:59:12.000Z
|
2022-03-31T23:56:25.000Z
|
qiskit/opflow/gradients/derivative_base.py
|
Roshan-Thomas/qiskit-terra
|
77219b5c7b7146b1545c5e5190739b36f4064b2f
|
[
"Apache-2.0"
] | 5,244 |
2018-07-10T06:20:13.000Z
|
2022-03-31T22:18:48.000Z
|
qiskit/opflow/gradients/derivative_base.py
|
Roshan-Thomas/qiskit-terra
|
77219b5c7b7146b1545c5e5190739b36f4064b2f
|
[
"Apache-2.0"
] | 1,409 |
2018-07-10T02:16:12.000Z
|
2022-03-31T09:01:32.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" DerivativeBase Class """
import warnings
from abc import abstractmethod
from typing import Callable, Iterable, List, Optional, Tuple, Union
import numpy as np
from qiskit.utils.quantum_instance import QuantumInstance
from qiskit.circuit import ParameterExpression, ParameterVector
from qiskit.providers import BaseBackend, Backend
from ..converters.converter_base import ConverterBase
from ..expectations import ExpectationBase, PauliExpectation
from ..list_ops.composed_op import ComposedOp
from ..list_ops.list_op import ListOp
from ..list_ops.tensored_op import TensoredOp
from ..operator_base import OperatorBase
from ..primitive_ops.primitive_op import PrimitiveOp
from ..state_fns import StateFn, OperatorStateFn
OperatorType = Union[StateFn, PrimitiveOp, ListOp]
class DerivativeBase(ConverterBase):
r"""Base class for differentiating opflow objects.
Converter for differentiating opflow objects and handling
things like properly differentiating combo_fn's and enforcing product rules
when operator coefficients are parameterized.
This is distinct from CircuitGradient converters which use quantum
techniques such as parameter shifts and linear combination of unitaries
to compute derivatives of circuits.
CircuitGradient - uses quantum techniques to get derivatives of circuits
DerivativeBase - uses classical techniques to differentiate opflow data structures
"""
# pylint: disable=arguments-differ
@abstractmethod
def convert(
self,
operator: OperatorBase,
params: Optional[
Union[ParameterVector, ParameterExpression, List[ParameterExpression]]
] = None,
) -> OperatorBase:
r"""
Args:
operator: The operator we are taking the gradient, Hessian or QFI of
params: The parameters we are taking the gradient, Hessian or QFI with respect to.
Returns:
An operator whose evaluation yields the gradient, Hessian or QFI.
Raises:
ValueError: If ``params`` contains a parameter not present in ``operator``.
"""
raise NotImplementedError
def gradient_wrapper(
self,
operator: OperatorBase,
bind_params: Union[ParameterExpression, ParameterVector, List[ParameterExpression]],
grad_params: Optional[
Union[
ParameterExpression,
ParameterVector,
List[ParameterExpression],
Tuple[ParameterExpression, ParameterExpression],
List[Tuple[ParameterExpression, ParameterExpression]],
]
] = None,
backend: Optional[Union[BaseBackend, Backend, QuantumInstance]] = None,
expectation: Optional[ExpectationBase] = None,
) -> Callable[[Iterable], np.ndarray]:
"""Get a callable function which provides the respective gradient, Hessian or QFI for given
parameter values. This callable can be used as gradient function for optimizers.
Args:
operator: The operator for which we want to get the gradient, Hessian or QFI.
bind_params: The operator parameters to which the parameter values are assigned.
grad_params: The parameters with respect to which we are taking the gradient, Hessian
or QFI. If grad_params = None, then grad_params = bind_params
backend: The quantum backend or QuantumInstance to use to evaluate the gradient,
Hessian or QFI.
expectation: The expectation converter to be used. If none is set then
`PauliExpectation()` is used.
Returns:
Function to compute a gradient, Hessian or QFI. The function
takes an iterable as argument which holds the parameter values.
"""
from ..converters import CircuitSampler
if not grad_params:
grad_params = bind_params
grad = self.convert(operator, grad_params)
if expectation is None:
expectation = PauliExpectation()
grad = expectation.convert(grad)
def gradient_fn(p_values):
p_values_dict = dict(zip(bind_params, p_values))
if not backend:
converter = grad.assign_parameters(p_values_dict)
return np.real(converter.eval())
else:
p_values_dict = {k: [v] for k, v in p_values_dict.items()}
converter = CircuitSampler(backend=backend).convert(grad, p_values_dict)
return np.real(converter.eval()[0])
return gradient_fn
@staticmethod
def parameter_expression_grad(
param_expr: ParameterExpression, param: ParameterExpression
) -> Union[ParameterExpression, float]:
"""Get the derivative of a parameter expression w.r.t. the given parameter.
Args:
param_expr: The Parameter Expression for which we compute the derivative
param: Parameter w.r.t. which we want to take the derivative
Returns:
ParameterExpression representing the gradient of param_expr w.r.t. param
"""
warnings.warn(
"The DerivativeBase.parameter_expression_grad method is deprecated as of "
"Qiskit Terra 0.18.0 and will be removed no earlier than 3 months after "
"the release date. Use the ParameterExpression.gradient method instead for "
"a direct replacement.",
DeprecationWarning,
stacklevel=2,
)
return _coeff_derivative(param_expr, param)
@classmethod
def _erase_operator_coeffs(cls, operator: OperatorBase) -> OperatorBase:
"""This method traverses an input operator and deletes all of the coefficients
Args:
operator: An operator type object.
Returns:
An operator which is equal to the input operator but whose coefficients
have all been set to 1.0
Raises:
TypeError: If unknown operator type is reached.
"""
if isinstance(operator, PrimitiveOp):
return operator / operator.coeff
op_coeff = operator.coeff # type: ignore
return (operator / op_coeff).traverse(cls._erase_operator_coeffs)
@classmethod
def _factor_coeffs_out_of_composed_op(cls, operator: OperatorBase) -> OperatorBase:
"""Factor all coefficients of ComposedOp out into a single global coefficient.
Part of the automatic differentiation logic inside of Gradient and Hessian
counts on the fact that no product or chain rules need to be computed between
operators or coefficients within a ComposedOp. To ensure this condition is met,
this function traverses an operator and replaces each ComposedOp with an equivalent
ComposedOp, but where all coefficients have been factored out and placed onto the
ComposedOp. Note that this cannot be done properly if an OperatorMeasurement contains
        a SummedOp as its primitive.
Args:
operator: The operator whose coefficients are being re-organized
Returns:
An operator equivalent to the input operator, but whose coefficients have been
reorganized
Raises:
ValueError: If an element within a ComposedOp has a primitive of type ListOp,
then it is not possible to factor all coefficients out of the ComposedOp.
"""
if isinstance(operator, ListOp) and not isinstance(operator, ComposedOp):
return operator.traverse(cls._factor_coeffs_out_of_composed_op)
if isinstance(operator, ComposedOp):
total_coeff = operator.coeff
take_norm_of_coeffs = False
for k, op in enumerate(operator.oplist):
if take_norm_of_coeffs:
total_coeff *= op.coeff * np.conj(op.coeff) # type: ignore
else:
total_coeff *= op.coeff # type: ignore
if hasattr(op, "primitive"):
prim = op.primitive # type: ignore
if isinstance(op, StateFn) and isinstance(prim, TensoredOp):
# Check if any of the coefficients in the TensoredOp is a
# ParameterExpression
for prim_op in prim.oplist:
# If a coefficient is a ParameterExpression make sure that the
# coefficients are pulled together correctly
if isinstance(prim_op.coeff, ParameterExpression):
prim_tensored = StateFn(
prim.reduce(), is_measurement=op.is_measurement, coeff=op.coeff
)
operator.oplist[k] = prim_tensored
return operator.traverse(cls._factor_coeffs_out_of_composed_op)
elif isinstance(prim, ListOp):
raise ValueError(
"This operator was not properly decomposed. "
"By this point, all operator measurements should "
"contain single operators, otherwise the coefficient "
"gradients will not be handled properly."
)
if hasattr(prim, "coeff"):
if take_norm_of_coeffs:
total_coeff *= prim._coeff * np.conj(prim._coeff)
else:
total_coeff *= prim._coeff
if isinstance(op, OperatorStateFn) and op.is_measurement:
take_norm_of_coeffs = True
return cls._erase_operator_coeffs(operator).mul(total_coeff)
else:
return operator
def _coeff_derivative(coeff, param):
if isinstance(coeff, ParameterExpression) and len(coeff.parameters) > 0:
return coeff.gradient(param)
return 0
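# Illustrative use of gradient_wrapper via a concrete subclass such as
# ``Gradient`` from this package (hedged sketch; ``ansatz`` and ``H`` are
# placeholders for a parameterized circuit and an observable):
#   theta = Parameter('theta')
#   op = ~StateFn(H) @ StateFn(ansatz)   # expectation value as an OperatorBase
#   grad_fn = Gradient().gradient_wrapper(op, bind_params=[theta])
#   grad_fn([0.2])   # np.ndarray with d<H>/dtheta evaluated at theta = 0.2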
| 43.925 | 99 | 0.643426 |
f1be8685e57b96e685ef0f775010459c86235f0a
| 7,884 |
py
|
Python
|
src/python/api/element_test.py
|
AustralianDisabilityLimited/incubator-wave
|
0858e0c60b42e2cac38aed44d8b16f9e8bcb377a
|
[
"MIT"
] | null | null | null |
src/python/api/element_test.py
|
AustralianDisabilityLimited/incubator-wave
|
0858e0c60b42e2cac38aed44d8b16f9e8bcb377a
|
[
"MIT"
] | null | null | null |
src/python/api/element_test.py
|
AustralianDisabilityLimited/incubator-wave
|
0858e0c60b42e2cac38aed44d8b16f9e8bcb377a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python2.4
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for the element module."""
import base64
import unittest
import element
import util
class TestElement(unittest.TestCase):
"""Tests for the element.Element class."""
def testProperties(self):
el = element.Element(element.Gadget.class_type,
key='value')
self.assertEquals('value', el.key)
def testFormElement(self):
el = element.Input('input')
self.assertEquals(element.Input.class_type, el.type)
self.assertEquals(el.value, '')
self.assertEquals(el.name, 'input')
def testImage(self):
image = element.Image('http://test.com/image.png', width=100, height=100)
self.assertEquals(element.Image.class_type, image.type)
self.assertEquals(image.url, 'http://test.com/image.png')
self.assertEquals(image.width, 100)
self.assertEquals(image.height, 100)
def testAttachment(self):
attachment = element.Attachment(caption='My Favorite', data='SomefakeData')
self.assertEquals(element.Attachment.class_type, attachment.type)
self.assertEquals(attachment.caption, 'My Favorite')
self.assertEquals(attachment.data, 'SomefakeData')
def testGadget(self):
gadget = element.Gadget('http://test.com/gadget.xml')
self.assertEquals(element.Gadget.class_type, gadget.type)
self.assertEquals(gadget.url, 'http://test.com/gadget.xml')
def testInstaller(self):
installer = element.Installer('http://test.com/installer.xml')
self.assertEquals(element.Installer.class_type, installer.type)
self.assertEquals(installer.manifest, 'http://test.com/installer.xml')
def testSerialize(self):
image = element.Image('http://test.com/image.png', width=100, height=100)
s = util.serialize(image)
k = s.keys()
k.sort()
# we should really only have three things to serialize
props = s['properties']
self.assertEquals(len(props), 3)
self.assertEquals(props['url'], 'http://test.com/image.png')
self.assertEquals(props['width'], 100)
self.assertEquals(props['height'], 100)
def testSerializeAttachment(self):
attachment = element.Attachment(caption='My Favorite', data='SomefakeData')
s = util.serialize(attachment)
k = s.keys()
k.sort()
# we should really have two things to serialize
props = s['properties']
self.assertEquals(len(props), 2)
self.assertEquals(props['caption'], 'My Favorite')
self.assertEquals(props['data'], base64.encodestring('SomefakeData'))
self.assertEquals(attachment.data, 'SomefakeData')
def testSerializeLine(self):
line = element.Line(element.Line.TYPE_H1, alignment=element.Line.ALIGN_LEFT)
s = util.serialize(line)
k = s.keys()
k.sort()
    # we should really only have two things to serialize
props = s['properties']
self.assertEquals(len(props), 2)
self.assertEquals(props['alignment'], 'l')
self.assertEquals(props['lineType'], 'h1')
def testSerializeGadget(self):
gadget = element.Gadget('http://test.com', {'prop1': 'a', 'prop_cap': None})
s = util.serialize(gadget)
k = s.keys()
k.sort()
# we should really only have three things to serialize
props = s['properties']
self.assertEquals(len(props), 3)
self.assertEquals(props['url'], 'http://test.com')
self.assertEquals(props['prop1'], 'a')
self.assertEquals(props['prop_cap'], None)
def testGadgetElementFromJson(self):
url = 'http://www.foo.com/gadget.xml'
json = {
'type': element.Gadget.class_type,
'properties': {
'url': url,
}
}
gadget = element.Element.from_json(json)
self.assertEquals(element.Gadget.class_type, gadget.type)
self.assertEquals(url, gadget.url)
def testImageElementFromJson(self):
url = 'http://www.foo.com/image.png'
width = '32'
height = '32'
attachment_id = '2'
caption = 'Test Image'
json = {
'type': element.Image.class_type,
'properties': {
'url': url,
'width': width,
'height': height,
'attachmentId': attachment_id,
'caption': caption,
}
}
image = element.Element.from_json(json)
self.assertEquals(element.Image.class_type, image.type)
self.assertEquals(url, image.url)
self.assertEquals(width, image.width)
self.assertEquals(height, image.height)
self.assertEquals(attachment_id, image.attachmentId)
self.assertEquals(caption, image.caption)
def testAttachmentElementFromJson(self):
caption = 'fake caption'
data = 'fake data'
mime_type = 'fake mime'
attachment_id = 'fake id'
attachment_url = 'fake URL'
json = {
'type': element.Attachment.class_type,
'properties': {
'caption': caption,
'data': data,
'mimeType': mime_type,
'attachmentId': attachment_id,
'attachmentUrl': attachment_url,
}
}
attachment = element.Element.from_json(json)
self.assertEquals(element.Attachment.class_type, attachment.type)
self.assertEquals(caption, attachment.caption)
self.assertEquals(data, attachment.data)
self.assertEquals(mime_type, attachment.mimeType)
self.assertEquals(attachment_id, attachment.attachmentId)
self.assertEquals(attachment_url, attachment.attachmentUrl)
def testFormElementFromJson(self):
name = 'button'
value = 'value'
default_value = 'foo'
json = {
'type': element.Label.class_type,
'properties': {
'name': name,
'value': value,
'defaultValue': default_value,
}
}
el = element.Element.from_json(json)
self.assertEquals(element.Label.class_type, el.type)
self.assertEquals(name, el.name)
self.assertEquals(value, el.value)
def testCanInstantiate(self):
bag = [element.Check(name='check', value='value'),
element.Button(name='button', value='caption'),
element.Input(name='input', value='caption'),
element.Label(label_for='button', caption='caption'),
element.RadioButton(name='name', group='group'),
element.RadioButtonGroup(name='name', value='value'),
element.Password(name='name', value='geheim'),
element.TextArea(name='name', value='\n\n\n'),
element.Installer(manifest='test.com/installer.xml'),
element.Line(line_type='type',
indent='3',
alignment='r',
direction='d'),
element.Gadget(url='test.com/gadget.xml',
props={'key1': 'val1', 'key2': 'val2'}),
element.Image(url='test.com/image.png', width=100, height=200),
element.Attachment(caption='fake caption', data='fake data')]
types_constructed = set([type(x) for x in bag])
types_required = set(element.ALL.values())
missing_required = types_constructed.difference(types_required)
self.assertEquals(missing_required, set())
missing_constructed = types_required.difference(types_constructed)
self.assertEquals(missing_constructed, set())
if __name__ == '__main__':
unittest.main()
| 36 | 81 | 0.67174 |
2fef585f03f53a3bd585fce4cda87a28a6522376 | 861 | py | Python
train_test_split.py | KennethTBarrett/Decision-Tree-Algorithm-LSCS | a729ad640f03d867c8e7b6117d4339f9d1f2a5c9 | ["MIT"] | null | null | null
train_test_split.py | KennethTBarrett/Decision-Tree-Algorithm-LSCS | a729ad640f03d867c8e7b6117d4339f9d1f2a5c9 | ["MIT"] | null | null | null
train_test_split.py | KennethTBarrett/Decision-Tree-Algorithm-LSCS | a729ad640f03d867c8e7b6117d4339f9d1f2a5c9 | ["MIT"] | null | null | null
import random
def train_test_split(df, test_size):
"""Randomly selects data, and splits it based upon
the specified test size.
Usage: `train_test_split(df, test_size)`
Returns training and testing data.
    If a float is passed as test_size, it's treated as a percentage."""
# If our test_size is a float, we're going to treat it as a percentage.
# We need to calculate and round, because k below requires an integer.
if isinstance(test_size, float):
test_size = round(test_size * len(df))
# Sampling test indices.
test_idx = random.sample(population=df.index.tolist(), k=test_size)
train_df = df.drop(test_idx) # Training data / test indices dropped.
test_df = df.loc[test_idx] # Testing data / only test indices.
return train_df, test_df
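# A hedged usage sketch for train_test_split above; the toy DataFrame and the
# 0.2 test fraction are illustrative assumptions.
import pandas as pd
df = pd.DataFrame({"x": range(10), "y": range(10)})
train_df, test_df = train_test_split(df, test_size=0.2)
print(len(train_df), len(test_df))  # 8 2 -- 20% of the 10 rows become test data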
| 37.434783 | 79 | 0.653891 |
db50bb9fe39090636ef96bb1a2a0d70439ad8f8d | 1,056 | py | Python
_sadm/plugin/os/pkg/debian/deploy.py | jrmsdev/pysadm | 0d6b3f0c8d870d83ab499c8d9487ec8e3a89fc37 | ["BSD-3-Clause"] | 1 | 2019-10-15T08:37:56.000Z | 2019-10-15T08:37:56.000Z
_sadm/plugin/os/pkg/debian/deploy.py | jrmsdev/pysadm | 0d6b3f0c8d870d83ab499c8d9487ec8e3a89fc37 | ["BSD-3-Clause"] | null | null | null
_sadm/plugin/os/pkg/debian/deploy.py | jrmsdev/pysadm | 0d6b3f0c8d870d83ab499c8d9487ec8e3a89fc37 | ["BSD-3-Clause"] | null | null | null
# Copyright (c) Jeremías Casteglione <[email protected]>
# See LICENSE file.
from os import environ
from _sadm.utils.cmd import call, callCheck
from .check import check
__all__ = ['deploy']
_cmdenv = environ.copy()
_cmdenv['DEBIAN_FRONTEND'] = 'noninteractive'
def deploy(env):
if env.settings.getboolean('os.pkg', 'update', fallback = False):
env.log('update')
_update()
for diff in check(env, action = 'remove'):
opt, pkg = diff
env.log("%s %s" % (opt, pkg))
_remove(pkg)
for diff in check(env, action = 'install'):
opt, pkg = diff
env.log("%s %s" % (opt, pkg))
_install(pkg)
for diff in check(env, action = 'prune'):
opt, pkg = diff
env.log("%s %s" % (opt, pkg))
_prune(pkg)
def _call(cmd):
callCheck(cmd, env = _cmdenv)
def _update():
_call(['apt-get', 'update'])
def _remove(pkg):
_call(['apt-get', 'autoremove', '-yy', '--purge', pkg])
def _install(pkg):
_call(['apt-get', 'install', '-yy', '--purge', '--no-install-recommends', pkg])
def _prune(pkg):
_call(['apt-get', 'autoremove', '-yy', '--purge', pkg])
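# For context, a minimal sketch of the apt-get invocation that _install("curl")
# amounts to; subprocess.check_call stands in for _sadm.utils.cmd.callCheck
# (an assumption), and the package name "curl" is illustrative.
import subprocess
from os import environ
env = environ.copy()
env['DEBIAN_FRONTEND'] = 'noninteractive'  # suppress interactive debconf prompts
subprocess.check_call(
	['apt-get', 'install', '-yy', '--purge', '--no-install-recommends', 'curl'],
	env=env)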
| 22.956522 | 80 | 0.639205 |
72cc344455810f1c2e6680d4982e13fb6ade5d26 | 1,479 | py | Python
dedomeno/houses/migrations/0093_auto_20170109_1615.py | ginopalazzo/dedomeno | e43df365849102016c8819b2082d2cde9109360f | ["MIT"] | 38 | 2018-03-19T12:52:17.000Z | 2022-02-17T14:45:57.000Z
dedomeno/houses/migrations/0093_auto_20170109_1615.py | ginopalazzo/dedomeno | e43df365849102016c8819b2082d2cde9109360f | ["MIT"] | 7 | 2020-02-11T23:01:40.000Z | 2020-08-06T13:30:58.000Z
dedomeno/houses/migrations/0093_auto_20170109_1615.py | ginopalazzo/dedomeno | e43df365849102016c8819b2082d2cde9109360f | ["MIT"] | 12 | 2019-02-23T22:10:34.000Z | 2022-03-24T12:01:38.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-09 15:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('houses', '0092_office_warehouse'),
]
operations = [
migrations.CreateModel(
name='Garage',
fields=[
('property_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='houses.Property')),
('garage_type', models.CharField(blank=True, max_length=200, null=True)),
('garage_number', models.IntegerField(blank=True, null=True)),
('covered', models.NullBooleanField()),
('elevator', models.NullBooleanField()),
('automatic_door', models.NullBooleanField()),
('security_cameras', models.NullBooleanField()),
('alarm', models.NullBooleanField()),
('security_guard', models.NullBooleanField()),
],
bases=('houses.property',),
),
migrations.RenameField(
model_name='house',
old_name='garage',
new_name='has_garage',
),
migrations.RenameField(
model_name='office',
old_name='garage',
new_name='has_garage',
),
]
| 35.214286 | 194 | 0.583502 |
e39a3462673f9ed38dd61a0d2d9b7f97c656b1fc | 1,412 | py | Python
cbmod/currency/views/wizard/page.py | coinbox/coinbox-mod-currency | e6b2141824fb2a64e74bcb3e7068da8d9d9aaf2c | ["MIT"] | 1 | 2015-11-27T20:59:21.000Z | 2015-11-27T20:59:21.000Z
cbmod/currency/views/wizard/page.py | coinbox/coinbox-mod-currency | e6b2141824fb2a64e74bcb3e7068da8d9d9aaf2c | ["MIT"] | null | null | null
cbmod/currency/views/wizard/page.py | coinbox/coinbox-mod-currency | e6b2141824fb2a64e74bcb3e7068da8d9d9aaf2c | ["MIT"] | null | null | null
from PySide import QtGui
import cbpos
logger = cbpos.get_logger(__name__)
from cbmod.currency.models import Currency
from cbmod.currency.views import CurrenciesPage
from cbmod.base.views.wizard import BaseWizardPage
class CurrencySetupWizardPage(BaseWizardPage):
def __init__(self, parent=None):
super(CurrencySetupWizardPage, self).__init__(parent)
message = cbpos.tr.currency_("Set up the currencies you will be using. You will be able to change them later also.")
self.message = QtGui.QLabel(message)
self.form = CurrenciesPage()
layout = QtGui.QVBoxLayout()
layout.setSpacing(10)
layout.addWidget(self.message)
layout.addWidget(self.form)
self.setLayout(layout)
def initializePage(self):
self.form.populate()
def validatePage(self):
session = cbpos.database.session()
currency = session.query(Currency).first()
if currency is None:
QtGui.QMessageBox.warning(self, cbpos.tr.currency_("No currency"),
cbpos.tr.currency_("You have to set up at least one currency"),
QtGui.QMessageBox.Ok)
return False
cbpos.config["mod.currency", "default"] = currency.id
cbpos.config.save()
return True
| 30.042553 | 124 | 0.616856 |
b3dca57161b55524b497171029abecaf1df84485 | 2,327 | py | Python
src/model.py | SimardeepKaur/Workflows_Group_306 | 33471dfdd17c65dd67f02af7771320606d0c5fa7 | ["MIT"] | null | null | null
src/model.py | SimardeepKaur/Workflows_Group_306 | 33471dfdd17c65dd67f02af7771320606d0c5fa7 | ["MIT"] | null | null | null
src/model.py | SimardeepKaur/Workflows_Group_306 | 33471dfdd17c65dd67f02af7771320606d0c5fa7 | ["MIT"] | null | null | null
#author : Simardeep Kaur
#date : 25 January, 2020
""" Runs classification model on the cleaned data to get the accuarcy on the test results
Usage: src/model.py --train_file=<train_file> --test_file=<test_file>
"""
import os
import pandas as pd
import numpy as np
import altair as alt
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from docopt import docopt
from sklearn.model_selection import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import plot_confusion_matrix
import matplotlib.pyplot as plt
opt = docopt(__doc__)
def main(train_file, test_file):
data = pd.read_csv(train_file)
test_data = pd.read_csv(test_file)
try:
data.to_csv(train_file, index = False)
except:
os.makedirs(os.path.dirname(train_file))
data.to_csv(train_file, index = False)
X = data.drop(columns = 'status')
y = data['status']
X_test = test_data.drop(columns = 'status')
y_test = test_data['status']
    # Sanity check: both the train and test splits must contain examples.
    if X.shape[0] == 0 or X_test.shape[0] == 0:
        raise ValueError("Please check the data: empty train or test split.")
#Random forest Classification
parameters = {'max_depth':[2,4,6]}
model = RandomForestClassifier()
clf = GridSearchCV(model, parameters, cv = 5)
clf.fit(X, y)
rf_score = clf.score(X_test, y_test)
rf_data = pd.DataFrame({'rf_score':[round(rf_score, 3)]})
rf_data.to_csv(path_or_buf='data/rf_score.csv', index = False)
#Logistic Regression
lr_model = LogisticRegression(solver = 'liblinear')
lr_model.fit(X, y)
lr_Score = lr_model.score(X_test, y_test)
lr_data = pd.DataFrame({'lr_score':[round(lr_Score, 3)]})
lr_data.to_csv(path_or_buf='data/lr_score.csv', index = False)
disp = plot_confusion_matrix(clf, X_test, y_test,
cmap=plt.cm.Reds,
values_format = 'd')
plt.savefig('img/disp_rf.jpg')
disp = plot_confusion_matrix(lr_model, X_test, y_test,
cmap=plt.cm.Reds,
values_format = 'd')
plt.savefig('img/disp_lr.jpg')
if __name__ == "__main__":
main(opt["--train_file"],opt["--test_file"])
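# A minimal sketch of how docopt parses the usage string above; the CSV file
# names passed via argv are illustrative assumptions.
from docopt import docopt
usage = "Usage: model.py --train_file=<train_file> --test_file=<test_file>"
args = docopt(usage, argv=["--train_file=train.csv", "--test_file=test.csv"])
print(args["--train_file"], args["--test_file"])  # train.csv test.csv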
| 26.747126 | 89 | 0.691448 |
c973e2a8ca43878db2224580d1d51ca9afcf9ab8 | 292,894 | py | Python
sdk/python/pulumi_azure_native/network/v20170301/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null
sdk/python/pulumi_azure_native/network/v20170301/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null
sdk/python/pulumi_azure_native/network/v20170301/_inputs.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = [
'AddressSpaceArgs',
'ApplicationGatewayAuthenticationCertificateArgs',
'ApplicationGatewayBackendAddressArgs',
'ApplicationGatewayBackendAddressPoolArgs',
'ApplicationGatewayBackendHttpSettingsArgs',
'ApplicationGatewayConnectionDrainingArgs',
'ApplicationGatewayFirewallDisabledRuleGroupArgs',
'ApplicationGatewayFrontendIPConfigurationArgs',
'ApplicationGatewayFrontendPortArgs',
'ApplicationGatewayHttpListenerArgs',
'ApplicationGatewayIPConfigurationArgs',
'ApplicationGatewayPathRuleArgs',
'ApplicationGatewayProbeArgs',
'ApplicationGatewayRequestRoutingRuleArgs',
'ApplicationGatewaySkuArgs',
'ApplicationGatewaySslCertificateArgs',
'ApplicationGatewaySslPolicyArgs',
'ApplicationGatewayUrlPathMapArgs',
'ApplicationGatewayWebApplicationFirewallConfigurationArgs',
'BackendAddressPoolArgs',
'BgpSettingsArgs',
'DhcpOptionsArgs',
'DnsConfigArgs',
'EndpointArgs',
'ExpressRouteCircuitAuthorizationArgs',
'ExpressRouteCircuitPeeringArgs',
'ExpressRouteCircuitPeeringConfigArgs',
'ExpressRouteCircuitServiceProviderPropertiesArgs',
'ExpressRouteCircuitSkuArgs',
'ExpressRouteCircuitStatsArgs',
'FrontendIPConfigurationArgs',
'InboundNatPoolArgs',
'InboundNatRuleArgs',
'IpsecPolicyArgs',
'Ipv6ExpressRouteCircuitPeeringConfigArgs',
'LoadBalancingRuleArgs',
'LocalNetworkGatewayArgs',
'MonitorConfigArgs',
'NetworkInterfaceDnsSettingsArgs',
'NetworkInterfaceIPConfigurationArgs',
'NetworkSecurityGroupArgs',
'OutboundNatRuleArgs',
'PacketCaptureFilterArgs',
'PacketCaptureStorageLocationArgs',
'ProbeArgs',
'PublicIPAddressArgs',
'PublicIPAddressDnsSettingsArgs',
'ResourceNavigationLinkArgs',
'RouteArgs',
'RouteFilterArgs',
'RouteFilterRuleArgs',
'RouteTableArgs',
'SecurityRuleArgs',
'SubResourceArgs',
'SubnetArgs',
'VirtualNetworkGatewayArgs',
'VirtualNetworkGatewayIPConfigurationArgs',
'VirtualNetworkGatewaySkuArgs',
'VirtualNetworkPeeringArgs',
'VpnClientConfigurationArgs',
'VpnClientRevokedCertificateArgs',
'VpnClientRootCertificateArgs',
]
@pulumi.input_type
class AddressSpaceArgs:
def __init__(__self__, *,
address_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network.
:param pulumi.Input[Sequence[pulumi.Input[str]]] address_prefixes: A list of address blocks reserved for this virtual network in CIDR notation.
"""
if address_prefixes is not None:
pulumi.set(__self__, "address_prefixes", address_prefixes)
@property
@pulumi.getter(name="addressPrefixes")
def address_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of address blocks reserved for this virtual network in CIDR notation.
"""
return pulumi.get(self, "address_prefixes")
@address_prefixes.setter
def address_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "address_prefixes", value)
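# A minimal construction sketch for AddressSpaceArgs; the CIDR block is an
# illustrative assumption. Plain Python values are accepted wherever a
# pulumi.Input[...] is expected, so the getter returns them unchanged here.
_example_address_space = AddressSpaceArgs(address_prefixes=["10.0.0.0/16"])
assert _example_address_space.address_prefixes == ["10.0.0.0/16"]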
@pulumi.input_type
class ApplicationGatewayAuthenticationCertificateArgs:
def __init__(__self__, *,
data: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Authentication certificates of an application gateway.
:param pulumi.Input[str] data: Certificate public data.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Provisioning state of the authentication certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if data is not None:
pulumi.set(__self__, "data", data)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter
def data(self) -> Optional[pulumi.Input[str]]:
"""
Certificate public data.
"""
return pulumi.get(self, "data")
@data.setter
def data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "data", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the authentication certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class ApplicationGatewayBackendAddressArgs:
def __init__(__self__, *,
fqdn: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None):
"""
Backend address of an application gateway.
:param pulumi.Input[str] fqdn: Fully qualified domain name (FQDN).
:param pulumi.Input[str] ip_address: IP address
"""
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
@property
@pulumi.getter
def fqdn(self) -> Optional[pulumi.Input[str]]:
"""
Fully qualified domain name (FQDN).
"""
return pulumi.get(self, "fqdn")
@fqdn.setter
def fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fqdn", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
IP address
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@pulumi.input_type
class ApplicationGatewayBackendAddressPoolArgs:
def __init__(__self__, *,
backend_addresses: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressArgs']]]] = None,
backend_ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceIPConfigurationArgs']]]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Backend Address Pool of an application gateway.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressArgs']]] backend_addresses: Backend addresses
:param pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceIPConfigurationArgs']]] backend_ip_configurations: Collection of references to IPs defined in network interfaces.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if backend_addresses is not None:
pulumi.set(__self__, "backend_addresses", backend_addresses)
if backend_ip_configurations is not None:
pulumi.set(__self__, "backend_ip_configurations", backend_ip_configurations)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendAddresses")
def backend_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressArgs']]]]:
"""
Backend addresses
"""
return pulumi.get(self, "backend_addresses")
@backend_addresses.setter
def backend_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressArgs']]]]):
pulumi.set(self, "backend_addresses", value)
@property
@pulumi.getter(name="backendIPConfigurations")
def backend_ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceIPConfigurationArgs']]]]:
"""
Collection of references to IPs defined in network interfaces.
"""
return pulumi.get(self, "backend_ip_configurations")
@backend_ip_configurations.setter
def backend_ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceIPConfigurationArgs']]]]):
pulumi.set(self, "backend_ip_configurations", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
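# A nesting sketch: a backend pool composes the ApplicationGatewayBackendAddressArgs
# input type defined above. The pool name, FQDN, and IP are illustrative assumptions.
_example_pool = ApplicationGatewayBackendAddressPoolArgs(
    name="appGatewayBackendPool",
    backend_addresses=[
        ApplicationGatewayBackendAddressArgs(fqdn="backend.example.com"),
        ApplicationGatewayBackendAddressArgs(ip_address="10.0.1.4"),
    ])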
@pulumi.input_type
class ApplicationGatewayBackendHttpSettingsArgs:
def __init__(__self__, *,
authentication_certificates: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None,
connection_draining: Optional[pulumi.Input['ApplicationGatewayConnectionDrainingArgs']] = None,
cookie_based_affinity: Optional[pulumi.Input[Union[str, 'ApplicationGatewayCookieBasedAffinity']]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
probe: Optional[pulumi.Input['SubResourceArgs']] = None,
protocol: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
request_timeout: Optional[pulumi.Input[int]] = None):
"""
Backend address pool settings of an application gateway.
:param pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]] authentication_certificates: Array of references to application gateway authentication certificates.
:param pulumi.Input['ApplicationGatewayConnectionDrainingArgs'] connection_draining: Connection draining of the backend http settings resource.
:param pulumi.Input[Union[str, 'ApplicationGatewayCookieBasedAffinity']] cookie_based_affinity: Cookie based affinity.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[int] port: Port
:param pulumi.Input['SubResourceArgs'] probe: Probe resource of an application gateway.
:param pulumi.Input[Union[str, 'ApplicationGatewayProtocol']] protocol: Protocol.
:param pulumi.Input[str] provisioning_state: Provisioning state of the backend http settings resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[int] request_timeout: Request timeout in seconds. Application Gateway will fail the request if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400 seconds.
"""
if authentication_certificates is not None:
pulumi.set(__self__, "authentication_certificates", authentication_certificates)
if connection_draining is not None:
pulumi.set(__self__, "connection_draining", connection_draining)
if cookie_based_affinity is not None:
pulumi.set(__self__, "cookie_based_affinity", cookie_based_affinity)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
if probe is not None:
pulumi.set(__self__, "probe", probe)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if request_timeout is not None:
pulumi.set(__self__, "request_timeout", request_timeout)
@property
@pulumi.getter(name="authenticationCertificates")
def authentication_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]:
"""
Array of references to application gateway authentication certificates.
"""
return pulumi.get(self, "authentication_certificates")
@authentication_certificates.setter
def authentication_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]):
pulumi.set(self, "authentication_certificates", value)
@property
@pulumi.getter(name="connectionDraining")
def connection_draining(self) -> Optional[pulumi.Input['ApplicationGatewayConnectionDrainingArgs']]:
"""
Connection draining of the backend http settings resource.
"""
return pulumi.get(self, "connection_draining")
@connection_draining.setter
def connection_draining(self, value: Optional[pulumi.Input['ApplicationGatewayConnectionDrainingArgs']]):
pulumi.set(self, "connection_draining", value)
@property
@pulumi.getter(name="cookieBasedAffinity")
def cookie_based_affinity(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayCookieBasedAffinity']]]:
"""
Cookie based affinity.
"""
return pulumi.get(self, "cookie_based_affinity")
@cookie_based_affinity.setter
def cookie_based_affinity(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayCookieBasedAffinity']]]):
pulumi.set(self, "cookie_based_affinity", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
Port
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter
def probe(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Probe resource of an application gateway.
"""
return pulumi.get(self, "probe")
@probe.setter
def probe(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "probe", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]:
"""
Protocol.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the backend http settings resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="requestTimeout")
def request_timeout(self) -> Optional[pulumi.Input[int]]:
"""
Request timeout in seconds. Application Gateway will fail the request if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400 seconds.
"""
return pulumi.get(self, "request_timeout")
@request_timeout.setter
def request_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "request_timeout", value)
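# A sketch of the Union[str, enum] convention used throughout this module:
# protocol and cookie_based_affinity accept either the raw API string or the
# corresponding member from ._enums. All values below are illustrative assumptions.
_example_http_settings = ApplicationGatewayBackendHttpSettingsArgs(
    port=80,
    protocol="Http",                   # the ApplicationGatewayProtocol enum also works
    cookie_based_affinity="Disabled",  # likewise ApplicationGatewayCookieBasedAffinity
    request_timeout=30)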
@pulumi.input_type
class ApplicationGatewayConnectionDrainingArgs:
def __init__(__self__, *,
drain_timeout_in_sec: pulumi.Input[int],
enabled: pulumi.Input[bool]):
"""
        Connection draining allows open connections to a backend server to remain active for a specified time after the backend server is removed from the configuration.
:param pulumi.Input[int] drain_timeout_in_sec: The number of seconds connection draining is active. Acceptable values are from 1 second to 3600 seconds.
:param pulumi.Input[bool] enabled: Whether connection draining is enabled or not.
"""
pulumi.set(__self__, "drain_timeout_in_sec", drain_timeout_in_sec)
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter(name="drainTimeoutInSec")
def drain_timeout_in_sec(self) -> pulumi.Input[int]:
"""
The number of seconds connection draining is active. Acceptable values are from 1 second to 3600 seconds.
"""
return pulumi.get(self, "drain_timeout_in_sec")
@drain_timeout_in_sec.setter
def drain_timeout_in_sec(self, value: pulumi.Input[int]):
pulumi.set(self, "drain_timeout_in_sec", value)
@property
@pulumi.getter
def enabled(self) -> pulumi.Input[bool]:
"""
Whether connection draining is enabled or not.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: pulumi.Input[bool]):
pulumi.set(self, "enabled", value)
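# Note that, unlike most input types in this module, both fields here are
# required keyword-only arguments; the 60-second drain window is an
# illustrative assumption.
_example_draining = ApplicationGatewayConnectionDrainingArgs(
    drain_timeout_in_sec=60,
    enabled=True)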
@pulumi.input_type
class ApplicationGatewayFirewallDisabledRuleGroupArgs:
def __init__(__self__, *,
rule_group_name: pulumi.Input[str],
rules: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None):
"""
Allows to disable rules within a rule group or an entire rule group.
:param pulumi.Input[str] rule_group_name: The name of the rule group that will be disabled.
:param pulumi.Input[Sequence[pulumi.Input[int]]] rules: The list of rules that will be disabled. If null, all rules of the rule group will be disabled.
"""
pulumi.set(__self__, "rule_group_name", rule_group_name)
if rules is not None:
pulumi.set(__self__, "rules", rules)
@property
@pulumi.getter(name="ruleGroupName")
def rule_group_name(self) -> pulumi.Input[str]:
"""
The name of the rule group that will be disabled.
"""
return pulumi.get(self, "rule_group_name")
@rule_group_name.setter
def rule_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "rule_group_name", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
"""
The list of rules that will be disabled. If null, all rules of the rule group will be disabled.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
pulumi.set(self, "rules", value)
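# Sketch: rule_group_name is required, while rules is optional (leaving it None
# disables the entire group). The CRS group name and rule IDs are illustrative
# assumptions.
_example_disabled_group = ApplicationGatewayFirewallDisabledRuleGroupArgs(
    rule_group_name="REQUEST-942-APPLICATION-ATTACK-SQLI",
    rules=[942100, 942200])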
@pulumi.input_type
class ApplicationGatewayFrontendIPConfigurationArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_address: Optional[pulumi.Input[str]] = None,
private_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
public_ip_address: Optional[pulumi.Input['SubResourceArgs']] = None,
subnet: Optional[pulumi.Input['SubResourceArgs']] = None):
"""
Frontend IP configuration of an application gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] private_ip_address: PrivateIPAddress of the network interface IP Configuration.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] private_ip_allocation_method: PrivateIP allocation method.
:param pulumi.Input[str] provisioning_state: Provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input['SubResourceArgs'] public_ip_address: Reference of the PublicIP resource.
:param pulumi.Input['SubResourceArgs'] subnet: Reference of the subnet resource.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
PrivateIPAddress of the network interface IP Configuration.
"""
return pulumi.get(self, "private_ip_address")
@private_ip_address.setter
def private_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_ip_address", value)
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
PrivateIP allocation method.
"""
return pulumi.get(self, "private_ip_allocation_method")
@private_ip_allocation_method.setter
def private_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "private_ip_allocation_method", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Reference of the PublicIP resource.
"""
return pulumi.get(self, "public_ip_address")
@public_ip_address.setter
def public_ip_address(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "public_ip_address", value)
@property
@pulumi.getter
def subnet(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Reference of the subnet resource.
"""
return pulumi.get(self, "subnet")
@subnet.setter
def subnet(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "subnet", value)
@pulumi.input_type
class ApplicationGatewayFrontendPortArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Frontend port of an application gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[int] port: Frontend port
:param pulumi.Input[str] provisioning_state: Provisioning state of the frontend port resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
Frontend port
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the frontend port resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class ApplicationGatewayHttpListenerArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configuration: Optional[pulumi.Input['SubResourceArgs']] = None,
frontend_port: Optional[pulumi.Input['SubResourceArgs']] = None,
host_name: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
require_server_name_indication: Optional[pulumi.Input[bool]] = None,
ssl_certificate: Optional[pulumi.Input['SubResourceArgs']] = None):
"""
Http listener of an application gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] frontend_ip_configuration: Frontend IP configuration resource of an application gateway.
:param pulumi.Input['SubResourceArgs'] frontend_port: Frontend port resource of an application gateway.
:param pulumi.Input[str] host_name: Host name of HTTP listener.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[Union[str, 'ApplicationGatewayProtocol']] protocol: Protocol.
:param pulumi.Input[str] provisioning_state: Provisioning state of the HTTP listener resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[bool] require_server_name_indication: Applicable only if protocol is https. Enables SNI for multi-hosting.
:param pulumi.Input['SubResourceArgs'] ssl_certificate: SSL certificate resource of an application gateway.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if frontend_port is not None:
pulumi.set(__self__, "frontend_port", frontend_port)
if host_name is not None:
pulumi.set(__self__, "host_name", host_name)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if require_server_name_indication is not None:
pulumi.set(__self__, "require_server_name_indication", require_server_name_indication)
if ssl_certificate is not None:
pulumi.set(__self__, "ssl_certificate", ssl_certificate)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Frontend IP configuration resource of an application gateway.
"""
return pulumi.get(self, "frontend_ip_configuration")
@frontend_ip_configuration.setter
def frontend_ip_configuration(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "frontend_ip_configuration", value)
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Frontend port resource of an application gateway.
"""
return pulumi.get(self, "frontend_port")
@frontend_port.setter
def frontend_port(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "frontend_port", value)
@property
@pulumi.getter(name="hostName")
def host_name(self) -> Optional[pulumi.Input[str]]:
"""
Host name of HTTP listener.
"""
return pulumi.get(self, "host_name")
@host_name.setter
def host_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host_name", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]:
"""
Protocol.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the HTTP listener resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="requireServerNameIndication")
def require_server_name_indication(self) -> Optional[pulumi.Input[bool]]:
"""
Applicable only if protocol is https. Enables SNI for multi-hosting.
"""
return pulumi.get(self, "require_server_name_indication")
@require_server_name_indication.setter
def require_server_name_indication(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "require_server_name_indication", value)
@property
@pulumi.getter(name="sslCertificate")
def ssl_certificate(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
SSL certificate resource of an application gateway.
"""
return pulumi.get(self, "ssl_certificate")
@ssl_certificate.setter
def ssl_certificate(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "ssl_certificate", value)
@pulumi.input_type
class ApplicationGatewayIPConfigurationArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
subnet: Optional[pulumi.Input['SubResourceArgs']] = None):
"""
IP configuration of an application gateway. Currently 1 public and 1 private IP configuration is allowed.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Provisioning state of the application gateway subnet resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input['SubResourceArgs'] subnet: Reference of the subnet resource. A subnet from where application gateway gets its private address.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the application gateway subnet resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter
def subnet(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Reference of the subnet resource. A subnet from where application gateway gets its private address.
"""
return pulumi.get(self, "subnet")
@subnet.setter
def subnet(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "subnet", value)
@pulumi.input_type
class ApplicationGatewayPathRuleArgs:
def __init__(__self__, *,
backend_address_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
backend_http_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
paths: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Path rule of URL path map of an application gateway.
:param pulumi.Input['SubResourceArgs'] backend_address_pool: Backend address pool resource of URL path map.
:param pulumi.Input['SubResourceArgs'] backend_http_settings: Backend http settings resource of URL path map.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] paths: Path rules of URL path map.
:param pulumi.Input[str] provisioning_state: Path rule of URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if backend_address_pool is not None:
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if backend_http_settings is not None:
pulumi.set(__self__, "backend_http_settings", backend_http_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if paths is not None:
pulumi.set(__self__, "paths", paths)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Backend address pool resource of URL path map.
"""
return pulumi.get(self, "backend_address_pool")
@backend_address_pool.setter
def backend_address_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "backend_address_pool", value)
@property
@pulumi.getter(name="backendHttpSettings")
def backend_http_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Backend http settings resource of URL path map.
"""
return pulumi.get(self, "backend_http_settings")
@backend_http_settings.setter
def backend_http_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "backend_http_settings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Path rules of URL path map.
"""
return pulumi.get(self, "paths")
@paths.setter
def paths(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "paths", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Path rule of URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class ApplicationGatewayProbeArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
host: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
interval: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
path: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
timeout: Optional[pulumi.Input[int]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None):
"""
Probe of the application gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] host: Host name to send the probe to.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] interval: The probing interval in seconds. This is the time interval between two consecutive probes. Acceptable values are from 1 second to 86400 seconds.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] path: Relative path of probe. Valid path starts from '/'. Probe is sent to <Protocol>://<host>:<port><path>
:param pulumi.Input[Union[str, 'ApplicationGatewayProtocol']] protocol: Protocol.
:param pulumi.Input[str] provisioning_state: Provisioning state of the backend http settings resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :param pulumi.Input[int] timeout: The probe timeout in seconds. The probe is marked as failed if a valid response is not received within this timeout period. Acceptable values are from 1 second to 86400 seconds.
        :param pulumi.Input[int] unhealthy_threshold: The probe retry count. The backend server is marked down after the consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1 to 20.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if host is not None:
pulumi.set(__self__, "host", host)
if id is not None:
pulumi.set(__self__, "id", id)
if interval is not None:
pulumi.set(__self__, "interval", interval)
if name is not None:
pulumi.set(__self__, "name", name)
if path is not None:
pulumi.set(__self__, "path", path)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if timeout is not None:
pulumi.set(__self__, "timeout", timeout)
if unhealthy_threshold is not None:
pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input[str]]:
"""
Host name to send the probe to.
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def interval(self) -> Optional[pulumi.Input[int]]:
"""
The probing interval in seconds. This is the time interval between two consecutive probes. Acceptable values are from 1 second to 86400 seconds.
"""
return pulumi.get(self, "interval")
@interval.setter
def interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "interval", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def path(self) -> Optional[pulumi.Input[str]]:
"""
Relative path of the probe. A valid path starts with '/'. The probe is sent to <Protocol>://<host>:<port><path>.
"""
return pulumi.get(self, "path")
@path.setter
def path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "path", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]:
"""
Protocol.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayProtocol']]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the probe resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter
def timeout(self) -> Optional[pulumi.Input[int]]:
"""
The probe timeout in seconds. The probe is marked as failed if a valid response is not received within this timeout period. Acceptable values are from 1 second to 86400 seconds.
"""
return pulumi.get(self, "timeout")
@timeout.setter
def timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout", value)
@property
@pulumi.getter(name="unhealthyThreshold")
def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]:
"""
The probe retry count. The backend server is marked down after the consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1 to 20.
"""
return pulumi.get(self, "unhealthy_threshold")
@unhealthy_threshold.setter
def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "unhealthy_threshold", value)
@pulumi.input_type
class ApplicationGatewayRequestRoutingRuleArgs:
def __init__(__self__, *,
backend_address_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
backend_http_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
http_listener: Optional[pulumi.Input['SubResourceArgs']] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
rule_type: Optional[pulumi.Input[Union[str, 'ApplicationGatewayRequestRoutingRuleType']]] = None,
url_path_map: Optional[pulumi.Input['SubResourceArgs']] = None):
"""
Request routing rule of an application gateway.
:param pulumi.Input['SubResourceArgs'] backend_address_pool: Backend address pool resource of the application gateway.
:param pulumi.Input['SubResourceArgs'] backend_http_settings: Backend http settings resource of the application gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] http_listener: Http listener resource of the application gateway.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Provisioning state of the request routing rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[Union[str, 'ApplicationGatewayRequestRoutingRuleType']] rule_type: Rule type.
:param pulumi.Input['SubResourceArgs'] url_path_map: URL path map resource of the application gateway.
"""
if backend_address_pool is not None:
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if backend_http_settings is not None:
pulumi.set(__self__, "backend_http_settings", backend_http_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if http_listener is not None:
pulumi.set(__self__, "http_listener", http_listener)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if rule_type is not None:
pulumi.set(__self__, "rule_type", rule_type)
if url_path_map is not None:
pulumi.set(__self__, "url_path_map", url_path_map)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Backend address pool resource of the application gateway.
"""
return pulumi.get(self, "backend_address_pool")
@backend_address_pool.setter
def backend_address_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "backend_address_pool", value)
@property
@pulumi.getter(name="backendHttpSettings")
def backend_http_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Backend http settings resource of the application gateway.
"""
return pulumi.get(self, "backend_http_settings")
@backend_http_settings.setter
def backend_http_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "backend_http_settings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="httpListener")
def http_listener(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Http listener resource of the application gateway.
"""
return pulumi.get(self, "http_listener")
@http_listener.setter
def http_listener(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "http_listener", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the request routing rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="ruleType")
def rule_type(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayRequestRoutingRuleType']]]:
"""
Rule type.
"""
return pulumi.get(self, "rule_type")
@rule_type.setter
def rule_type(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayRequestRoutingRuleType']]]):
pulumi.set(self, "rule_type", value)
@property
@pulumi.getter(name="urlPathMap")
def url_path_map(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
URL path map resource of the application gateway.
"""
return pulumi.get(self, "url_path_map")
@url_path_map.setter
def url_path_map(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "url_path_map", value)
@pulumi.input_type
class ApplicationGatewaySkuArgs:
def __init__(__self__, *,
capacity: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[Union[str, 'ApplicationGatewaySkuName']]] = None,
tier: Optional[pulumi.Input[Union[str, 'ApplicationGatewayTier']]] = None):
"""
SKU of an application gateway.
:param pulumi.Input[int] capacity: Capacity (instance count) of an application gateway.
:param pulumi.Input[Union[str, 'ApplicationGatewaySkuName']] name: Name of an application gateway SKU.
:param pulumi.Input[Union[str, 'ApplicationGatewayTier']] tier: Tier of an application gateway.
"""
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def capacity(self) -> Optional[pulumi.Input[int]]:
"""
Capacity (instance count) of an application gateway.
"""
return pulumi.get(self, "capacity")
@capacity.setter
def capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "capacity", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewaySkuName']]]:
"""
Name of an application gateway SKU.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewaySkuName']]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tier(self) -> Optional[pulumi.Input[Union[str, 'ApplicationGatewayTier']]]:
"""
Tier of an application gateway.
"""
return pulumi.get(self, "tier")
@tier.setter
def tier(self, value: Optional[pulumi.Input[Union[str, 'ApplicationGatewayTier']]]):
pulumi.set(self, "tier", value)
@pulumi.input_type
class ApplicationGatewaySslCertificateArgs:
def __init__(__self__, *,
data: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
public_cert_data: Optional[pulumi.Input[str]] = None):
"""
SSL certificates of an application gateway.
:param pulumi.Input[str] data: Base-64 encoded pfx certificate. Only applicable in PUT Request.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] password: Password for the pfx file specified in data. Only applicable in PUT request.
:param pulumi.Input[str] provisioning_state: Provisioning state of the SSL certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in data. Only applicable in GET request.
"""
if data is not None:
pulumi.set(__self__, "data", data)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if password is not None:
pulumi.set(__self__, "password", password)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_cert_data is not None:
pulumi.set(__self__, "public_cert_data", public_cert_data)
@property
@pulumi.getter
def data(self) -> Optional[pulumi.Input[str]]:
"""
Base-64 encoded pfx certificate. Only applicable in PUT Request.
"""
return pulumi.get(self, "data")
@data.setter
def data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "data", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
Password for the pfx file specified in data. Only applicable in PUT request.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the SSL certificate resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="publicCertData")
def public_cert_data(self) -> Optional[pulumi.Input[str]]:
"""
Base-64 encoded Public cert data corresponding to pfx specified in data. Only applicable in GET request.
"""
return pulumi.get(self, "public_cert_data")
@public_cert_data.setter
def public_cert_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "public_cert_data", value)
@pulumi.input_type
class ApplicationGatewaySslPolicyArgs:
def __init__(__self__, *,
disabled_ssl_protocols: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'ApplicationGatewaySslProtocol']]]]] = None):
"""
Application gateway SSL policy.
:param pulumi.Input[Sequence[pulumi.Input[Union[str, 'ApplicationGatewaySslProtocol']]]] disabled_ssl_protocols: SSL protocols to be disabled on application gateway.
"""
if disabled_ssl_protocols is not None:
pulumi.set(__self__, "disabled_ssl_protocols", disabled_ssl_protocols)
@property
@pulumi.getter(name="disabledSslProtocols")
def disabled_ssl_protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'ApplicationGatewaySslProtocol']]]]]:
"""
SSL protocols to be disabled on application gateway.
"""
return pulumi.get(self, "disabled_ssl_protocols")
@disabled_ssl_protocols.setter
def disabled_ssl_protocols(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'ApplicationGatewaySslProtocol']]]]]):
pulumi.set(self, "disabled_ssl_protocols", value)
@pulumi.input_type
class ApplicationGatewayUrlPathMapArgs:
def __init__(__self__, *,
default_backend_address_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
default_backend_http_settings: Optional[pulumi.Input['SubResourceArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path_rules: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayPathRuleArgs']]]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
UrlPathMaps give the URL-path-to-backend mapping information for PathBasedRouting.
:param pulumi.Input['SubResourceArgs'] default_backend_address_pool: Default backend address pool resource of URL path map.
:param pulumi.Input['SubResourceArgs'] default_backend_http_settings: Default backend http settings resource of URL path map.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayPathRuleArgs']]] path_rules: Path rule of URL path map resource.
:param pulumi.Input[str] provisioning_state: Provisioning state of the URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if default_backend_address_pool is not None:
pulumi.set(__self__, "default_backend_address_pool", default_backend_address_pool)
if default_backend_http_settings is not None:
pulumi.set(__self__, "default_backend_http_settings", default_backend_http_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if path_rules is not None:
pulumi.set(__self__, "path_rules", path_rules)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="defaultBackendAddressPool")
def default_backend_address_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Default backend address pool resource of URL path map.
"""
return pulumi.get(self, "default_backend_address_pool")
@default_backend_address_pool.setter
def default_backend_address_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "default_backend_address_pool", value)
@property
@pulumi.getter(name="defaultBackendHttpSettings")
def default_backend_http_settings(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
Default backend http settings resource of URL path map.
"""
return pulumi.get(self, "default_backend_http_settings")
@default_backend_http_settings.setter
def default_backend_http_settings(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "default_backend_http_settings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="pathRules")
def path_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayPathRuleArgs']]]]:
"""
Path rule of URL path map resource.
"""
return pulumi.get(self, "path_rules")
@path_rules.setter
def path_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayPathRuleArgs']]]]):
pulumi.set(self, "path_rules", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Provisioning state of the URL path map resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class ApplicationGatewayWebApplicationFirewallConfigurationArgs:
def __init__(__self__, *,
enabled: pulumi.Input[bool],
firewall_mode: pulumi.Input[Union[str, 'ApplicationGatewayFirewallMode']],
rule_set_type: pulumi.Input[str],
rule_set_version: pulumi.Input[str],
disabled_rule_groups: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayFirewallDisabledRuleGroupArgs']]]] = None):
"""
Application gateway web application firewall configuration.
:param pulumi.Input[bool] enabled: Whether the web application firewall is enabled or not.
:param pulumi.Input[Union[str, 'ApplicationGatewayFirewallMode']] firewall_mode: Web application firewall mode.
:param pulumi.Input[str] rule_set_type: The type of the web application firewall rule set. Possible values are: 'OWASP'.
:param pulumi.Input[str] rule_set_version: The version of the rule set type.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayFirewallDisabledRuleGroupArgs']]] disabled_rule_groups: The disabled rule groups.
"""
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "firewall_mode", firewall_mode)
pulumi.set(__self__, "rule_set_type", rule_set_type)
pulumi.set(__self__, "rule_set_version", rule_set_version)
if disabled_rule_groups is not None:
pulumi.set(__self__, "disabled_rule_groups", disabled_rule_groups)
@property
@pulumi.getter
def enabled(self) -> pulumi.Input[bool]:
"""
Whether the web application firewall is enabled or not.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: pulumi.Input[bool]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="firewallMode")
def firewall_mode(self) -> pulumi.Input[Union[str, 'ApplicationGatewayFirewallMode']]:
"""
Web application firewall mode.
"""
return pulumi.get(self, "firewall_mode")
@firewall_mode.setter
def firewall_mode(self, value: pulumi.Input[Union[str, 'ApplicationGatewayFirewallMode']]):
pulumi.set(self, "firewall_mode", value)
@property
@pulumi.getter(name="ruleSetType")
def rule_set_type(self) -> pulumi.Input[str]:
"""
The type of the web application firewall rule set. Possible values are: 'OWASP'.
"""
return pulumi.get(self, "rule_set_type")
@rule_set_type.setter
def rule_set_type(self, value: pulumi.Input[str]):
pulumi.set(self, "rule_set_type", value)
@property
@pulumi.getter(name="ruleSetVersion")
def rule_set_version(self) -> pulumi.Input[str]:
"""
The version of the rule set type.
"""
return pulumi.get(self, "rule_set_version")
@rule_set_version.setter
def rule_set_version(self, value: pulumi.Input[str]):
pulumi.set(self, "rule_set_version", value)
@property
@pulumi.getter(name="disabledRuleGroups")
def disabled_rule_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayFirewallDisabledRuleGroupArgs']]]]:
"""
The disabled rule groups.
"""
return pulumi.get(self, "disabled_rule_groups")
@disabled_rule_groups.setter
def disabled_rule_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayFirewallDisabledRuleGroupArgs']]]]):
pulumi.set(self, "disabled_rule_groups", value)
@pulumi.input_type
class BackendAddressPoolArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Pool of backend IP addresses.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the backend address pool resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class BgpSettingsArgs:
def __init__(__self__, *,
asn: Optional[pulumi.Input[float]] = None,
bgp_peering_address: Optional[pulumi.Input[str]] = None,
peer_weight: Optional[pulumi.Input[int]] = None):
"""
BGP settings details.
:param pulumi.Input[float] asn: The BGP speaker's ASN.
:param pulumi.Input[str] bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker.
:param pulumi.Input[int] peer_weight: The weight added to routes learned from this BGP speaker.
"""
if asn is not None:
pulumi.set(__self__, "asn", asn)
if bgp_peering_address is not None:
pulumi.set(__self__, "bgp_peering_address", bgp_peering_address)
if peer_weight is not None:
pulumi.set(__self__, "peer_weight", peer_weight)
@property
@pulumi.getter
def asn(self) -> Optional[pulumi.Input[float]]:
"""
The BGP speaker's ASN.
"""
return pulumi.get(self, "asn")
@asn.setter
def asn(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "asn", value)
@property
@pulumi.getter(name="bgpPeeringAddress")
def bgp_peering_address(self) -> Optional[pulumi.Input[str]]:
"""
The BGP peering address and BGP identifier of this BGP speaker.
"""
return pulumi.get(self, "bgp_peering_address")
@bgp_peering_address.setter
def bgp_peering_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "bgp_peering_address", value)
@property
@pulumi.getter(name="peerWeight")
def peer_weight(self) -> Optional[pulumi.Input[int]]:
"""
The weight added to routes learned from this BGP speaker.
"""
return pulumi.get(self, "peer_weight")
@peer_weight.setter
def peer_weight(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "peer_weight", value)
@pulumi.input_type
class DhcpOptionsArgs:
def __init__(__self__, *,
dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
DhcpOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP option for a subnet overrides VNET DHCP options.
:param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: The list of DNS servers IP addresses.
"""
if dns_servers is not None:
pulumi.set(__self__, "dns_servers", dns_servers)
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The list of DNS servers IP addresses.
"""
return pulumi.get(self, "dns_servers")
@dns_servers.setter
def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "dns_servers", value)
@pulumi.input_type
class DnsConfigArgs:
def __init__(__self__, *,
fqdn: Optional[pulumi.Input[str]] = None,
relative_name: Optional[pulumi.Input[str]] = None,
ttl: Optional[pulumi.Input[float]] = None):
"""
Class containing DNS settings in a Traffic Manager profile.
:param pulumi.Input[str] fqdn: Gets or sets the fully-qualified domain name (FQDN) of the Traffic Manager profile. This is formed from the concatenation of the RelativeName with the DNS domain used by Azure Traffic Manager.
:param pulumi.Input[str] relative_name: Gets or sets the relative DNS name provided by this Traffic Manager profile. This value is combined with the DNS domain name used by Azure Traffic Manager to form the fully-qualified domain name (FQDN) of the profile.
:param pulumi.Input[float] ttl: Gets or sets the DNS Time-To-Live (TTL), in seconds. This informs the local DNS resolvers and DNS clients how long to cache DNS responses provided by this Traffic Manager profile.
"""
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if relative_name is not None:
pulumi.set(__self__, "relative_name", relative_name)
if ttl is not None:
pulumi.set(__self__, "ttl", ttl)
@property
@pulumi.getter
def fqdn(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the fully-qualified domain name (FQDN) of the Traffic Manager profile. This is formed from the concatenation of the RelativeName with the DNS domain used by Azure Traffic Manager.
"""
return pulumi.get(self, "fqdn")
@fqdn.setter
def fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fqdn", value)
@property
@pulumi.getter(name="relativeName")
def relative_name(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the relative DNS name provided by this Traffic Manager profile. This value is combined with the DNS domain name used by Azure Traffic Manager to form the fully-qualified domain name (FQDN) of the profile.
"""
return pulumi.get(self, "relative_name")
@relative_name.setter
def relative_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "relative_name", value)
@property
@pulumi.getter
def ttl(self) -> Optional[pulumi.Input[float]]:
"""
Gets or sets the DNS Time-To-Live (TTL), in seconds. This informs the local DNS resolvers and DNS clients how long to cache DNS responses provided by this Traffic Manager profile.
"""
return pulumi.get(self, "ttl")
@ttl.setter
def ttl(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "ttl", value)
@pulumi.input_type
class EndpointArgs:
def __init__(__self__, *,
endpoint_location: Optional[pulumi.Input[str]] = None,
endpoint_monitor_status: Optional[pulumi.Input[str]] = None,
endpoint_status: Optional[pulumi.Input[str]] = None,
geo_mapping: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
id: Optional[pulumi.Input[str]] = None,
min_child_endpoints: Optional[pulumi.Input[float]] = None,
name: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[float]] = None,
target: Optional[pulumi.Input[str]] = None,
target_resource_id: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
weight: Optional[pulumi.Input[float]] = None):
"""
Class representing a Traffic Manager endpoint.
:param pulumi.Input[str] endpoint_location: Specifies the location of the external or nested endpoints when using the 'Performance' traffic routing method.
:param pulumi.Input[str] endpoint_monitor_status: Gets or sets the monitoring status of the endpoint.
:param pulumi.Input[str] endpoint_status: Gets or sets the status of the endpoint. If the endpoint is Enabled, it is probed for endpoint health and is included in the traffic routing method. Possible values are 'Enabled' and 'Disabled'.
:param pulumi.Input[Sequence[pulumi.Input[str]]] geo_mapping: Gets or sets the list of countries/regions mapped to this endpoint when using the 'Geographic' traffic routing method. Please consult Traffic Manager Geographic documentation for a full list of accepted values.
:param pulumi.Input[str] id: Gets or sets the ID of the Traffic Manager endpoint.
:param pulumi.Input[float] min_child_endpoints: Gets or sets the minimum number of endpoints that must be available in the child profile in order for the parent profile to be considered available. Only applicable to endpoints of type 'NestedEndpoints'.
:param pulumi.Input[str] name: Gets or sets the name of the Traffic Manager endpoint.
:param pulumi.Input[float] priority: Gets or sets the priority of this endpoint when using the 'Priority' traffic routing method. Possible values are from 1 to 1000; lower values represent higher priority. This is an optional parameter. If specified, it must be specified on all endpoints, and no two endpoints can share the same priority value.
:param pulumi.Input[str] target: Gets or sets the fully-qualified DNS name of the endpoint. Traffic Manager returns this value in DNS responses to direct traffic to this endpoint.
:param pulumi.Input[str] target_resource_id: Gets or sets the Azure Resource URI of the endpoint. Not applicable to endpoints of type 'ExternalEndpoints'.
:param pulumi.Input[str] type: Gets or sets the endpoint type of the Traffic Manager endpoint.
:param pulumi.Input[float] weight: Gets or sets the weight of this endpoint when using the 'Weighted' traffic routing method. Possible values are from 1 to 1000.
"""
if endpoint_location is not None:
pulumi.set(__self__, "endpoint_location", endpoint_location)
if endpoint_monitor_status is not None:
pulumi.set(__self__, "endpoint_monitor_status", endpoint_monitor_status)
if endpoint_status is not None:
pulumi.set(__self__, "endpoint_status", endpoint_status)
if geo_mapping is not None:
pulumi.set(__self__, "geo_mapping", geo_mapping)
if id is not None:
pulumi.set(__self__, "id", id)
if min_child_endpoints is not None:
pulumi.set(__self__, "min_child_endpoints", min_child_endpoints)
if name is not None:
pulumi.set(__self__, "name", name)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if target is not None:
pulumi.set(__self__, "target", target)
if target_resource_id is not None:
pulumi.set(__self__, "target_resource_id", target_resource_id)
if type is not None:
pulumi.set(__self__, "type", type)
if weight is not None:
pulumi.set(__self__, "weight", weight)
@property
@pulumi.getter(name="endpointLocation")
def endpoint_location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the location of the external or nested endpoints when using the 'Performance' traffic routing method.
"""
return pulumi.get(self, "endpoint_location")
@endpoint_location.setter
def endpoint_location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint_location", value)
@property
@pulumi.getter(name="endpointMonitorStatus")
def endpoint_monitor_status(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the monitoring status of the endpoint.
"""
return pulumi.get(self, "endpoint_monitor_status")
@endpoint_monitor_status.setter
def endpoint_monitor_status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint_monitor_status", value)
@property
@pulumi.getter(name="endpointStatus")
def endpoint_status(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the status of the endpoint. If the endpoint is Enabled, it is probed for endpoint health and is included in the traffic routing method. Possible values are 'Enabled' and 'Disabled'.
"""
return pulumi.get(self, "endpoint_status")
@endpoint_status.setter
def endpoint_status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint_status", value)
@property
@pulumi.getter(name="geoMapping")
def geo_mapping(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Gets or sets the list of countries/regions mapped to this endpoint when using the 'Geographic' traffic routing method. Please consult Traffic Manager Geographic documentation for a full list of accepted values.
"""
return pulumi.get(self, "geo_mapping")
@geo_mapping.setter
def geo_mapping(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "geo_mapping", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the ID of the Traffic Manager endpoint.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="minChildEndpoints")
def min_child_endpoints(self) -> Optional[pulumi.Input[float]]:
"""
Gets or sets the minimum number of endpoints that must be available in the child profile in order for the parent profile to be considered available. Only applicable to endpoints of type 'NestedEndpoints'.
"""
return pulumi.get(self, "min_child_endpoints")
@min_child_endpoints.setter
def min_child_endpoints(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "min_child_endpoints", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the name of the Traffic Manager endpoint.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[float]]:
"""
Gets or sets the priority of this endpoint when using the 'Priority' traffic routing method. Possible values are from 1 to 1000; lower values represent higher priority. This is an optional parameter. If specified, it must be specified on all endpoints, and no two endpoints can share the same priority value.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter
def target(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the fully-qualified DNS name of the endpoint. Traffic Manager returns this value in DNS responses to direct traffic to this endpoint.
"""
return pulumi.get(self, "target")
@target.setter
def target(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "target", value)
@property
@pulumi.getter(name="targetResourceId")
def target_resource_id(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the Azure Resource URI of the endpoint. Not applicable to endpoints of type 'ExternalEndpoints'.
"""
return pulumi.get(self, "target_resource_id")
@target_resource_id.setter
def target_resource_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "target_resource_id", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the endpoint type of the Traffic Manager endpoint.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def weight(self) -> Optional[pulumi.Input[float]]:
"""
Gets or sets the weight of this endpoint when using the 'Weighted' traffic routing method. Possible values are from 1 to 1000.
"""
return pulumi.get(self, "weight")
@weight.setter
def weight(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "weight", value)
@pulumi.input_type
class ExpressRouteCircuitAuthorizationArgs:
def __init__(__self__, *,
authorization_key: Optional[pulumi.Input[str]] = None,
authorization_use_status: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Authorization in an ExpressRouteCircuit resource.
:param pulumi.Input[str] authorization_key: The authorization key.
:param pulumi.Input[Union[str, 'AuthorizationUseStatus']] authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the authorization resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if authorization_key is not None:
pulumi.set(__self__, "authorization_key", authorization_key)
if authorization_use_status is not None:
pulumi.set(__self__, "authorization_use_status", authorization_use_status)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="authorizationKey")
def authorization_key(self) -> Optional[pulumi.Input[str]]:
"""
The authorization key.
"""
return pulumi.get(self, "authorization_key")
@authorization_key.setter
def authorization_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authorization_key", value)
@property
@pulumi.getter(name="authorizationUseStatus")
def authorization_use_status(self) -> Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]]:
"""
AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
"""
return pulumi.get(self, "authorization_use_status")
@authorization_use_status.setter
def authorization_use_status(self, value: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]]):
pulumi.set(self, "authorization_use_status", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the authorization resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class ExpressRouteCircuitPeeringArgs:
def __init__(__self__, *,
azure_asn: Optional[pulumi.Input[int]] = None,
gateway_manager_etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
ipv6_peering_config: Optional[pulumi.Input['Ipv6ExpressRouteCircuitPeeringConfigArgs']] = None,
last_modified_by: Optional[pulumi.Input[str]] = None,
microsoft_peering_config: Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
peer_asn: Optional[pulumi.Input[int]] = None,
peering_type: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringType']]] = None,
primary_azure_port: Optional[pulumi.Input[str]] = None,
primary_peer_address_prefix: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
route_filter: Optional[pulumi.Input['RouteFilterArgs']] = None,
secondary_azure_port: Optional[pulumi.Input[str]] = None,
secondary_peer_address_prefix: Optional[pulumi.Input[str]] = None,
shared_key: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]] = None,
stats: Optional[pulumi.Input['ExpressRouteCircuitStatsArgs']] = None,
vlan_id: Optional[pulumi.Input[int]] = None):
"""
Peering in an ExpressRouteCircuit resource.
:param pulumi.Input[int] azure_asn: The Azure ASN.
:param pulumi.Input[str] gateway_manager_etag: The GatewayManager Etag.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input['Ipv6ExpressRouteCircuitPeeringConfigArgs'] ipv6_peering_config: The IPv6 peering configuration.
:param pulumi.Input[str] last_modified_by: Gets whether the provider or the customer last modified the peering.
:param pulumi.Input['ExpressRouteCircuitPeeringConfigArgs'] microsoft_peering_config: The Microsoft peering configuration.
:param pulumi.Input[str] name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[int] peer_asn: The peer ASN.
:param pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringType']] peering_type: The PeeringType. Possible values are: 'AzurePublicPeering', 'AzurePrivatePeering', and 'MicrosoftPeering'.
:param pulumi.Input[str] primary_azure_port: The primary port.
:param pulumi.Input[str] primary_peer_address_prefix: The primary address prefix.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the peering resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input['RouteFilterArgs'] route_filter: The reference of the RouteFilter resource.
:param pulumi.Input[str] secondary_azure_port: The secondary port.
:param pulumi.Input[str] secondary_peer_address_prefix: The secondary address prefix.
:param pulumi.Input[str] shared_key: The shared key.
:param pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']] state: The state of peering. Possible values are: 'Disabled' and 'Enabled'.
:param pulumi.Input['ExpressRouteCircuitStatsArgs'] stats: Gets peering stats.
:param pulumi.Input[int] vlan_id: The VLAN ID.
"""
if azure_asn is not None:
pulumi.set(__self__, "azure_asn", azure_asn)
if gateway_manager_etag is not None:
pulumi.set(__self__, "gateway_manager_etag", gateway_manager_etag)
if id is not None:
pulumi.set(__self__, "id", id)
if ipv6_peering_config is not None:
pulumi.set(__self__, "ipv6_peering_config", ipv6_peering_config)
if last_modified_by is not None:
pulumi.set(__self__, "last_modified_by", last_modified_by)
if microsoft_peering_config is not None:
pulumi.set(__self__, "microsoft_peering_config", microsoft_peering_config)
if name is not None:
pulumi.set(__self__, "name", name)
if peer_asn is not None:
pulumi.set(__self__, "peer_asn", peer_asn)
if peering_type is not None:
pulumi.set(__self__, "peering_type", peering_type)
if primary_azure_port is not None:
pulumi.set(__self__, "primary_azure_port", primary_azure_port)
if primary_peer_address_prefix is not None:
pulumi.set(__self__, "primary_peer_address_prefix", primary_peer_address_prefix)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if route_filter is not None:
pulumi.set(__self__, "route_filter", route_filter)
if secondary_azure_port is not None:
pulumi.set(__self__, "secondary_azure_port", secondary_azure_port)
if secondary_peer_address_prefix is not None:
pulumi.set(__self__, "secondary_peer_address_prefix", secondary_peer_address_prefix)
if shared_key is not None:
pulumi.set(__self__, "shared_key", shared_key)
if state is not None:
pulumi.set(__self__, "state", state)
if stats is not None:
pulumi.set(__self__, "stats", stats)
if vlan_id is not None:
pulumi.set(__self__, "vlan_id", vlan_id)
@property
@pulumi.getter(name="azureASN")
def azure_asn(self) -> Optional[pulumi.Input[int]]:
"""
The Azure ASN.
"""
return pulumi.get(self, "azure_asn")
@azure_asn.setter
def azure_asn(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "azure_asn", value)
@property
@pulumi.getter(name="gatewayManagerEtag")
def gateway_manager_etag(self) -> Optional[pulumi.Input[str]]:
"""
The GatewayManager Etag.
"""
return pulumi.get(self, "gateway_manager_etag")
@gateway_manager_etag.setter
def gateway_manager_etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "gateway_manager_etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="ipv6PeeringConfig")
def ipv6_peering_config(self) -> Optional[pulumi.Input['Ipv6ExpressRouteCircuitPeeringConfigArgs']]:
"""
The IPv6 peering configuration.
"""
return pulumi.get(self, "ipv6_peering_config")
@ipv6_peering_config.setter
def ipv6_peering_config(self, value: Optional[pulumi.Input['Ipv6ExpressRouteCircuitPeeringConfigArgs']]):
pulumi.set(self, "ipv6_peering_config", value)
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> Optional[pulumi.Input[str]]:
"""
Gets whether the provider or the customer last modified the peering.
"""
return pulumi.get(self, "last_modified_by")
@last_modified_by.setter
def last_modified_by(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_modified_by", value)
@property
@pulumi.getter(name="microsoftPeeringConfig")
def microsoft_peering_config(self) -> Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']]:
"""
The Microsoft peering configuration.
"""
return pulumi.get(self, "microsoft_peering_config")
@microsoft_peering_config.setter
def microsoft_peering_config(self, value: Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']]):
pulumi.set(self, "microsoft_peering_config", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="peerASN")
def peer_asn(self) -> Optional[pulumi.Input[int]]:
"""
The peer ASN.
"""
return pulumi.get(self, "peer_asn")
@peer_asn.setter
def peer_asn(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "peer_asn", value)
@property
@pulumi.getter(name="peeringType")
def peering_type(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringType']]]:
"""
The PeeringType. Possible values are: 'AzurePublicPeering', 'AzurePrivatePeering', and 'MicrosoftPeering'.
"""
return pulumi.get(self, "peering_type")
@peering_type.setter
def peering_type(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringType']]]):
pulumi.set(self, "peering_type", value)
@property
@pulumi.getter(name="primaryAzurePort")
def primary_azure_port(self) -> Optional[pulumi.Input[str]]:
"""
The primary port.
"""
return pulumi.get(self, "primary_azure_port")
@primary_azure_port.setter
def primary_azure_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "primary_azure_port", value)
@property
@pulumi.getter(name="primaryPeerAddressPrefix")
def primary_peer_address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The primary address prefix.
"""
return pulumi.get(self, "primary_peer_address_prefix")
@primary_peer_address_prefix.setter
def primary_peer_address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "primary_peer_address_prefix", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the peering resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="routeFilter")
def route_filter(self) -> Optional[pulumi.Input['RouteFilterArgs']]:
"""
The reference of the RouteFilter resource.
"""
return pulumi.get(self, "route_filter")
@route_filter.setter
def route_filter(self, value: Optional[pulumi.Input['RouteFilterArgs']]):
pulumi.set(self, "route_filter", value)
@property
@pulumi.getter(name="secondaryAzurePort")
def secondary_azure_port(self) -> Optional[pulumi.Input[str]]:
"""
The secondary port.
"""
return pulumi.get(self, "secondary_azure_port")
@secondary_azure_port.setter
def secondary_azure_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "secondary_azure_port", value)
@property
@pulumi.getter(name="secondaryPeerAddressPrefix")
def secondary_peer_address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The secondary address prefix.
"""
return pulumi.get(self, "secondary_peer_address_prefix")
@secondary_peer_address_prefix.setter
def secondary_peer_address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "secondary_peer_address_prefix", value)
@property
@pulumi.getter(name="sharedKey")
def shared_key(self) -> Optional[pulumi.Input[str]]:
"""
The shared key.
"""
return pulumi.get(self, "shared_key")
@shared_key.setter
def shared_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "shared_key", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]]:
"""
The state of peering. Possible values are: 'Disabled' and 'Enabled'.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter
def stats(self) -> Optional[pulumi.Input['ExpressRouteCircuitStatsArgs']]:
"""
Gets peering stats.
"""
return pulumi.get(self, "stats")
@stats.setter
def stats(self, value: Optional[pulumi.Input['ExpressRouteCircuitStatsArgs']]):
pulumi.set(self, "stats", value)
@property
@pulumi.getter(name="vlanId")
def vlan_id(self) -> Optional[pulumi.Input[int]]:
"""
The VLAN ID.
"""
return pulumi.get(self, "vlan_id")
@vlan_id.setter
def vlan_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "vlan_id", value)
@pulumi.input_type
class ExpressRouteCircuitPeeringConfigArgs:
def __init__(__self__, *,
advertised_public_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
advertised_public_prefixes_state: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringAdvertisedPublicPrefixState']]] = None,
customer_asn: Optional[pulumi.Input[int]] = None,
routing_registry_name: Optional[pulumi.Input[str]] = None):
"""
Specifies the peering configuration.
:param pulumi.Input[Sequence[pulumi.Input[str]]] advertised_public_prefixes: The reference of AdvertisedPublicPrefixes.
:param pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringAdvertisedPublicPrefixState']] advertised_public_prefixes_state: AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured', 'Configuring', 'Configured', and 'ValidationNeeded'.
:param pulumi.Input[int] customer_asn: The CustomerASN of the peering.
:param pulumi.Input[str] routing_registry_name: The RoutingRegistryName of the configuration.
"""
if advertised_public_prefixes is not None:
pulumi.set(__self__, "advertised_public_prefixes", advertised_public_prefixes)
if advertised_public_prefixes_state is not None:
pulumi.set(__self__, "advertised_public_prefixes_state", advertised_public_prefixes_state)
if customer_asn is not None:
pulumi.set(__self__, "customer_asn", customer_asn)
if routing_registry_name is not None:
pulumi.set(__self__, "routing_registry_name", routing_registry_name)
@property
@pulumi.getter(name="advertisedPublicPrefixes")
def advertised_public_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The reference of AdvertisedPublicPrefixes.
"""
return pulumi.get(self, "advertised_public_prefixes")
@advertised_public_prefixes.setter
def advertised_public_prefixes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "advertised_public_prefixes", value)
@property
@pulumi.getter(name="advertisedPublicPrefixesState")
def advertised_public_prefixes_state(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringAdvertisedPublicPrefixState']]]:
"""
AdvertisedPublicPrefixState of the Peering resource. Possible values are 'NotConfigured', 'Configuring', 'Configured', and 'ValidationNeeded'.
"""
return pulumi.get(self, "advertised_public_prefixes_state")
@advertised_public_prefixes_state.setter
def advertised_public_prefixes_state(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringAdvertisedPublicPrefixState']]]):
pulumi.set(self, "advertised_public_prefixes_state", value)
@property
@pulumi.getter(name="customerASN")
def customer_asn(self) -> Optional[pulumi.Input[int]]:
"""
The CustomerASN of the peering.
"""
return pulumi.get(self, "customer_asn")
@customer_asn.setter
def customer_asn(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "customer_asn", value)
@property
@pulumi.getter(name="routingRegistryName")
def routing_registry_name(self) -> Optional[pulumi.Input[str]]:
"""
The RoutingRegistryName of the configuration.
"""
return pulumi.get(self, "routing_registry_name")
@routing_registry_name.setter
def routing_registry_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "routing_registry_name", value)
@pulumi.input_type
class ExpressRouteCircuitServiceProviderPropertiesArgs:
def __init__(__self__, *,
bandwidth_in_mbps: Optional[pulumi.Input[int]] = None,
peering_location: Optional[pulumi.Input[str]] = None,
service_provider_name: Optional[pulumi.Input[str]] = None):
"""
Contains ServiceProviderProperties in an ExpressRouteCircuit.
:param pulumi.Input[int] bandwidth_in_mbps: The BandwidthInMbps.
:param pulumi.Input[str] peering_location: The peering location.
:param pulumi.Input[str] service_provider_name: The serviceProviderName.
"""
if bandwidth_in_mbps is not None:
pulumi.set(__self__, "bandwidth_in_mbps", bandwidth_in_mbps)
if peering_location is not None:
pulumi.set(__self__, "peering_location", peering_location)
if service_provider_name is not None:
pulumi.set(__self__, "service_provider_name", service_provider_name)
@property
@pulumi.getter(name="bandwidthInMbps")
def bandwidth_in_mbps(self) -> Optional[pulumi.Input[int]]:
"""
The BandwidthInMbps.
"""
return pulumi.get(self, "bandwidth_in_mbps")
@bandwidth_in_mbps.setter
def bandwidth_in_mbps(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "bandwidth_in_mbps", value)
@property
@pulumi.getter(name="peeringLocation")
def peering_location(self) -> Optional[pulumi.Input[str]]:
"""
The peering location.
"""
return pulumi.get(self, "peering_location")
@peering_location.setter
def peering_location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peering_location", value)
@property
@pulumi.getter(name="serviceProviderName")
def service_provider_name(self) -> Optional[pulumi.Input[str]]:
"""
The serviceProviderName.
"""
return pulumi.get(self, "service_provider_name")
@service_provider_name.setter
def service_provider_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_provider_name", value)
@pulumi.input_type
class ExpressRouteCircuitSkuArgs:
def __init__(__self__, *,
family: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuFamily']]] = None,
name: Optional[pulumi.Input[str]] = None,
tier: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuTier']]] = None):
"""
Contains SKU in an ExpressRouteCircuit.
:param pulumi.Input[Union[str, 'ExpressRouteCircuitSkuFamily']] family: The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'.
:param pulumi.Input[str] name: The name of the SKU.
:param pulumi.Input[Union[str, 'ExpressRouteCircuitSkuTier']] tier: The tier of the SKU. Possible values are 'Standard' and 'Premium'.
"""
if family is not None:
pulumi.set(__self__, "family", family)
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def family(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuFamily']]]:
"""
The family of the SKU. Possible values are: 'UnlimitedData' and 'MeteredData'.
"""
return pulumi.get(self, "family")
@family.setter
def family(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuFamily']]]):
pulumi.set(self, "family", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the SKU.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tier(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuTier']]]:
"""
The tier of the SKU. Possible values are 'Standard' and 'Premium'.
"""
return pulumi.get(self, "tier")
@tier.setter
def tier(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitSkuTier']]]):
pulumi.set(self, "tier", value)
@pulumi.input_type
class ExpressRouteCircuitStatsArgs:
def __init__(__self__, *,
primarybytes_in: Optional[pulumi.Input[float]] = None,
primarybytes_out: Optional[pulumi.Input[float]] = None,
secondarybytes_in: Optional[pulumi.Input[float]] = None,
secondarybytes_out: Optional[pulumi.Input[float]] = None):
"""
Contains stats associated with the peering.
        :param pulumi.Input[float] primarybytes_in: Gets BytesIn of the peering over the primary link.
        :param pulumi.Input[float] primarybytes_out: Gets BytesOut of the peering over the primary link.
        :param pulumi.Input[float] secondarybytes_in: Gets BytesIn of the peering over the secondary link.
        :param pulumi.Input[float] secondarybytes_out: Gets BytesOut of the peering over the secondary link.
"""
if primarybytes_in is not None:
pulumi.set(__self__, "primarybytes_in", primarybytes_in)
if primarybytes_out is not None:
pulumi.set(__self__, "primarybytes_out", primarybytes_out)
if secondarybytes_in is not None:
pulumi.set(__self__, "secondarybytes_in", secondarybytes_in)
if secondarybytes_out is not None:
pulumi.set(__self__, "secondarybytes_out", secondarybytes_out)
@property
@pulumi.getter(name="primarybytesIn")
def primarybytes_in(self) -> Optional[pulumi.Input[float]]:
"""
        Gets BytesIn of the peering over the primary link.
"""
return pulumi.get(self, "primarybytes_in")
@primarybytes_in.setter
def primarybytes_in(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "primarybytes_in", value)
@property
@pulumi.getter(name="primarybytesOut")
def primarybytes_out(self) -> Optional[pulumi.Input[float]]:
"""
        Gets BytesOut of the peering over the primary link.
"""
return pulumi.get(self, "primarybytes_out")
@primarybytes_out.setter
def primarybytes_out(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "primarybytes_out", value)
@property
@pulumi.getter(name="secondarybytesIn")
def secondarybytes_in(self) -> Optional[pulumi.Input[float]]:
"""
        Gets BytesIn of the peering over the secondary link.
"""
return pulumi.get(self, "secondarybytes_in")
@secondarybytes_in.setter
def secondarybytes_in(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "secondarybytes_in", value)
@property
@pulumi.getter(name="secondarybytesOut")
def secondarybytes_out(self) -> Optional[pulumi.Input[float]]:
"""
        Gets BytesOut of the peering over the secondary link.
"""
return pulumi.get(self, "secondarybytes_out")
@secondarybytes_out.setter
def secondarybytes_out(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "secondarybytes_out", value)
@pulumi.input_type
class FrontendIPConfigurationArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_address: Optional[pulumi.Input[str]] = None,
private_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
public_ip_address: Optional[pulumi.Input['PublicIPAddressArgs']] = None,
subnet: Optional[pulumi.Input['SubnetArgs']] = None):
"""
Frontend IP address of the load balancer.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] private_ip_address: The private IP address of the IP configuration.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] private_ip_allocation_method: The Private IP allocation method. Possible values are: 'Static' and 'Dynamic'.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input['PublicIPAddressArgs'] public_ip_address: The reference of the Public IP resource.
:param pulumi.Input['SubnetArgs'] subnet: The reference of the subnet resource.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The private IP address of the IP configuration.
"""
return pulumi.get(self, "private_ip_address")
@private_ip_address.setter
def private_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_ip_address", value)
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
The Private IP allocation method. Possible values are: 'Static' and 'Dynamic'.
"""
return pulumi.get(self, "private_ip_allocation_method")
@private_ip_allocation_method.setter
def private_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "private_ip_allocation_method", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional[pulumi.Input['PublicIPAddressArgs']]:
"""
The reference of the Public IP resource.
"""
return pulumi.get(self, "public_ip_address")
@public_ip_address.setter
def public_ip_address(self, value: Optional[pulumi.Input['PublicIPAddressArgs']]):
pulumi.set(self, "public_ip_address", value)
@property
@pulumi.getter
def subnet(self) -> Optional[pulumi.Input['SubnetArgs']]:
"""
The reference of the subnet resource.
"""
return pulumi.get(self, "subnet")
@subnet.setter
def subnet(self, value: Optional[pulumi.Input['SubnetArgs']]):
pulumi.set(self, "subnet", value)
@pulumi.input_type
class InboundNatPoolArgs:
def __init__(__self__, *,
backend_port: pulumi.Input[int],
frontend_port_range_end: pulumi.Input[int],
frontend_port_range_start: pulumi.Input[int],
protocol: pulumi.Input[Union[str, 'TransportProtocol']],
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configuration: Optional[pulumi.Input['SubResourceArgs']] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Inbound NAT pool of the load balancer.
:param pulumi.Input[int] backend_port: The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535.
:param pulumi.Input[int] frontend_port_range_end: The last port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65535.
:param pulumi.Input[int] frontend_port_range_start: The first port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65534.
:param pulumi.Input[Union[str, 'TransportProtocol']] protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] frontend_ip_configuration: A reference to frontend IP addresses.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
pulumi.set(__self__, "backend_port", backend_port)
pulumi.set(__self__, "frontend_port_range_end", frontend_port_range_end)
pulumi.set(__self__, "frontend_port_range_start", frontend_port_range_start)
pulumi.set(__self__, "protocol", protocol)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> pulumi.Input[int]:
"""
The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535.
"""
return pulumi.get(self, "backend_port")
@backend_port.setter
def backend_port(self, value: pulumi.Input[int]):
pulumi.set(self, "backend_port", value)
@property
@pulumi.getter(name="frontendPortRangeEnd")
def frontend_port_range_end(self) -> pulumi.Input[int]:
"""
The last port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65535.
"""
return pulumi.get(self, "frontend_port_range_end")
@frontend_port_range_end.setter
def frontend_port_range_end(self, value: pulumi.Input[int]):
pulumi.set(self, "frontend_port_range_end", value)
@property
@pulumi.getter(name="frontendPortRangeStart")
def frontend_port_range_start(self) -> pulumi.Input[int]:
"""
The first port number in the range of external ports that will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range between 1 and 65534.
"""
return pulumi.get(self, "frontend_port_range_start")
@frontend_port_range_start.setter
def frontend_port_range_start(self, value: pulumi.Input[int]):
pulumi.set(self, "frontend_port_range_start", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[Union[str, 'TransportProtocol']]:
"""
The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[Union[str, 'TransportProtocol']]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
A reference to frontend IP addresses.
"""
return pulumi.get(self, "frontend_ip_configuration")
@frontend_ip_configuration.setter
def frontend_ip_configuration(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "frontend_ip_configuration", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class InboundNatRuleArgs:
def __init__(__self__, *,
backend_port: Optional[pulumi.Input[int]] = None,
enable_floating_ip: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configuration: Optional[pulumi.Input['SubResourceArgs']] = None,
frontend_port: Optional[pulumi.Input[int]] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[Union[str, 'TransportProtocol']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Inbound NAT rule of the load balancer.
:param pulumi.Input[int] backend_port: The port used for the internal endpoint. Acceptable values range from 1 to 65535.
        :param pulumi.Input[bool] enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] frontend_ip_configuration: A reference to frontend IP addresses.
:param pulumi.Input[int] frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values range from 1 to 65534.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
        :param pulumi.Input[str] name: Gets the name of the resource that is unique within a resource group. This name can be used to access the resource.
        :param pulumi.Input[Union[str, 'TransportProtocol']] protocol: The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
if backend_port is not None:
pulumi.set(__self__, "backend_port", backend_port)
if enable_floating_ip is not None:
pulumi.set(__self__, "enable_floating_ip", enable_floating_ip)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if frontend_port is not None:
pulumi.set(__self__, "frontend_port", frontend_port)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if name is not None:
pulumi.set(__self__, "name", name)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> Optional[pulumi.Input[int]]:
"""
The port used for the internal endpoint. Acceptable values range from 1 to 65535.
"""
return pulumi.get(self, "backend_port")
@backend_port.setter
def backend_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "backend_port", value)
@property
@pulumi.getter(name="enableFloatingIP")
def enable_floating_ip(self) -> Optional[pulumi.Input[bool]]:
"""
        Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint.
"""
return pulumi.get(self, "enable_floating_ip")
@enable_floating_ip.setter
def enable_floating_ip(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_floating_ip", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
A reference to frontend IP addresses.
"""
return pulumi.get(self, "frontend_ip_configuration")
@frontend_ip_configuration.setter
def frontend_ip_configuration(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "frontend_ip_configuration", value)
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> Optional[pulumi.Input[int]]:
"""
The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values range from 1 to 65534.
"""
return pulumi.get(self, "frontend_port")
@frontend_port.setter
def frontend_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "frontend_port", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@idle_timeout_in_minutes.setter
def idle_timeout_in_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "idle_timeout_in_minutes", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
        Gets the name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[Union[str, 'TransportProtocol']]]:
"""
        The transport protocol for the endpoint. Possible values are: 'Udp' or 'Tcp'.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[Union[str, 'TransportProtocol']]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class IpsecPolicyArgs:
def __init__(__self__, *,
dh_group: pulumi.Input[Union[str, 'DhGroup']],
ike_encryption: pulumi.Input[Union[str, 'IkeEncryption']],
ike_integrity: pulumi.Input[Union[str, 'IkeIntegrity']],
ipsec_encryption: pulumi.Input[Union[str, 'IpsecEncryption']],
ipsec_integrity: pulumi.Input[Union[str, 'IpsecIntegrity']],
pfs_group: pulumi.Input[Union[str, 'PfsGroup']],
sa_data_size_kilobytes: pulumi.Input[int],
sa_life_time_seconds: pulumi.Input[int]):
"""
        An IPSec Policy configuration for a virtual network gateway connection.
:param pulumi.Input[Union[str, 'DhGroup']] dh_group: The DH Groups used in IKE Phase 1 for initial SA.
:param pulumi.Input[Union[str, 'IkeEncryption']] ike_encryption: The IKE encryption algorithm (IKE phase 2).
:param pulumi.Input[Union[str, 'IkeIntegrity']] ike_integrity: The IKE integrity algorithm (IKE phase 2).
:param pulumi.Input[Union[str, 'IpsecEncryption']] ipsec_encryption: The IPSec encryption algorithm (IKE phase 1).
:param pulumi.Input[Union[str, 'IpsecIntegrity']] ipsec_integrity: The IPSec integrity algorithm (IKE phase 1).
:param pulumi.Input[Union[str, 'PfsGroup']] pfs_group: The DH Groups used in IKE Phase 2 for new child SA.
:param pulumi.Input[int] sa_data_size_kilobytes: The IPSec Security Association (also called Quick Mode or Phase 2 SA) payload size in KB for a site to site VPN tunnel.
:param pulumi.Input[int] sa_life_time_seconds: The IPSec Security Association (also called Quick Mode or Phase 2 SA) lifetime in seconds for a site to site VPN tunnel.
"""
pulumi.set(__self__, "dh_group", dh_group)
pulumi.set(__self__, "ike_encryption", ike_encryption)
pulumi.set(__self__, "ike_integrity", ike_integrity)
pulumi.set(__self__, "ipsec_encryption", ipsec_encryption)
pulumi.set(__self__, "ipsec_integrity", ipsec_integrity)
pulumi.set(__self__, "pfs_group", pfs_group)
pulumi.set(__self__, "sa_data_size_kilobytes", sa_data_size_kilobytes)
pulumi.set(__self__, "sa_life_time_seconds", sa_life_time_seconds)
@property
@pulumi.getter(name="dhGroup")
def dh_group(self) -> pulumi.Input[Union[str, 'DhGroup']]:
"""
The DH Groups used in IKE Phase 1 for initial SA.
"""
return pulumi.get(self, "dh_group")
@dh_group.setter
def dh_group(self, value: pulumi.Input[Union[str, 'DhGroup']]):
pulumi.set(self, "dh_group", value)
@property
@pulumi.getter(name="ikeEncryption")
def ike_encryption(self) -> pulumi.Input[Union[str, 'IkeEncryption']]:
"""
The IKE encryption algorithm (IKE phase 2).
"""
return pulumi.get(self, "ike_encryption")
@ike_encryption.setter
def ike_encryption(self, value: pulumi.Input[Union[str, 'IkeEncryption']]):
pulumi.set(self, "ike_encryption", value)
@property
@pulumi.getter(name="ikeIntegrity")
def ike_integrity(self) -> pulumi.Input[Union[str, 'IkeIntegrity']]:
"""
The IKE integrity algorithm (IKE phase 2).
"""
return pulumi.get(self, "ike_integrity")
@ike_integrity.setter
def ike_integrity(self, value: pulumi.Input[Union[str, 'IkeIntegrity']]):
pulumi.set(self, "ike_integrity", value)
@property
@pulumi.getter(name="ipsecEncryption")
def ipsec_encryption(self) -> pulumi.Input[Union[str, 'IpsecEncryption']]:
"""
The IPSec encryption algorithm (IKE phase 1).
"""
return pulumi.get(self, "ipsec_encryption")
@ipsec_encryption.setter
def ipsec_encryption(self, value: pulumi.Input[Union[str, 'IpsecEncryption']]):
pulumi.set(self, "ipsec_encryption", value)
@property
@pulumi.getter(name="ipsecIntegrity")
def ipsec_integrity(self) -> pulumi.Input[Union[str, 'IpsecIntegrity']]:
"""
The IPSec integrity algorithm (IKE phase 1).
"""
return pulumi.get(self, "ipsec_integrity")
@ipsec_integrity.setter
def ipsec_integrity(self, value: pulumi.Input[Union[str, 'IpsecIntegrity']]):
pulumi.set(self, "ipsec_integrity", value)
@property
@pulumi.getter(name="pfsGroup")
def pfs_group(self) -> pulumi.Input[Union[str, 'PfsGroup']]:
"""
The DH Groups used in IKE Phase 2 for new child SA.
"""
return pulumi.get(self, "pfs_group")
@pfs_group.setter
def pfs_group(self, value: pulumi.Input[Union[str, 'PfsGroup']]):
pulumi.set(self, "pfs_group", value)
@property
@pulumi.getter(name="saDataSizeKilobytes")
def sa_data_size_kilobytes(self) -> pulumi.Input[int]:
"""
The IPSec Security Association (also called Quick Mode or Phase 2 SA) payload size in KB for a site to site VPN tunnel.
"""
return pulumi.get(self, "sa_data_size_kilobytes")
@sa_data_size_kilobytes.setter
def sa_data_size_kilobytes(self, value: pulumi.Input[int]):
pulumi.set(self, "sa_data_size_kilobytes", value)
@property
@pulumi.getter(name="saLifeTimeSeconds")
def sa_life_time_seconds(self) -> pulumi.Input[int]:
"""
The IPSec Security Association (also called Quick Mode or Phase 2 SA) lifetime in seconds for a site to site VPN tunnel.
"""
return pulumi.get(self, "sa_life_time_seconds")
@sa_life_time_seconds.setter
def sa_life_time_seconds(self, value: pulumi.Input[int]):
pulumi.set(self, "sa_life_time_seconds", value)
@pulumi.input_type
class Ipv6ExpressRouteCircuitPeeringConfigArgs:
def __init__(__self__, *,
microsoft_peering_config: Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']] = None,
primary_peer_address_prefix: Optional[pulumi.Input[str]] = None,
route_filter: Optional[pulumi.Input['RouteFilterArgs']] = None,
secondary_peer_address_prefix: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]] = None):
"""
Contains IPv6 peering config.
:param pulumi.Input['ExpressRouteCircuitPeeringConfigArgs'] microsoft_peering_config: The Microsoft peering configuration.
:param pulumi.Input[str] primary_peer_address_prefix: The primary address prefix.
:param pulumi.Input['RouteFilterArgs'] route_filter: The reference of the RouteFilter resource.
:param pulumi.Input[str] secondary_peer_address_prefix: The secondary address prefix.
        :param pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']] state: The state of peering. Possible values are: 'Disabled' and 'Enabled'.
"""
if microsoft_peering_config is not None:
pulumi.set(__self__, "microsoft_peering_config", microsoft_peering_config)
if primary_peer_address_prefix is not None:
pulumi.set(__self__, "primary_peer_address_prefix", primary_peer_address_prefix)
if route_filter is not None:
pulumi.set(__self__, "route_filter", route_filter)
if secondary_peer_address_prefix is not None:
pulumi.set(__self__, "secondary_peer_address_prefix", secondary_peer_address_prefix)
if state is not None:
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="microsoftPeeringConfig")
def microsoft_peering_config(self) -> Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']]:
"""
The Microsoft peering configuration.
"""
return pulumi.get(self, "microsoft_peering_config")
@microsoft_peering_config.setter
def microsoft_peering_config(self, value: Optional[pulumi.Input['ExpressRouteCircuitPeeringConfigArgs']]):
pulumi.set(self, "microsoft_peering_config", value)
@property
@pulumi.getter(name="primaryPeerAddressPrefix")
def primary_peer_address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The primary address prefix.
"""
return pulumi.get(self, "primary_peer_address_prefix")
@primary_peer_address_prefix.setter
def primary_peer_address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "primary_peer_address_prefix", value)
@property
@pulumi.getter(name="routeFilter")
def route_filter(self) -> Optional[pulumi.Input['RouteFilterArgs']]:
"""
The reference of the RouteFilter resource.
"""
return pulumi.get(self, "route_filter")
@route_filter.setter
def route_filter(self, value: Optional[pulumi.Input['RouteFilterArgs']]):
pulumi.set(self, "route_filter", value)
@property
@pulumi.getter(name="secondaryPeerAddressPrefix")
def secondary_peer_address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The secondary address prefix.
"""
return pulumi.get(self, "secondary_peer_address_prefix")
@secondary_peer_address_prefix.setter
def secondary_peer_address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "secondary_peer_address_prefix", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]]:
"""
        The state of peering. Possible values are: 'Disabled' and 'Enabled'.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[Union[str, 'ExpressRouteCircuitPeeringState']]]):
pulumi.set(self, "state", value)
@pulumi.input_type
class LoadBalancingRuleArgs:
def __init__(__self__, *,
frontend_port: pulumi.Input[int],
protocol: pulumi.Input[Union[str, 'TransportProtocol']],
backend_address_pool: Optional[pulumi.Input['SubResourceArgs']] = None,
backend_port: Optional[pulumi.Input[int]] = None,
enable_floating_ip: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configuration: Optional[pulumi.Input['SubResourceArgs']] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
load_distribution: Optional[pulumi.Input[Union[str, 'LoadDistribution']]] = None,
name: Optional[pulumi.Input[str]] = None,
probe: Optional[pulumi.Input['SubResourceArgs']] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
A load balancing rule for a load balancer.
:param pulumi.Input[int] frontend_port: The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 1 and 65534.
        :param pulumi.Input[Union[str, 'TransportProtocol']] protocol: The transport protocol for the external endpoint. Possible values are: 'Udp' or 'Tcp'.
        :param pulumi.Input['SubResourceArgs'] backend_address_pool: A reference to a pool of DIPs. Inbound traffic is randomly load balanced across the IPs in the backend pool.
:param pulumi.Input[int] backend_port: The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535.
        :param pulumi.Input[bool] enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] frontend_ip_configuration: A reference to frontend IP addresses.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
:param pulumi.Input[Union[str, 'LoadDistribution']] load_distribution: The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and 'SourceIPProtocol'.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input['SubResourceArgs'] probe: The reference of the load balancer probe used by the load balancing rule.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
pulumi.set(__self__, "frontend_port", frontend_port)
pulumi.set(__self__, "protocol", protocol)
if backend_address_pool is not None:
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if backend_port is not None:
pulumi.set(__self__, "backend_port", backend_port)
if enable_floating_ip is not None:
pulumi.set(__self__, "enable_floating_ip", enable_floating_ip)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if load_distribution is not None:
pulumi.set(__self__, "load_distribution", load_distribution)
if name is not None:
pulumi.set(__self__, "name", name)
if probe is not None:
pulumi.set(__self__, "probe", probe)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> pulumi.Input[int]:
"""
The port for the external endpoint. Port numbers for each rule must be unique within the Load Balancer. Acceptable values are between 1 and 65534.
"""
return pulumi.get(self, "frontend_port")
@frontend_port.setter
def frontend_port(self, value: pulumi.Input[int]):
pulumi.set(self, "frontend_port", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[Union[str, 'TransportProtocol']]:
"""
        The transport protocol for the external endpoint. Possible values are: 'Udp' or 'Tcp'.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[Union[str, 'TransportProtocol']]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
        A reference to a pool of DIPs. Inbound traffic is randomly load balanced across the IPs in the backend pool.
"""
return pulumi.get(self, "backend_address_pool")
@backend_address_pool.setter
def backend_address_pool(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "backend_address_pool", value)
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> Optional[pulumi.Input[int]]:
"""
The port used for internal connections on the endpoint. Acceptable values are between 1 and 65535.
"""
return pulumi.get(self, "backend_port")
@backend_port.setter
def backend_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "backend_port", value)
@property
@pulumi.getter(name="enableFloatingIP")
def enable_floating_ip(self) -> Optional[pulumi.Input[bool]]:
"""
        Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using the SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint.
"""
return pulumi.get(self, "enable_floating_ip")
@enable_floating_ip.setter
def enable_floating_ip(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_floating_ip", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
A reference to frontend IP addresses.
"""
return pulumi.get(self, "frontend_ip_configuration")
@frontend_ip_configuration.setter
def frontend_ip_configuration(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "frontend_ip_configuration", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The timeout for the TCP idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to TCP.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@idle_timeout_in_minutes.setter
def idle_timeout_in_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "idle_timeout_in_minutes", value)
@property
@pulumi.getter(name="loadDistribution")
def load_distribution(self) -> Optional[pulumi.Input[Union[str, 'LoadDistribution']]]:
"""
The load distribution policy for this rule. Possible values are 'Default', 'SourceIP', and 'SourceIPProtocol'.
"""
return pulumi.get(self, "load_distribution")
@load_distribution.setter
def load_distribution(self, value: Optional[pulumi.Input[Union[str, 'LoadDistribution']]]):
pulumi.set(self, "load_distribution", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def probe(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the load balancer probe used by the load balancing rule.
"""
return pulumi.get(self, "probe")
@probe.setter
def probe(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "probe", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class LocalNetworkGatewayArgs:
def __init__(__self__, *,
bgp_settings: Optional[pulumi.Input['BgpSettingsArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
gateway_ip_address: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
local_network_address_space: Optional[pulumi.Input['AddressSpaceArgs']] = None,
location: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
        A common class for general resource information.
:param pulumi.Input['BgpSettingsArgs'] bgp_settings: Local network gateway's BGP speaker settings.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] gateway_ip_address: IP address of local network gateway.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input['AddressSpaceArgs'] local_network_address_space: Local network site address space.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_guid: The resource GUID property of the LocalNetworkGateway resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if bgp_settings is not None:
pulumi.set(__self__, "bgp_settings", bgp_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if gateway_ip_address is not None:
pulumi.set(__self__, "gateway_ip_address", gateway_ip_address)
if id is not None:
pulumi.set(__self__, "id", id)
if local_network_address_space is not None:
pulumi.set(__self__, "local_network_address_space", local_network_address_space)
if location is not None:
pulumi.set(__self__, "location", location)
if resource_guid is not None:
pulumi.set(__self__, "resource_guid", resource_guid)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="bgpSettings")
def bgp_settings(self) -> Optional[pulumi.Input['BgpSettingsArgs']]:
"""
Local network gateway's BGP speaker settings.
"""
return pulumi.get(self, "bgp_settings")
@bgp_settings.setter
def bgp_settings(self, value: Optional[pulumi.Input['BgpSettingsArgs']]):
pulumi.set(self, "bgp_settings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="gatewayIpAddress")
def gateway_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
IP address of local network gateway.
"""
return pulumi.get(self, "gateway_ip_address")
@gateway_ip_address.setter
def gateway_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "gateway_ip_address", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="localNetworkAddressSpace")
def local_network_address_space(self) -> Optional[pulumi.Input['AddressSpaceArgs']]:
"""
Local network site address space.
"""
return pulumi.get(self, "local_network_address_space")
@local_network_address_space.setter
def local_network_address_space(self, value: Optional[pulumi.Input['AddressSpaceArgs']]):
pulumi.set(self, "local_network_address_space", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[pulumi.Input[str]]:
"""
The resource GUID property of the LocalNetworkGateway resource.
"""
return pulumi.get(self, "resource_guid")
@resource_guid.setter
def resource_guid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_guid", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class MonitorConfigArgs:
def __init__(__self__, *,
path: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[float]] = None,
profile_monitor_status: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None):
"""
Class containing endpoint monitoring settings in a Traffic Manager profile.
:param pulumi.Input[str] path: Gets or sets the path relative to the endpoint domain name used to probe for endpoint health.
:param pulumi.Input[float] port: Gets or sets the TCP port used to probe for endpoint health.
:param pulumi.Input[str] profile_monitor_status: Gets or sets the profile-level monitoring status of the Traffic Manager profile.
:param pulumi.Input[str] protocol: Gets or sets the protocol (HTTP or HTTPS) used to probe for endpoint health.
"""
if path is not None:
pulumi.set(__self__, "path", path)
if port is not None:
pulumi.set(__self__, "port", port)
if profile_monitor_status is not None:
pulumi.set(__self__, "profile_monitor_status", profile_monitor_status)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
@property
@pulumi.getter
def path(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the path relative to the endpoint domain name used to probe for endpoint health.
"""
return pulumi.get(self, "path")
@path.setter
def path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "path", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[float]]:
"""
Gets or sets the TCP port used to probe for endpoint health.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="profileMonitorStatus")
def profile_monitor_status(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the profile-level monitoring status of the Traffic Manager profile.
"""
return pulumi.get(self, "profile_monitor_status")
@profile_monitor_status.setter
def profile_monitor_status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "profile_monitor_status", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[str]]:
"""
Gets or sets the protocol (HTTP or HTTPS) used to probe for endpoint health.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protocol", value)
@pulumi.input_type
class NetworkInterfaceDnsSettingsArgs:
def __init__(__self__, *,
applied_dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
internal_dns_name_label: Optional[pulumi.Input[str]] = None,
internal_domain_name_suffix: Optional[pulumi.Input[str]] = None,
internal_fqdn: Optional[pulumi.Input[str]] = None):
"""
DNS settings of a network interface.
:param pulumi.Input[Sequence[pulumi.Input[str]]] applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then this list will have the union of all DNS servers from all NICs that are part of the Availability Set. This property is what is configured on each of those VMs.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: List of DNS server IP addresses. Use 'AzureProvidedDNS' to switch to Azure-provided DNS resolution. The 'AzureProvidedDNS' value cannot be combined with other IPs; it must be the only value in the dnsServers collection.
:param pulumi.Input[str] internal_dns_name_label: Relative DNS name for this NIC used for internal communications between VMs in the same virtual network.
:param pulumi.Input[str] internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of internalDomainNameSuffix.
:param pulumi.Input[str] internal_fqdn: Fully qualified DNS name supporting internal communications between VMs in the same virtual network.
"""
if applied_dns_servers is not None:
pulumi.set(__self__, "applied_dns_servers", applied_dns_servers)
if dns_servers is not None:
pulumi.set(__self__, "dns_servers", dns_servers)
if internal_dns_name_label is not None:
pulumi.set(__self__, "internal_dns_name_label", internal_dns_name_label)
if internal_domain_name_suffix is not None:
pulumi.set(__self__, "internal_domain_name_suffix", internal_domain_name_suffix)
if internal_fqdn is not None:
pulumi.set(__self__, "internal_fqdn", internal_fqdn)
@property
@pulumi.getter(name="appliedDnsServers")
def applied_dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
If the VM that uses this NIC is part of an Availability Set, then this list will have the union of all DNS servers from all NICs that are part of the Availability Set. This property is what is configured on each of those VMs.
"""
return pulumi.get(self, "applied_dns_servers")
@applied_dns_servers.setter
def applied_dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "applied_dns_servers", value)
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
        List of DNS server IP addresses. Use 'AzureProvidedDNS' to switch to Azure-provided DNS resolution. The 'AzureProvidedDNS' value cannot be combined with other IPs; it must be the only value in the dnsServers collection.
"""
return pulumi.get(self, "dns_servers")
@dns_servers.setter
def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "dns_servers", value)
@property
@pulumi.getter(name="internalDnsNameLabel")
def internal_dns_name_label(self) -> Optional[pulumi.Input[str]]:
"""
Relative DNS name for this NIC used for internal communications between VMs in the same virtual network.
"""
return pulumi.get(self, "internal_dns_name_label")
@internal_dns_name_label.setter
def internal_dns_name_label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_dns_name_label", value)
@property
@pulumi.getter(name="internalDomainNameSuffix")
def internal_domain_name_suffix(self) -> Optional[pulumi.Input[str]]:
"""
Even if internalDnsNameLabel is not specified, a DNS entry is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of internalDomainNameSuffix.
"""
return pulumi.get(self, "internal_domain_name_suffix")
@internal_domain_name_suffix.setter
def internal_domain_name_suffix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_domain_name_suffix", value)
@property
@pulumi.getter(name="internalFqdn")
def internal_fqdn(self) -> Optional[pulumi.Input[str]]:
"""
Fully qualified DNS name supporting internal communications between VMs in the same virtual network.
"""
return pulumi.get(self, "internal_fqdn")
@internal_fqdn.setter
def internal_fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_fqdn", value)
@pulumi.input_type
class NetworkInterfaceIPConfigurationArgs:
def __init__(__self__, *,
application_gateway_backend_address_pools: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressPoolArgs']]]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
load_balancer_backend_address_pools: Optional[pulumi.Input[Sequence[pulumi.Input['BackendAddressPoolArgs']]]] = None,
load_balancer_inbound_nat_rules: Optional[pulumi.Input[Sequence[pulumi.Input['InboundNatRuleArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
primary: Optional[pulumi.Input[bool]] = None,
private_ip_address: Optional[pulumi.Input[str]] = None,
private_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
private_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
public_ip_address: Optional[pulumi.Input['PublicIPAddressArgs']] = None,
subnet: Optional[pulumi.Input['SubnetArgs']] = None):
"""
IPConfiguration in a network interface.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressPoolArgs']]] application_gateway_backend_address_pools: The reference of ApplicationGatewayBackendAddressPool resource.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input['BackendAddressPoolArgs']]] load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool resource.
:param pulumi.Input[Sequence[pulumi.Input['InboundNatRuleArgs']]] load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
        :param pulumi.Input[bool] primary: Gets whether this is a primary customer address on the network interface.
        :param pulumi.Input[str] private_ip_address: The private IP address of the IP configuration.
        :param pulumi.Input[Union[str, 'IPVersion']] private_ip_address_version: Available from API version 2016-03-30 onwards, it represents whether the specific IP configuration is IPv4 or IPv6. Default is taken as IPv4. Possible values are: 'IPv4' and 'IPv6'.
        :param pulumi.Input[Union[str, 'IPAllocationMethod']] private_ip_allocation_method: Defines how a private IP address is assigned. Possible values are: 'Static' and 'Dynamic'.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the IP configuration.
        :param pulumi.Input['PublicIPAddressArgs'] public_ip_address: Public IP address resource.
:param pulumi.Input['SubnetArgs'] subnet: Subnet in a virtual network resource.
"""
if application_gateway_backend_address_pools is not None:
pulumi.set(__self__, "application_gateway_backend_address_pools", application_gateway_backend_address_pools)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if load_balancer_backend_address_pools is not None:
pulumi.set(__self__, "load_balancer_backend_address_pools", load_balancer_backend_address_pools)
if load_balancer_inbound_nat_rules is not None:
pulumi.set(__self__, "load_balancer_inbound_nat_rules", load_balancer_inbound_nat_rules)
if name is not None:
pulumi.set(__self__, "name", name)
if primary is not None:
pulumi.set(__self__, "primary", primary)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_address_version is not None:
pulumi.set(__self__, "private_ip_address_version", private_ip_address_version)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter(name="applicationGatewayBackendAddressPools")
def application_gateway_backend_address_pools(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressPoolArgs']]]]:
"""
The reference of ApplicationGatewayBackendAddressPool resource.
"""
return pulumi.get(self, "application_gateway_backend_address_pools")
@application_gateway_backend_address_pools.setter
def application_gateway_backend_address_pools(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationGatewayBackendAddressPoolArgs']]]]):
pulumi.set(self, "application_gateway_backend_address_pools", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="loadBalancerBackendAddressPools")
def load_balancer_backend_address_pools(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BackendAddressPoolArgs']]]]:
"""
The reference of LoadBalancerBackendAddressPool resource.
"""
return pulumi.get(self, "load_balancer_backend_address_pools")
@load_balancer_backend_address_pools.setter
def load_balancer_backend_address_pools(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BackendAddressPoolArgs']]]]):
pulumi.set(self, "load_balancer_backend_address_pools", value)
@property
@pulumi.getter(name="loadBalancerInboundNatRules")
def load_balancer_inbound_nat_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InboundNatRuleArgs']]]]:
"""
A list of references of LoadBalancerInboundNatRules.
"""
return pulumi.get(self, "load_balancer_inbound_nat_rules")
@load_balancer_inbound_nat_rules.setter
def load_balancer_inbound_nat_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['InboundNatRuleArgs']]]]):
pulumi.set(self, "load_balancer_inbound_nat_rules", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def primary(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether this is a primary customer address on the network interface.
"""
return pulumi.get(self, "primary")
@primary.setter
def primary(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "primary", value)
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "private_ip_address")
@private_ip_address.setter
def private_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_ip_address", value)
@property
@pulumi.getter(name="privateIPAddressVersion")
def private_ip_address_version(self) -> Optional[pulumi.Input[Union[str, 'IPVersion']]]:
"""
        Available from API version 2016-03-30 onwards; represents whether the specific IP configuration is IPv4 or IPv6. Default is IPv4. Possible values are: 'IPv4' and 'IPv6'.
"""
return pulumi.get(self, "private_ip_address_version")
@private_ip_address_version.setter
def private_ip_address_version(self, value: Optional[pulumi.Input[Union[str, 'IPVersion']]]):
pulumi.set(self, "private_ip_address_version", value)
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
Defines how a private IP address is assigned. Possible values are: 'Static' and 'Dynamic'.
"""
return pulumi.get(self, "private_ip_allocation_method")
@private_ip_allocation_method.setter
def private_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "private_ip_allocation_method", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional[pulumi.Input['PublicIPAddressArgs']]:
"""
Public IP address resource.
"""
return pulumi.get(self, "public_ip_address")
@public_ip_address.setter
def public_ip_address(self, value: Optional[pulumi.Input['PublicIPAddressArgs']]):
pulumi.set(self, "public_ip_address", value)
@property
@pulumi.getter
def subnet(self) -> Optional[pulumi.Input['SubnetArgs']]:
"""
Subnet in a virtual network resource.
"""
return pulumi.get(self, "subnet")
@subnet.setter
def subnet(self, value: Optional[pulumi.Input['SubnetArgs']]):
pulumi.set(self, "subnet", value)
@pulumi.input_type
class NetworkSecurityGroupArgs:
def __init__(__self__, *,
default_security_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
security_rules: Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
NetworkSecurityGroup resource.
:param pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]] default_security_rules: The default security rules of network security group.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the network security group resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_guid: The resource GUID property of the network security group resource.
:param pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]] security_rules: A collection of security rules of the network security group.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if default_security_rules is not None:
pulumi.set(__self__, "default_security_rules", default_security_rules)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if location is not None:
pulumi.set(__self__, "location", location)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if resource_guid is not None:
pulumi.set(__self__, "resource_guid", resource_guid)
if security_rules is not None:
pulumi.set(__self__, "security_rules", security_rules)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="defaultSecurityRules")
def default_security_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]]:
"""
The default security rules of network security group.
"""
return pulumi.get(self, "default_security_rules")
@default_security_rules.setter
def default_security_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]]):
pulumi.set(self, "default_security_rules", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
        The provisioning state of the network security group resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[pulumi.Input[str]]:
"""
The resource GUID property of the network security group resource.
"""
return pulumi.get(self, "resource_guid")
@resource_guid.setter
def resource_guid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_guid", value)
@property
@pulumi.getter(name="securityRules")
def security_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]]:
"""
A collection of security rules of the network security group.
"""
return pulumi.get(self, "security_rules")
@security_rules.setter
def security_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SecurityRuleArgs']]]]):
pulumi.set(self, "security_rules", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class OutboundNatRuleArgs:
def __init__(__self__, *,
backend_address_pool: pulumi.Input['SubResourceArgs'],
allocated_outbound_ports: Optional[pulumi.Input[int]] = None,
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Outbound NAT pool of the load balancer.
        :param pulumi.Input['SubResourceArgs'] backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load balanced across the IPs in the backend pool.
:param pulumi.Input[int] allocated_outbound_ports: The number of outbound ports to be used for NAT.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]] frontend_ip_configurations: The Frontend IP addresses of the load balancer.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the outbound NAT rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if allocated_outbound_ports is not None:
pulumi.set(__self__, "allocated_outbound_ports", allocated_outbound_ports)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configurations is not None:
pulumi.set(__self__, "frontend_ip_configurations", frontend_ip_configurations)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> pulumi.Input['SubResourceArgs']:
"""
        A reference to a pool of DIPs. Outbound traffic is randomly load balanced across the IPs in the backend pool.
"""
return pulumi.get(self, "backend_address_pool")
@backend_address_pool.setter
def backend_address_pool(self, value: pulumi.Input['SubResourceArgs']):
pulumi.set(self, "backend_address_pool", value)
@property
@pulumi.getter(name="allocatedOutboundPorts")
def allocated_outbound_ports(self) -> Optional[pulumi.Input[int]]:
"""
The number of outbound ports to be used for NAT.
"""
return pulumi.get(self, "allocated_outbound_ports")
@allocated_outbound_ports.setter
def allocated_outbound_ports(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "allocated_outbound_ports", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="frontendIPConfigurations")
def frontend_ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]:
"""
The Frontend IP addresses of the load balancer.
"""
return pulumi.get(self, "frontend_ip_configurations")
@frontend_ip_configurations.setter
def frontend_ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]):
pulumi.set(self, "frontend_ip_configurations", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
        The provisioning state of the outbound NAT rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class PacketCaptureFilterArgs:
def __init__(__self__, *,
local_ip_address: Optional[pulumi.Input[str]] = None,
local_port: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[Union[str, 'PcProtocol']]] = None,
remote_ip_address: Optional[pulumi.Input[str]] = None,
remote_port: Optional[pulumi.Input[str]] = None):
"""
Filter that is applied to packet capture request. Multiple filters can be applied.
        :param pulumi.Input[str] local_ip_address: Local IP address to be filtered on. Notation: "127.0.0.1" for single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
        :param pulumi.Input[str] local_port: Local port to be filtered on. Notation: "80" for single port entry. "80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
        :param pulumi.Input[Union[str, 'PcProtocol']] protocol: Protocol to be filtered on.
        :param pulumi.Input[str] remote_ip_address: Remote IP address to be filtered on. Notation: "127.0.0.1" for single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
        :param pulumi.Input[str] remote_port: Remote port to be filtered on. Notation: "80" for single port entry. "80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
"""
if local_ip_address is not None:
pulumi.set(__self__, "local_ip_address", local_ip_address)
if local_port is not None:
pulumi.set(__self__, "local_port", local_port)
if protocol is None:
protocol = 'Any'
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if remote_ip_address is not None:
pulumi.set(__self__, "remote_ip_address", remote_ip_address)
if remote_port is not None:
pulumi.set(__self__, "remote_port", remote_port)
@property
@pulumi.getter(name="localIPAddress")
def local_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
        Local IP address to be filtered on. Notation: "127.0.0.1" for single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
"""
return pulumi.get(self, "local_ip_address")
@local_ip_address.setter
def local_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "local_ip_address", value)
@property
@pulumi.getter(name="localPort")
def local_port(self) -> Optional[pulumi.Input[str]]:
"""
        Local port to be filtered on. Notation: "80" for single port entry. "80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
"""
return pulumi.get(self, "local_port")
@local_port.setter
def local_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "local_port", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[Union[str, 'PcProtocol']]]:
"""
Protocol to be filtered on.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[Union[str, 'PcProtocol']]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="remoteIPAddress")
def remote_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
        Remote IP address to be filtered on. Notation: "127.0.0.1" for single address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
"""
return pulumi.get(self, "remote_ip_address")
@remote_ip_address.setter
def remote_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "remote_ip_address", value)
@property
@pulumi.getter(name="remotePort")
def remote_port(self) -> Optional[pulumi.Input[str]]:
"""
        Remote port to be filtered on. Notation: "80" for single port entry. "80-85" for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing ranges with multiple entries not currently supported. Default = null.
"""
return pulumi.get(self, "remote_port")
@remote_port.setter
def remote_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "remote_port", value)
@pulumi.input_type
class PacketCaptureStorageLocationArgs:
def __init__(__self__, *,
file_path: Optional[pulumi.Input[str]] = None,
storage_id: Optional[pulumi.Input[str]] = None,
storage_path: Optional[pulumi.Input[str]] = None):
"""
Describes the storage location for a packet capture session.
        :param pulumi.Input[str] file_path: A valid local path on the target VM. Must include the name of the capture file (*.cap). For Linux virtual machines, it must start with /var/captures. Required if no storage ID is provided, otherwise optional.
:param pulumi.Input[str] storage_id: The ID of the storage account to save the packet capture session. Required if no local file path is provided.
:param pulumi.Input[str] storage_path: The URI of the storage path to save the packet capture. Must be a well-formed URI describing the location to save the packet capture.
"""
if file_path is not None:
pulumi.set(__self__, "file_path", file_path)
if storage_id is not None:
pulumi.set(__self__, "storage_id", storage_id)
if storage_path is not None:
pulumi.set(__self__, "storage_path", storage_path)
@property
@pulumi.getter(name="filePath")
def file_path(self) -> Optional[pulumi.Input[str]]:
"""
        A valid local path on the target VM. Must include the name of the capture file (*.cap). For Linux virtual machines, it must start with /var/captures. Required if no storage ID is provided, otherwise optional.
"""
return pulumi.get(self, "file_path")
@file_path.setter
def file_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "file_path", value)
@property
@pulumi.getter(name="storageId")
def storage_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the storage account to save the packet capture session. Required if no local file path is provided.
"""
return pulumi.get(self, "storage_id")
@storage_id.setter
def storage_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_id", value)
@property
@pulumi.getter(name="storagePath")
def storage_path(self) -> Optional[pulumi.Input[str]]:
"""
The URI of the storage path to save the packet capture. Must be a well-formed URI describing the location to save the packet capture.
"""
return pulumi.get(self, "storage_path")
@storage_path.setter
def storage_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_path", value)
@pulumi.input_type
class ProbeArgs:
def __init__(__self__, *,
port: pulumi.Input[int],
protocol: pulumi.Input[Union[str, 'ProbeProtocol']],
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
interval_in_seconds: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
number_of_probes: Optional[pulumi.Input[int]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
request_path: Optional[pulumi.Input[str]] = None):
"""
A load balancer probe.
:param pulumi.Input[int] port: The port for communicating the probe. Possible values range from 1 to 65535, inclusive.
        :param pulumi.Input[Union[str, 'ProbeProtocol']] protocol: The protocol of the endpoint. Possible values are: 'Http' or 'Tcp'. If 'Tcp' is specified, a received ACK is required for the probe to be successful. If 'Http' is specified, a 200 OK response from the specified URI is required for the probe to be successful.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] interval_in_seconds: The interval, in seconds, for how frequently to probe the endpoint for health status. Typically, the interval is slightly less than half the allocated timeout period (in seconds) which allows two full probes before taking the instance out of rotation. The default value is 15, the minimum value is 5.
        :param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
        :param pulumi.Input[int] number_of_probes: The number of consecutive probe failures after which traffic stops being delivered to the endpoint. This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the probe resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        :param pulumi.Input[str] request_path: The URI used for requesting health status from the VM. The path is required if the protocol is set to 'Http'; otherwise, it is not allowed. There is no default value.
"""
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "protocol", protocol)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if interval_in_seconds is not None:
pulumi.set(__self__, "interval_in_seconds", interval_in_seconds)
if name is not None:
pulumi.set(__self__, "name", name)
if number_of_probes is not None:
pulumi.set(__self__, "number_of_probes", number_of_probes)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if request_path is not None:
pulumi.set(__self__, "request_path", request_path)
@property
@pulumi.getter
def port(self) -> pulumi.Input[int]:
"""
The port for communicating the probe. Possible values range from 1 to 65535, inclusive.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: pulumi.Input[int]):
pulumi.set(self, "port", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[Union[str, 'ProbeProtocol']]:
"""
        The protocol of the endpoint. Possible values are: 'Http' or 'Tcp'. If 'Tcp' is specified, a received ACK is required for the probe to be successful. If 'Http' is specified, a 200 OK response from the specified URI is required for the probe to be successful.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[Union[str, 'ProbeProtocol']]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="intervalInSeconds")
def interval_in_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The interval, in seconds, for how frequently to probe the endpoint for health status. Typically, the interval is slightly less than half the allocated timeout period (in seconds) which allows two full probes before taking the instance out of rotation. The default value is 15, the minimum value is 5.
"""
return pulumi.get(self, "interval_in_seconds")
@interval_in_seconds.setter
def interval_in_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "interval_in_seconds", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
        The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="numberOfProbes")
def number_of_probes(self) -> Optional[pulumi.Input[int]]:
"""
        The number of consecutive probe failures after which traffic stops being delivered to the endpoint. This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
"""
return pulumi.get(self, "number_of_probes")
@number_of_probes.setter
def number_of_probes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "number_of_probes", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
        The provisioning state of the probe resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="requestPath")
def request_path(self) -> Optional[pulumi.Input[str]]:
"""
        The URI used for requesting health status from the VM. The path is required if the protocol is set to 'Http'; otherwise, it is not allowed. There is no default value.
"""
return pulumi.get(self, "request_path")
@request_path.setter
def request_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "request_path", value)
@pulumi.input_type
class PublicIPAddressArgs:
def __init__(__self__, *,
dns_settings: Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
public_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
public_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Public IP address resource.
:param pulumi.Input['PublicIPAddressDnsSettingsArgs'] dns_settings: The FQDN of the DNS record associated with the public IP address.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] idle_timeout_in_minutes: The idle timeout of the public IP address.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] provisioning_state: The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[Union[str, 'IPVersion']] public_ip_address_version: The public IP address version. Possible values are: 'IPv4' and 'IPv6'.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] public_ip_allocation_method: The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.
:param pulumi.Input[str] resource_guid: The resource GUID property of the public IP resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if dns_settings is not None:
pulumi.set(__self__, "dns_settings", dns_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if location is not None:
pulumi.set(__self__, "location", location)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address_version is not None:
pulumi.set(__self__, "public_ip_address_version", public_ip_address_version)
if public_ip_allocation_method is not None:
pulumi.set(__self__, "public_ip_allocation_method", public_ip_allocation_method)
if resource_guid is not None:
pulumi.set(__self__, "resource_guid", resource_guid)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="dnsSettings")
def dns_settings(self) -> Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']]:
"""
The FQDN of the DNS record associated with the public IP address.
"""
return pulumi.get(self, "dns_settings")
@dns_settings.setter
def dns_settings(self, value: Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']]):
pulumi.set(self, "dns_settings", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The idle timeout of the public IP address.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@idle_timeout_in_minutes.setter
def idle_timeout_in_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "idle_timeout_in_minutes", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="publicIPAddressVersion")
def public_ip_address_version(self) -> Optional[pulumi.Input[Union[str, 'IPVersion']]]:
"""
The public IP address version. Possible values are: 'IPv4' and 'IPv6'.
"""
return pulumi.get(self, "public_ip_address_version")
@public_ip_address_version.setter
def public_ip_address_version(self, value: Optional[pulumi.Input[Union[str, 'IPVersion']]]):
pulumi.set(self, "public_ip_address_version", value)
@property
@pulumi.getter(name="publicIPAllocationMethod")
def public_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.
"""
return pulumi.get(self, "public_ip_allocation_method")
@public_ip_allocation_method.setter
def public_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "public_ip_allocation_method", value)
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[pulumi.Input[str]]:
"""
The resource GUID property of the public IP resource.
"""
return pulumi.get(self, "resource_guid")
@resource_guid.setter
def resource_guid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_guid", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class PublicIPAddressDnsSettingsArgs:
def __init__(__self__, *,
domain_name_label: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
reverse_fqdn: Optional[pulumi.Input[str]] = None):
"""
        Contains the FQDN of the DNS record associated with the public IP address.
        :param pulumi.Input[str] domain_name_label: The domain name label. The concatenation of the domain name label and the regionalized DNS zone makes up the fully qualified domain name associated with the public IP address. If a domain name label is specified, an A DNS record is created for the public IP in the Microsoft Azure DNS system.
        :param pulumi.Input[str] fqdn: The fully qualified domain name of the A DNS record associated with the public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
        :param pulumi.Input[str] reverse_fqdn: The reverse FQDN. A user-visible, fully qualified domain name that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record is created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
"""
if domain_name_label is not None:
pulumi.set(__self__, "domain_name_label", domain_name_label)
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if reverse_fqdn is not None:
pulumi.set(__self__, "reverse_fqdn", reverse_fqdn)
@property
@pulumi.getter(name="domainNameLabel")
def domain_name_label(self) -> Optional[pulumi.Input[str]]:
"""
        The domain name label. The concatenation of the domain name label and the regionalized DNS zone makes up the fully qualified domain name associated with the public IP address. If a domain name label is specified, an A DNS record is created for the public IP in the Microsoft Azure DNS system.
"""
return pulumi.get(self, "domain_name_label")
@domain_name_label.setter
def domain_name_label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "domain_name_label", value)
@property
@pulumi.getter
def fqdn(self) -> Optional[pulumi.Input[str]]:
"""
        The fully qualified domain name of the A DNS record associated with the public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
"""
return pulumi.get(self, "fqdn")
@fqdn.setter
def fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fqdn", value)
@property
@pulumi.getter(name="reverseFqdn")
def reverse_fqdn(self) -> Optional[pulumi.Input[str]]:
"""
        The reverse FQDN. A user-visible, fully qualified domain name that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record is created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
"""
return pulumi.get(self, "reverse_fqdn")
@reverse_fqdn.setter
def reverse_fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reverse_fqdn", value)
@pulumi.input_type
class ResourceNavigationLinkArgs:
def __init__(__self__, *,
id: Optional[pulumi.Input[str]] = None,
link: Optional[pulumi.Input[str]] = None,
linked_resource_type: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
ResourceNavigationLink resource.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] link: Link to the external resource
:param pulumi.Input[str] linked_resource_type: Resource type of the linked resource.
:param pulumi.Input[str] name: Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
if id is not None:
pulumi.set(__self__, "id", id)
if link is not None:
pulumi.set(__self__, "link", link)
if linked_resource_type is not None:
pulumi.set(__self__, "linked_resource_type", linked_resource_type)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def link(self) -> Optional[pulumi.Input[str]]:
"""
Link to the external resource
"""
return pulumi.get(self, "link")
@link.setter
def link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "link", value)
@property
@pulumi.getter(name="linkedResourceType")
def linked_resource_type(self) -> Optional[pulumi.Input[str]]:
"""
Resource type of the linked resource.
"""
return pulumi.get(self, "linked_resource_type")
@linked_resource_type.setter
def linked_resource_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "linked_resource_type", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class RouteArgs:
def __init__(__self__, *,
next_hop_type: pulumi.Input[Union[str, 'RouteNextHopType']],
address_prefix: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
next_hop_ip_address: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
Route resource
        :param pulumi.Input[Union[str, 'RouteNextHopType']] next_hop_type: The type of Azure hop the packet should be sent to. Possible values are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.
:param pulumi.Input[str] address_prefix: The destination CIDR to which the route applies.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] next_hop_ip_address: The IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
:param pulumi.Input[str] provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
pulumi.set(__self__, "next_hop_type", next_hop_type)
if address_prefix is not None:
pulumi.set(__self__, "address_prefix", address_prefix)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if next_hop_ip_address is not None:
pulumi.set(__self__, "next_hop_ip_address", next_hop_ip_address)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> pulumi.Input[Union[str, 'RouteNextHopType']]:
"""
        The type of Azure hop the packet should be sent to. Possible values are: 'VirtualNetworkGateway', 'VnetLocal', 'Internet', 'VirtualAppliance', and 'None'.
"""
return pulumi.get(self, "next_hop_type")
@next_hop_type.setter
def next_hop_type(self, value: pulumi.Input[Union[str, 'RouteNextHopType']]):
pulumi.set(self, "next_hop_type", value)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The destination CIDR to which the route applies.
"""
return pulumi.get(self, "address_prefix")
@address_prefix.setter
def address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address_prefix", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nextHopIpAddress")
def next_hop_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
"""
return pulumi.get(self, "next_hop_ip_address")
@next_hop_ip_address.setter
def next_hop_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_hop_ip_address", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@pulumi.input_type
class RouteFilterArgs:
def __init__(__self__, *,
location: pulumi.Input[str],
id: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Route Filter Resource.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]] rules: Collection of RouteFilterRules contained within a route filter.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "location", location)
if id is not None:
pulumi.set(__self__, "id", id)
if rules is not None:
pulumi.set(__self__, "rules", rules)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def location(self) -> pulumi.Input[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: pulumi.Input[str]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]]:
"""
Collection of RouteFilterRules contained within a route filter.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RouteFilterRuleArgs']]]]):
pulumi.set(self, "rules", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class RouteFilterRuleArgs:
def __init__(__self__, *,
access: pulumi.Input[Union[str, 'Access']],
communities: pulumi.Input[Sequence[pulumi.Input[str]]],
route_filter_rule_type: pulumi.Input[Union[str, 'RouteFilterRuleType']],
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Route Filter Rule Resource
        :param pulumi.Input[Union[str, 'Access']] access: The access type of the rule. Valid values are: 'Allow' and 'Deny'.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] communities: The collection of BGP community values to filter on, e.g. ['12076:5010', '12076:5020'].
        :param pulumi.Input[Union[str, 'RouteFilterRuleType']] route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "access", access)
pulumi.set(__self__, "communities", communities)
pulumi.set(__self__, "route_filter_rule_type", route_filter_rule_type)
if id is not None:
pulumi.set(__self__, "id", id)
if location is not None:
pulumi.set(__self__, "location", location)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def access(self) -> pulumi.Input[Union[str, 'Access']]:
"""
        The access type of the rule. Valid values are: 'Allow' and 'Deny'.
"""
return pulumi.get(self, "access")
@access.setter
def access(self, value: pulumi.Input[Union[str, 'Access']]):
pulumi.set(self, "access", value)
@property
@pulumi.getter
def communities(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
        The collection of BGP community values to filter on, e.g. ['12076:5010', '12076:5020'].
"""
return pulumi.get(self, "communities")
@communities.setter
def communities(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "communities", value)
@property
@pulumi.getter(name="routeFilterRuleType")
def route_filter_rule_type(self) -> pulumi.Input[Union[str, 'RouteFilterRuleType']]:
"""
        The rule type of the rule. Valid value is: 'Community'.
"""
return pulumi.get(self, "route_filter_rule_type")
@route_filter_rule_type.setter
def route_filter_rule_type(self, value: pulumi.Input[Union[str, 'RouteFilterRuleType']]):
pulumi.set(self, "route_filter_rule_type", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class RouteTableArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
routes: Optional[pulumi.Input[Sequence[pulumi.Input['RouteArgs']]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Route table resource.
        :param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] provisioning_state: The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[Sequence[pulumi.Input['RouteArgs']]] routes: Collection of routes contained within a route table.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if location is not None:
pulumi.set(__self__, "location", location)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if routes is not None:
pulumi.set(__self__, "routes", routes)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
        A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter
def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteArgs']]]]:
"""
Collection of routes contained within a route table.
"""
return pulumi.get(self, "routes")
@routes.setter
def routes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RouteArgs']]]]):
pulumi.set(self, "routes", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class SecurityRuleArgs:
def __init__(__self__, *,
access: pulumi.Input[Union[str, 'SecurityRuleAccess']],
destination_address_prefix: pulumi.Input[str],
direction: pulumi.Input[Union[str, 'SecurityRuleDirection']],
protocol: pulumi.Input[Union[str, 'SecurityRuleProtocol']],
source_address_prefix: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
destination_port_range: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
priority: Optional[pulumi.Input[int]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
source_port_range: Optional[pulumi.Input[str]] = None):
"""
Network security rule.
:param pulumi.Input[Union[str, 'SecurityRuleAccess']] access: The network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'.
        :param pulumi.Input[str] destination_address_prefix: The destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all destination IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
        :param pulumi.Input[Union[str, 'SecurityRuleDirection']] direction: The direction of the rule. The direction specifies whether the rule is evaluated on incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'.
:param pulumi.Input[Union[str, 'SecurityRuleProtocol']] protocol: Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'.
:param pulumi.Input[str] source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from.
:param pulumi.Input[str] description: A description for this rule. Restricted to 140 chars.
:param pulumi.Input[str] destination_port_range: The destination port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[int] priority: The priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule.
        :param pulumi.Input[str] provisioning_state: The provisioning state of the security rule resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] source_port_range: The source port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
pulumi.set(__self__, "access", access)
pulumi.set(__self__, "destination_address_prefix", destination_address_prefix)
pulumi.set(__self__, "direction", direction)
pulumi.set(__self__, "protocol", protocol)
pulumi.set(__self__, "source_address_prefix", source_address_prefix)
if description is not None:
pulumi.set(__self__, "description", description)
if destination_port_range is not None:
pulumi.set(__self__, "destination_port_range", destination_port_range)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if source_port_range is not None:
pulumi.set(__self__, "source_port_range", source_port_range)
@property
@pulumi.getter
def access(self) -> pulumi.Input[Union[str, 'SecurityRuleAccess']]:
"""
The network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'.
"""
return pulumi.get(self, "access")
@access.setter
def access(self, value: pulumi.Input[Union[str, 'SecurityRuleAccess']]):
pulumi.set(self, "access", value)
@property
@pulumi.getter(name="destinationAddressPrefix")
def destination_address_prefix(self) -> pulumi.Input[str]:
"""
        The destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all destination IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
"""
return pulumi.get(self, "destination_address_prefix")
@destination_address_prefix.setter
def destination_address_prefix(self, value: pulumi.Input[str]):
pulumi.set(self, "destination_address_prefix", value)
@property
@pulumi.getter
def direction(self) -> pulumi.Input[Union[str, 'SecurityRuleDirection']]:
"""
        The direction of the rule. The direction specifies whether the rule is evaluated on incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'.
"""
return pulumi.get(self, "direction")
@direction.setter
def direction(self, value: pulumi.Input[Union[str, 'SecurityRuleDirection']]):
pulumi.set(self, "direction", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[Union[str, 'SecurityRuleProtocol']]:
"""
Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[Union[str, 'SecurityRuleProtocol']]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="sourceAddressPrefix")
def source_address_prefix(self) -> pulumi.Input[str]:
"""
The CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from.
"""
return pulumi.get(self, "source_address_prefix")
@source_address_prefix.setter
def source_address_prefix(self, value: pulumi.Input[str]):
pulumi.set(self, "source_address_prefix", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for this rule. Restricted to 140 chars.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="destinationPortRange")
def destination_port_range(self) -> Optional[pulumi.Input[str]]:
"""
The destination port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
return pulumi.get(self, "destination_port_range")
@destination_port_range.setter
def destination_port_range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destination_port_range", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[int]]:
"""
The priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
        The provisioning state of the resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="sourcePortRange")
def source_port_range(self) -> Optional[pulumi.Input[str]]:
"""
The source port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
return pulumi.get(self, "source_port_range")
@source_port_range.setter
def source_port_range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_port_range", value)
@pulumi.input_type
class SubResourceArgs:
def __init__(__self__, *,
id: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] id: Resource ID.
"""
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@pulumi.input_type
class SubnetArgs:
def __init__(__self__, *,
address_prefix: Optional[pulumi.Input[str]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_security_group: Optional[pulumi.Input['NetworkSecurityGroupArgs']] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_navigation_links: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceNavigationLinkArgs']]]] = None,
route_table: Optional[pulumi.Input['RouteTableArgs']] = None):
"""
Subnet in a virtual network resource.
:param pulumi.Input[str] address_prefix: The address prefix for the subnet.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input['NetworkSecurityGroupArgs'] network_security_group: The reference of the NetworkSecurityGroup resource.
:param pulumi.Input[str] provisioning_state: The provisioning state of the resource.
        :param pulumi.Input[Sequence[pulumi.Input['ResourceNavigationLinkArgs']]] resource_navigation_links: An array of references to the external resources using this subnet.
:param pulumi.Input['RouteTableArgs'] route_table: The reference of the RouteTable resource.
"""
if address_prefix is not None:
pulumi.set(__self__, "address_prefix", address_prefix)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if network_security_group is not None:
pulumi.set(__self__, "network_security_group", network_security_group)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if resource_navigation_links is not None:
pulumi.set(__self__, "resource_navigation_links", resource_navigation_links)
if route_table is not None:
pulumi.set(__self__, "route_table", route_table)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> Optional[pulumi.Input[str]]:
"""
The address prefix for the subnet.
"""
return pulumi.get(self, "address_prefix")
@address_prefix.setter
def address_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address_prefix", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkSecurityGroup")
def network_security_group(self) -> Optional[pulumi.Input['NetworkSecurityGroupArgs']]:
"""
The reference of the NetworkSecurityGroup resource.
"""
return pulumi.get(self, "network_security_group")
@network_security_group.setter
def network_security_group(self, value: Optional[pulumi.Input['NetworkSecurityGroupArgs']]):
pulumi.set(self, "network_security_group", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
The provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="resourceNavigationLinks")
def resource_navigation_links(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceNavigationLinkArgs']]]]:
"""
        An array of references to the external resources using this subnet.
"""
return pulumi.get(self, "resource_navigation_links")
@resource_navigation_links.setter
def resource_navigation_links(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceNavigationLinkArgs']]]]):
pulumi.set(self, "resource_navigation_links", value)
@property
@pulumi.getter(name="routeTable")
def route_table(self) -> Optional[pulumi.Input['RouteTableArgs']]:
"""
The reference of the RouteTable resource.
"""
return pulumi.get(self, "route_table")
@route_table.setter
def route_table(self, value: Optional[pulumi.Input['RouteTableArgs']]):
pulumi.set(self, "route_table", value)
@pulumi.input_type
class VirtualNetworkGatewayArgs:
def __init__(__self__, *,
active_active: Optional[pulumi.Input[bool]] = None,
bgp_settings: Optional[pulumi.Input['BgpSettingsArgs']] = None,
enable_bgp: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
gateway_default_site: Optional[pulumi.Input['SubResourceArgs']] = None,
gateway_type: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]] = None,
id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
vpn_client_configuration: Optional[pulumi.Input['VpnClientConfigurationArgs']] = None,
vpn_type: Optional[pulumi.Input[Union[str, 'VpnType']]] = None):
"""
        A common class for general virtual network gateway resource information.
        :param pulumi.Input[bool] active_active: Whether the gateway is deployed in active-active mode.
:param pulumi.Input['BgpSettingsArgs'] bgp_settings: Virtual network gateway's BGP speaker settings.
:param pulumi.Input[bool] enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
        :param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input['SubResourceArgs'] gateway_default_site: The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
:param pulumi.Input[Union[str, 'VirtualNetworkGatewayType']] gateway_type: The type of this virtual network gateway. Possible values are: 'Vpn' and 'ExpressRoute'.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]] ip_configurations: IP configurations for virtual network gateway.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_guid: The resource GUID property of the VirtualNetworkGateway resource.
:param pulumi.Input['VirtualNetworkGatewaySkuArgs'] sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input['VpnClientConfigurationArgs'] vpn_client_configuration: The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
        :param pulumi.Input[Union[str, 'VpnType']] vpn_type: The VPN type of this virtual network gateway. Possible values are: 'PolicyBased' and 'RouteBased'.
"""
if active_active is not None:
pulumi.set(__self__, "active_active", active_active)
if bgp_settings is not None:
pulumi.set(__self__, "bgp_settings", bgp_settings)
if enable_bgp is not None:
pulumi.set(__self__, "enable_bgp", enable_bgp)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if gateway_default_site is not None:
pulumi.set(__self__, "gateway_default_site", gateway_default_site)
if gateway_type is not None:
pulumi.set(__self__, "gateway_type", gateway_type)
if id is not None:
pulumi.set(__self__, "id", id)
if ip_configurations is not None:
pulumi.set(__self__, "ip_configurations", ip_configurations)
if location is not None:
pulumi.set(__self__, "location", location)
if resource_guid is not None:
pulumi.set(__self__, "resource_guid", resource_guid)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if vpn_client_configuration is not None:
pulumi.set(__self__, "vpn_client_configuration", vpn_client_configuration)
if vpn_type is not None:
pulumi.set(__self__, "vpn_type", vpn_type)
@property
@pulumi.getter(name="activeActive")
def active_active(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether the gateway is deployed in active-active mode.
"""
return pulumi.get(self, "active_active")
@active_active.setter
def active_active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "active_active", value)
@property
@pulumi.getter(name="bgpSettings")
def bgp_settings(self) -> Optional[pulumi.Input['BgpSettingsArgs']]:
"""
Virtual network gateway's BGP speaker settings.
"""
return pulumi.get(self, "bgp_settings")
@bgp_settings.setter
def bgp_settings(self, value: Optional[pulumi.Input['BgpSettingsArgs']]):
pulumi.set(self, "bgp_settings", value)
@property
@pulumi.getter(name="enableBgp")
def enable_bgp(self) -> Optional[pulumi.Input[bool]]:
"""
Whether BGP is enabled for this virtual network gateway or not.
"""
return pulumi.get(self, "enable_bgp")
@enable_bgp.setter
def enable_bgp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_bgp", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
        A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="gatewayDefaultSite")
def gateway_default_site(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the LocalNetworkGateway resource which represents local network site having default routes. Assign Null value in case of removing existing default site setting.
"""
return pulumi.get(self, "gateway_default_site")
@gateway_default_site.setter
def gateway_default_site(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "gateway_default_site", value)
@property
@pulumi.getter(name="gatewayType")
def gateway_type(self) -> Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]]:
"""
The type of this virtual network gateway. Possible values are: 'Vpn' and 'ExpressRoute'.
"""
return pulumi.get(self, "gateway_type")
@gateway_type.setter
def gateway_type(self, value: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewayType']]]):
pulumi.set(self, "gateway_type", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]]:
"""
IP configurations for virtual network gateway.
"""
return pulumi.get(self, "ip_configurations")
@ip_configurations.setter
def ip_configurations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkGatewayIPConfigurationArgs']]]]):
pulumi.set(self, "ip_configurations", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[pulumi.Input[str]]:
"""
The resource GUID property of the VirtualNetworkGateway resource.
"""
return pulumi.get(self, "resource_guid")
@resource_guid.setter
def resource_guid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_guid", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']]:
"""
The reference of the VirtualNetworkGatewaySku resource which represents the SKU selected for Virtual network gateway.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input['VirtualNetworkGatewaySkuArgs']]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="vpnClientConfiguration")
def vpn_client_configuration(self) -> Optional[pulumi.Input['VpnClientConfigurationArgs']]:
"""
The reference of the VpnClientConfiguration resource which represents the P2S VpnClient configurations.
"""
return pulumi.get(self, "vpn_client_configuration")
@vpn_client_configuration.setter
def vpn_client_configuration(self, value: Optional[pulumi.Input['VpnClientConfigurationArgs']]):
pulumi.set(self, "vpn_client_configuration", value)
@property
@pulumi.getter(name="vpnType")
def vpn_type(self) -> Optional[pulumi.Input[Union[str, 'VpnType']]]:
"""
        The VPN type of this virtual network gateway. Possible values are: 'PolicyBased' and 'RouteBased'.
"""
return pulumi.get(self, "vpn_type")
@vpn_type.setter
def vpn_type(self, value: Optional[pulumi.Input[Union[str, 'VpnType']]]):
pulumi.set(self, "vpn_type", value)
@pulumi.input_type
class VirtualNetworkGatewayIPConfigurationArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
private_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
public_ip_address: Optional[pulumi.Input['SubResourceArgs']] = None,
subnet: Optional[pulumi.Input['SubResourceArgs']] = None):
"""
        IP configuration for a virtual network gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] private_ip_allocation_method: The private IP allocation method. Possible values are: 'Static' and 'Dynamic'.
:param pulumi.Input['SubResourceArgs'] public_ip_address: The reference of the public IP resource.
:param pulumi.Input['SubResourceArgs'] subnet: The reference of the subnet resource.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
The private IP allocation method. Possible values are: 'Static' and 'Dynamic'.
"""
return pulumi.get(self, "private_ip_allocation_method")
@private_ip_allocation_method.setter
def private_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "private_ip_allocation_method", value)
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the public IP resource.
"""
return pulumi.get(self, "public_ip_address")
@public_ip_address.setter
def public_ip_address(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "public_ip_address", value)
@property
@pulumi.getter
def subnet(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the subnet resource.
"""
return pulumi.get(self, "subnet")
@subnet.setter
def subnet(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "subnet", value)
@pulumi.input_type
class VirtualNetworkGatewaySkuArgs:
def __init__(__self__, *,
capacity: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuName']]] = None,
tier: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuTier']]] = None):
"""
        VirtualNetworkGatewaySku details.
:param pulumi.Input[int] capacity: The capacity.
:param pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuName']] name: Gateway SKU name.
:param pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuTier']] tier: Gateway SKU tier.
"""
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def capacity(self) -> Optional[pulumi.Input[int]]:
"""
The capacity.
"""
return pulumi.get(self, "capacity")
@capacity.setter
def capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "capacity", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuName']]]:
"""
Gateway SKU name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuName']]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tier(self) -> Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuTier']]]:
"""
Gateway SKU tier.
"""
return pulumi.get(self, "tier")
@tier.setter
def tier(self, value: Optional[pulumi.Input[Union[str, 'VirtualNetworkGatewaySkuTier']]]):
pulumi.set(self, "tier", value)
@pulumi.input_type
class VirtualNetworkPeeringArgs:
def __init__(__self__, *,
allow_forwarded_traffic: Optional[pulumi.Input[bool]] = None,
allow_gateway_transit: Optional[pulumi.Input[bool]] = None,
allow_virtual_network_access: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
peering_state: Optional[pulumi.Input[Union[str, 'VirtualNetworkPeeringState']]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
remote_virtual_network: Optional[pulumi.Input['SubResourceArgs']] = None,
use_remote_gateways: Optional[pulumi.Input[bool]] = None):
"""
        A peering in a virtual network resource.
        :param pulumi.Input[bool] allow_forwarded_traffic: Whether forwarded traffic from VMs in the remote virtual network is allowed.
        :param pulumi.Input[bool] allow_gateway_transit: Whether gateway links can be used in the remote virtual network to link to this virtual network.
        :param pulumi.Input[bool] allow_virtual_network_access: Whether VMs in the remote virtual network can access VMs in the local virtual network.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[Union[str, 'VirtualNetworkPeeringState']] peering_state: The status of the virtual network peering. Possible values are 'Initiated', 'Connected', and 'Disconnected'.
:param pulumi.Input[str] provisioning_state: The provisioning state of the resource.
:param pulumi.Input['SubResourceArgs'] remote_virtual_network: The reference of the remote virtual network.
        :param pulumi.Input[bool] use_remote_gateways: Whether remote gateways can be used on this virtual network. If this flag is set to true, and allowGatewayTransit on the remote peering is also true, the virtual network will use the gateways of the remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if the virtual network already has a gateway.
"""
if allow_forwarded_traffic is not None:
pulumi.set(__self__, "allow_forwarded_traffic", allow_forwarded_traffic)
if allow_gateway_transit is not None:
pulumi.set(__self__, "allow_gateway_transit", allow_gateway_transit)
if allow_virtual_network_access is not None:
pulumi.set(__self__, "allow_virtual_network_access", allow_virtual_network_access)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if peering_state is not None:
pulumi.set(__self__, "peering_state", peering_state)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if remote_virtual_network is not None:
pulumi.set(__self__, "remote_virtual_network", remote_virtual_network)
if use_remote_gateways is not None:
pulumi.set(__self__, "use_remote_gateways", use_remote_gateways)
@property
@pulumi.getter(name="allowForwardedTraffic")
def allow_forwarded_traffic(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether forwarded traffic from VMs in the remote virtual network is allowed.
"""
return pulumi.get(self, "allow_forwarded_traffic")
@allow_forwarded_traffic.setter
def allow_forwarded_traffic(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_forwarded_traffic", value)
@property
@pulumi.getter(name="allowGatewayTransit")
def allow_gateway_transit(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether gateway links can be used in the remote virtual network to link to this virtual network.
"""
return pulumi.get(self, "allow_gateway_transit")
@allow_gateway_transit.setter
def allow_gateway_transit(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_gateway_transit", value)
@property
@pulumi.getter(name="allowVirtualNetworkAccess")
def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether VMs in the remote virtual network can access VMs in the local virtual network.
"""
return pulumi.get(self, "allow_virtual_network_access")
@allow_virtual_network_access.setter
def allow_virtual_network_access(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_virtual_network_access", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="peeringState")
def peering_state(self) -> Optional[pulumi.Input[Union[str, 'VirtualNetworkPeeringState']]]:
"""
The status of the virtual network peering. Possible values are 'Initiated', 'Connected', and 'Disconnected'.
"""
return pulumi.get(self, "peering_state")
@peering_state.setter
def peering_state(self, value: Optional[pulumi.Input[Union[str, 'VirtualNetworkPeeringState']]]):
pulumi.set(self, "peering_state", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
The provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
@property
@pulumi.getter(name="remoteVirtualNetwork")
def remote_virtual_network(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The reference of the remote virtual network.
"""
return pulumi.get(self, "remote_virtual_network")
@remote_virtual_network.setter
def remote_virtual_network(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "remote_virtual_network", value)
@property
@pulumi.getter(name="useRemoteGateways")
def use_remote_gateways(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether remote gateways can be used on this virtual network. If this flag is set to true, and allowGatewayTransit on the remote peering is also true, the virtual network will use the gateways of the remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if the virtual network already has a gateway.
"""
return pulumi.get(self, "use_remote_gateways")
@use_remote_gateways.setter
def use_remote_gateways(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_remote_gateways", value)
@pulumi.input_type
class VpnClientConfigurationArgs:
def __init__(__self__, *,
vpn_client_address_pool: Optional[pulumi.Input['AddressSpaceArgs']] = None,
vpn_client_revoked_certificates: Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRevokedCertificateArgs']]]] = None,
vpn_client_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRootCertificateArgs']]]] = None):
"""
VpnClientConfiguration for P2S client.
        :param pulumi.Input['AddressSpaceArgs'] vpn_client_address_pool: The reference of the address space resource that represents the address space for P2S VPN clients.
        :param pulumi.Input[Sequence[pulumi.Input['VpnClientRevokedCertificateArgs']]] vpn_client_revoked_certificates: The revoked VPN client certificates of the virtual network gateway.
        :param pulumi.Input[Sequence[pulumi.Input['VpnClientRootCertificateArgs']]] vpn_client_root_certificates: The VPN client root certificates of the virtual network gateway.
"""
if vpn_client_address_pool is not None:
pulumi.set(__self__, "vpn_client_address_pool", vpn_client_address_pool)
if vpn_client_revoked_certificates is not None:
pulumi.set(__self__, "vpn_client_revoked_certificates", vpn_client_revoked_certificates)
if vpn_client_root_certificates is not None:
pulumi.set(__self__, "vpn_client_root_certificates", vpn_client_root_certificates)
@property
@pulumi.getter(name="vpnClientAddressPool")
def vpn_client_address_pool(self) -> Optional[pulumi.Input['AddressSpaceArgs']]:
"""
        The reference of the address space resource that represents the address space for P2S VPN clients.
"""
return pulumi.get(self, "vpn_client_address_pool")
@vpn_client_address_pool.setter
def vpn_client_address_pool(self, value: Optional[pulumi.Input['AddressSpaceArgs']]):
pulumi.set(self, "vpn_client_address_pool", value)
@property
@pulumi.getter(name="vpnClientRevokedCertificates")
def vpn_client_revoked_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRevokedCertificateArgs']]]]:
"""
        The revoked VPN client certificates of the virtual network gateway.
"""
return pulumi.get(self, "vpn_client_revoked_certificates")
@vpn_client_revoked_certificates.setter
def vpn_client_revoked_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRevokedCertificateArgs']]]]):
pulumi.set(self, "vpn_client_revoked_certificates", value)
@property
@pulumi.getter(name="vpnClientRootCertificates")
def vpn_client_root_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRootCertificateArgs']]]]:
"""
        The VPN client root certificates of the virtual network gateway.
"""
return pulumi.get(self, "vpn_client_root_certificates")
@vpn_client_root_certificates.setter
def vpn_client_root_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VpnClientRootCertificateArgs']]]]):
pulumi.set(self, "vpn_client_root_certificates", value)
@pulumi.input_type
class VpnClientRevokedCertificateArgs:
def __init__(__self__, *,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
thumbprint: Optional[pulumi.Input[str]] = None):
"""
        VPN client revoked certificate of a virtual network gateway.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] thumbprint: The revoked VPN client certificate thumbprint.
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if thumbprint is not None:
pulumi.set(__self__, "thumbprint", thumbprint)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def thumbprint(self) -> Optional[pulumi.Input[str]]:
"""
The revoked VPN client certificate thumbprint.
"""
return pulumi.get(self, "thumbprint")
@thumbprint.setter
def thumbprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "thumbprint", value)
@pulumi.input_type
class VpnClientRootCertificateArgs:
def __init__(__self__, *,
public_cert_data: pulumi.Input[str],
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
        VPN client root certificate of a virtual network gateway.
:param pulumi.Input[str] public_cert_data: The certificate public data.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
pulumi.set(__self__, "public_cert_data", public_cert_data)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="publicCertData")
def public_cert_data(self) -> pulumi.Input[str]:
"""
The certificate public data.
"""
return pulumi.get(self, "public_cert_data")
@public_cert_data.setter
def public_cert_data(self, value: pulumi.Input[str]):
pulumi.set(self, "public_cert_data", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
| 43.385276 | 382 | 0.663127 |
d3dfbaf5739105f8f181f8a4bb1260c0444f20bd
| 368 |
py
|
Python
|
Aula19/ex06.py
|
danicon/MD3-Curso_Python
|
3d419d440d3b28adb5c019268f4b217e7d0ce45a
|
[
"MIT"
] | null | null | null |
Aula19/ex06.py
|
danicon/MD3-Curso_Python
|
3d419d440d3b28adb5c019268f4b217e7d0ce45a
|
[
"MIT"
] | null | null | null |
Aula19/ex06.py
|
danicon/MD3-Curso_Python
|
3d419d440d3b28adb5c019268f4b217e7d0ce45a
|
[
"MIT"
] | null | null | null |
result = {}
result['nome'] = str(input('Name: '))
result['media'] = float(input(f'Grade average for {result["nome"]}: '))
if result['media'] >= 7:
    result['situacao'] = 'Approved'
elif 5 <= result['media'] < 7:
    result['situacao'] = 'Recovery'
else:
    result['situacao'] = 'Failed'
print(30*'-=')
for k, v in result.items():
    print(f' - {k} is equal to {v}')
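# Editor's note: an illustrative session for the script above; the input
# values are made up:
#
#   Name: Ana
#   Grade average for Ana: 6.5
#   -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#    - nome is equal to Ana
#    - media is equal to 6.5
#    - situacao is equal to Recovery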
| 24.533333 | 62 | 0.576087 |
84e29ca5cef603617b3bb7df0841c268ad37cadd
| 3,813 |
py
|
Python
|
dashboard/04.implementation/Django/backend/base/migrations/0002_order_orderitem_review_shippingaddress.py
|
BU-Spark/Justice-Media-co-Lab-NAACP-GBH-Media-Bias-project
|
b9dd7e41bc4a0fc51df0433f9af49e36427e8abd
|
[
"MIT"
] | 1 |
2021-02-12T22:57:46.000Z
|
2021-02-12T22:57:46.000Z
|
dashboard/04.implementation/Django/backend/base/migrations/0002_order_orderitem_review_shippingaddress.py
|
BU-Spark/Justice-Media-co-Lab-NAACP-GBH-Media-Bias-project
|
b9dd7e41bc4a0fc51df0433f9af49e36427e8abd
|
[
"MIT"
] | null | null | null |
dashboard/04.implementation/Django/backend/base/migrations/0002_order_orderitem_review_shippingaddress.py
|
BU-Spark/Justice-Media-co-Lab-NAACP-GBH-Media-Bias-project
|
b9dd7e41bc4a0fc51df0433f9af49e36427e8abd
|
[
"MIT"
] | 2 |
2021-02-26T08:02:02.000Z
|
2021-02-27T20:09:34.000Z
|
# Generated by Django 3.1.7 on 2021-03-27 01:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('base', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('paymentMethod', models.CharField(blank=True, max_length=200, null=True)),
('taxPrice', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('shippingPrice', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('totalPrice', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('isPaid', models.BooleanField(default=False)),
('paidAt', models.DateTimeField(blank=True, null=True)),
('isDelivered', models.BooleanField(default=False)),
('deliveredAt', models.DateTimeField(blank=True, null=True)),
('createdAt', models.DateTimeField(auto_now_add=True)),
('_id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ShippingAddress',
fields=[
('address', models.CharField(blank=True, max_length=200, null=True)),
('city', models.CharField(blank=True, max_length=200, null=True)),
('postalCode', models.CharField(blank=True, max_length=200, null=True)),
('country', models.CharField(blank=True, max_length=200, null=True)),
('shippingPrice', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('_id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('order', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='base.order')),
],
),
migrations.CreateModel(
name='Review',
fields=[
('name', models.CharField(blank=True, max_length=200, null=True)),
('rating', models.IntegerField(blank=True, default=0, null=True)),
('comment', models.TextField(blank=True, null=True)),
('createdAt', models.DateTimeField(auto_now_add=True)),
('_id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='base.product')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='OrderItem',
fields=[
('name', models.CharField(blank=True, max_length=200, null=True)),
('qty', models.IntegerField(blank=True, default=0, null=True)),
('price', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('image', models.CharField(blank=True, max_length=200, null=True)),
('_id', models.AutoField(editable=False, primary_key=True, serialize=False)),
('order', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='base.order')),
('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='base.product')),
],
),
]
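    # Editor's note (not part of the generated migration): Django applies this
    # file through its standard management command; "base" is the app label
    # declared in the dependencies above:
    #
    #   python manage.py migrate base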
| 55.26087 | 133 | 0.608969 |
ab48fa2e650af29ce57660a5691a71df7aa25f70
| 31,700 |
py
|
Python
|
os_win/tests/unit/utils/storage/virtdisk/test_vhdutils.py
|
mail2nsrajesh/os-win
|
b5ee321a097ddc96ea9c7a652a19d88215eab996
|
[
"Apache-2.0"
] | null | null | null |
os_win/tests/unit/utils/storage/virtdisk/test_vhdutils.py
|
mail2nsrajesh/os-win
|
b5ee321a097ddc96ea9c7a652a19d88215eab996
|
[
"Apache-2.0"
] | null | null | null |
os_win/tests/unit/utils/storage/virtdisk/test_vhdutils.py
|
mail2nsrajesh/os-win
|
b5ee321a097ddc96ea9c7a652a19d88215eab996
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslotest import base
import six
from os_win import constants
from os_win import exceptions
from os_win.utils.storage.virtdisk import vhdutils
from os_win.utils.winapi import constants as w_const
class VHDUtilsTestCase(base.BaseTestCase):
"""Unit tests for the Hyper-V VHDUtils class."""
def setUp(self):
super(VHDUtilsTestCase, self).setUp()
self._setup_lib_mocks()
self._fake_vst_struct = self._vdisk_struct.VIRTUAL_STORAGE_TYPE
self._vhdutils = vhdutils.VHDUtils()
self._vhdutils._win32_utils = mock.Mock()
self._mock_close = self._vhdutils._win32_utils.close_handle
self._mock_run = self._vhdutils._win32_utils.run_and_check_output
self._run_args = self._vhdutils._virtdisk_run_args
self.addCleanup(mock.patch.stopall)
def _setup_lib_mocks(self):
self._vdisk_struct = mock.Mock()
self._ctypes = mock.Mock()
        # These lambdas make it easy to assert on the variables passed by
        # reference.
self._ctypes.byref = lambda x: (x, "byref")
self._ctypes.c_wchar_p = lambda x: (x, "c_wchar_p")
self._ctypes.c_ulong = lambda x: (x, "c_ulong")
mock.patch.multiple(vhdutils,
ctypes=self._ctypes, kernel32=mock.DEFAULT,
wintypes=mock.DEFAULT, virtdisk=mock.DEFAULT,
vdisk_struct=self._vdisk_struct,
create=True).start()
def _test_run_and_check_output(self, raised_exc=None):
self._mock_run.side_effect = raised_exc(
func_name='fake_func_name',
error_code='fake_error_code',
error_message='fake_error_message') if raised_exc else None
if raised_exc:
self.assertRaises(
raised_exc,
self._vhdutils._run_and_check_output,
mock.sentinel.func,
mock.sentinel.arg,
cleanup_handle=mock.sentinel.handle)
else:
ret_val = self._vhdutils._run_and_check_output(
mock.sentinel.func,
mock.sentinel.arg,
cleanup_handle=mock.sentinel.handle)
self.assertEqual(self._mock_run.return_value, ret_val)
self._mock_run.assert_called_once_with(
mock.sentinel.func, mock.sentinel.arg, **self._run_args)
self._mock_close.assert_called_once_with(mock.sentinel.handle)
def test_run_and_check_output(self):
self._test_run_and_check_output()
def test_run_and_check_output_raising_error(self):
self._test_run_and_check_output(
raised_exc=exceptions.VHDWin32APIException)
@mock.patch.object(vhdutils.VHDUtils, '_get_vhd_device_id')
def test_open(self, mock_get_dev_id):
fake_vst = self._fake_vst_struct.return_value
mock_get_dev_id.return_value = mock.sentinel.device_id
handle = self._vhdutils._open(
vhd_path=mock.sentinel.vhd_path,
open_flag=mock.sentinel.open_flag,
open_access_mask=mock.sentinel.access_mask,
open_params=mock.sentinel.open_params)
self.assertEqual(vhdutils.wintypes.HANDLE.return_value, handle)
self._fake_vst_struct.assert_called_once_with(
DeviceId=mock.sentinel.device_id,
VendorId=w_const.VIRTUAL_STORAGE_TYPE_VENDOR_MICROSOFT)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.OpenVirtualDisk,
self._ctypes.byref(fake_vst),
self._ctypes.c_wchar_p(mock.sentinel.vhd_path),
mock.sentinel.access_mask,
mock.sentinel.open_flag,
mock.sentinel.open_params,
self._ctypes.byref(vhdutils.wintypes.HANDLE.return_value),
**self._run_args)
def test_close(self):
self._vhdutils._close(mock.sentinel.handle)
vhdutils.kernel32.CloseHandle.assert_called_once_with(
mock.sentinel.handle)
@mock.patch.object(vhdutils.VHDUtils, '_get_vhd_device_id')
def _test_create_vhd(self, mock_get_dev_id, new_vhd_type):
create_params_struct = (
self._vdisk_struct.CREATE_VIRTUAL_DISK_PARAMETERS)
mock_handle = vhdutils.wintypes.HANDLE.return_value
fake_vst = self._fake_vst_struct.return_value
fake_create_params = create_params_struct.return_value
expected_create_vhd_flag = (
vhdutils.CREATE_VIRTUAL_DISK_FLAGS.get(new_vhd_type, 0))
self._vhdutils.create_vhd(
new_vhd_path=mock.sentinel.new_vhd_path,
new_vhd_type=new_vhd_type,
src_path=mock.sentinel.src_path,
max_internal_size=mock.sentinel.max_internal_size,
parent_path=mock.sentinel.parent_path)
self._fake_vst_struct.assert_called_once_with(
DeviceId=mock_get_dev_id.return_value,
VendorId=w_const.VIRTUAL_STORAGE_TYPE_VENDOR_MICROSOFT)
self.assertEqual(w_const.CREATE_VIRTUAL_DISK_VERSION_2,
fake_create_params.Version)
self.assertEqual(mock.sentinel.max_internal_size,
fake_create_params.Version2.MaximumSize)
self.assertEqual(mock.sentinel.parent_path,
fake_create_params.Version2.ParentPath)
self.assertEqual(mock.sentinel.src_path,
fake_create_params.Version2.SourcePath)
self.assertEqual(
vhdutils.VIRTUAL_DISK_DEFAULT_PHYS_SECTOR_SIZE,
fake_create_params.Version2.PhysicalSectorSizeInBytes)
self.assertEqual(
w_const.CREATE_VHD_PARAMS_DEFAULT_BLOCK_SIZE,
fake_create_params.Version2.BlockSizeInBytes)
self.assertEqual(
vhdutils.VIRTUAL_DISK_DEFAULT_SECTOR_SIZE,
fake_create_params.Version2.SectorSizeInBytes)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.CreateVirtualDisk,
self._ctypes.byref(fake_vst),
self._ctypes.c_wchar_p(mock.sentinel.new_vhd_path),
0,
None,
expected_create_vhd_flag,
0,
self._ctypes.byref(fake_create_params),
None,
self._ctypes.byref(mock_handle),
**self._run_args)
self._mock_close.assert_called_once_with(mock_handle)
def test_create_dynamic_vhd(self):
self._test_create_vhd(new_vhd_type=constants.VHD_TYPE_DYNAMIC)
def test_create_fixed_vhd(self):
self._test_create_vhd(new_vhd_type=constants.VHD_TYPE_FIXED)
@mock.patch.object(vhdutils.VHDUtils, 'create_vhd')
def test_create_dynamic_vhd_helper(self, mock_create_vhd):
self._vhdutils.create_dynamic_vhd(mock.sentinel.path,
mock.sentinel.size)
mock_create_vhd.assert_called_once_with(
mock.sentinel.path,
constants.VHD_TYPE_DYNAMIC,
max_internal_size=mock.sentinel.size)
@mock.patch.object(vhdutils.VHDUtils, 'create_vhd')
def test_create_differencing_vhd_helper(self, mock_create_vhd):
self._vhdutils.create_differencing_vhd(mock.sentinel.path,
mock.sentinel.parent_path)
mock_create_vhd.assert_called_once_with(
mock.sentinel.path,
constants.VHD_TYPE_DIFFERENCING,
parent_path=mock.sentinel.parent_path)
@mock.patch.object(vhdutils.VHDUtils, 'create_vhd')
def test_convert_vhd(self, mock_create_vhd):
self._vhdutils.convert_vhd(mock.sentinel.src,
mock.sentinel.dest,
mock.sentinel.vhd_type)
mock_create_vhd.assert_called_once_with(
mock.sentinel.dest,
mock.sentinel.vhd_type,
src_path=mock.sentinel.src)
def test_get_vhd_format_found_by_ext(self):
fake_vhd_path = 'C:\\test.vhd'
ret_val = self._vhdutils.get_vhd_format(fake_vhd_path)
self.assertEqual(constants.DISK_FORMAT_VHD, ret_val)
@mock.patch.object(vhdutils.VHDUtils, '_get_vhd_format_by_signature')
@mock.patch('os.path.exists')
def _test_vhd_format_unrecognized_ext(self, mock_exists,
mock_get_vhd_fmt_by_sign,
signature_available=False):
mock_exists.return_value = True
fake_vhd_path = 'C:\\test_vhd'
mock_get_vhd_fmt_by_sign.return_value = (
constants.DISK_FORMAT_VHD if signature_available else None)
if signature_available:
ret_val = self._vhdutils.get_vhd_format(fake_vhd_path)
self.assertEqual(constants.DISK_FORMAT_VHD, ret_val)
else:
self.assertRaises(exceptions.VHDException,
self._vhdutils.get_vhd_format,
fake_vhd_path)
def test_get_vhd_format_unrecognised_ext_unavailable_signature(self):
self._test_vhd_format_unrecognized_ext()
def test_get_vhd_format_unrecognised_ext_available_signature(self):
self._test_vhd_format_unrecognized_ext(signature_available=True)
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_format')
def test_get_vhd_device_id(self, mock_get_vhd_fmt):
mock_get_vhd_fmt.return_value = constants.DISK_FORMAT_VHD
dev_id = self._vhdutils._get_vhd_device_id(mock.sentinel.vhd_path)
mock_get_vhd_fmt.assert_called_once_with(mock.sentinel.vhd_path)
self.assertEqual(w_const.VIRTUAL_STORAGE_TYPE_DEVICE_VHD,
dev_id)
def _mock_open(self, read_data=None, curr_f_pos=0):
mock_open = mock.mock_open()
mock.patch.object(vhdutils, 'open', mock_open,
create=True).start()
f = mock_open.return_value
f.read.side_effect = read_data
f.tell.return_value = curr_f_pos
return mock_open
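    # Editor's note: descriptive aside, not original test code. Because
    # `f.read.side_effect` is assigned the `read_data` iterable above,
    # successive `f.read()` calls return successive elements:
    #
    #   mock_open = self._mock_open(read_data=('first', 'second'))
    #   f = mock_open.return_value
    #   f.read()  # -> 'first'
    #   f.read()  # -> 'second'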
def test_get_vhd_format_by_sig_vhdx(self):
read_data = (vhdutils.VHDX_SIGNATURE, )
self._mock_open(read_data=read_data)
fmt = self._vhdutils._get_vhd_format_by_signature(
mock.sentinel.vhd_path)
self.assertEqual(constants.DISK_FORMAT_VHDX, fmt)
def test_get_vhd_format_by_sig_vhd(self):
read_data = ('notthesig', vhdutils.VHD_SIGNATURE)
mock_open = self._mock_open(read_data=read_data, curr_f_pos=1024)
fmt = self._vhdutils._get_vhd_format_by_signature(
mock.sentinel.vhd_path)
self.assertEqual(constants.DISK_FORMAT_VHD, fmt)
mock_open.return_value.seek.assert_has_calls([mock.call(0, 2),
mock.call(-512, 2)])
def test_get_vhd_format_by_sig_invalid_format(self):
self._mock_open(read_data='notthesig', curr_f_pos=1024)
fmt = self._vhdutils._get_vhd_format_by_signature(
mock.sentinel.vhd_path)
self.assertIsNone(fmt)
def test_get_vhd_format_by_sig_zero_length_file(self):
mock_open = self._mock_open(read_data=('', ''))
fmt = self._vhdutils._get_vhd_format_by_signature(
mock.sentinel.vhd_path)
self.assertIsNone(fmt)
mock_open.return_value.seek.assert_called_once_with(0, 2)
@mock.patch.object(vhdutils.VHDUtils, '_open')
@mock.patch.object(vhdutils.VHDUtils, '_get_vhd_info_member')
def test_get_vhd_info(self, mock_get_vhd_info_member,
mock_open):
fake_info_member = w_const.GET_VIRTUAL_DISK_INFO_SIZE
fake_vhd_info = {'VirtualSize': mock.sentinel.virtual_size}
mock_open.return_value = mock.sentinel.handle
mock_get_vhd_info_member.return_value = fake_vhd_info
expected_open_flag = w_const.OPEN_VIRTUAL_DISK_FLAG_NO_PARENTS
expected_access_mask = (w_const.VIRTUAL_DISK_ACCESS_GET_INFO |
w_const.VIRTUAL_DISK_ACCESS_DETACH)
ret_val = self._vhdutils.get_vhd_info(mock.sentinel.vhd_path,
[fake_info_member])
self.assertEqual(fake_vhd_info, ret_val)
mock_open.assert_called_once_with(
mock.sentinel.vhd_path,
open_flag=expected_open_flag,
open_access_mask=expected_access_mask)
self._vhdutils._get_vhd_info_member.assert_called_once_with(
mock.sentinel.handle,
fake_info_member)
self._mock_close.assert_called_once_with(mock.sentinel.handle)
@mock.patch.object(vhdutils.VHDUtils, '_parse_vhd_info')
def test_get_vhd_info_member(self, mock_parse_vhd_info):
get_vd_info_struct = (
self._vdisk_struct.GET_VIRTUAL_DISK_INFO)
fake_params = get_vd_info_struct.return_value
fake_info_size = self._ctypes.sizeof.return_value
info_member = w_const.GET_VIRTUAL_DISK_INFO_PARENT_LOCATION
vhd_info = self._vhdutils._get_vhd_info_member(
mock.sentinel.vhd_path,
info_member)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.GetVirtualDiskInformation,
mock.sentinel.vhd_path,
self._ctypes.byref(
self._ctypes.c_ulong(fake_info_size)),
self._ctypes.byref(fake_params), None,
ignored_error_codes=[w_const.ERROR_VHD_INVALID_TYPE],
**self._run_args)
self.assertEqual(mock_parse_vhd_info.return_value, vhd_info)
mock_parse_vhd_info.assert_called_once_with(fake_params,
info_member)
def test_parse_vhd_info(self):
fake_info_member = w_const.GET_VIRTUAL_DISK_INFO_SIZE
fake_info = mock.Mock()
fake_info.Size._fields_ = [
("VirtualSize", vhdutils.wintypes.ULARGE_INTEGER),
("PhysicalSize", vhdutils.wintypes.ULARGE_INTEGER)]
fake_info.Size.VirtualSize = mock.sentinel.virt_size
fake_info.Size.PhysicalSize = mock.sentinel.phys_size
ret_val = self._vhdutils._parse_vhd_info(fake_info,
fake_info_member)
expected = {'VirtualSize': mock.sentinel.virt_size,
'PhysicalSize': mock.sentinel.phys_size}
self.assertEqual(expected, ret_val)
def test_parse_vhd_provider_subtype_member(self):
fake_info_member = w_const.GET_VIRTUAL_DISK_INFO_PROVIDER_SUBTYPE
fake_info = mock.Mock()
fake_info.ProviderSubtype = mock.sentinel.provider_subtype
ret_val = self._vhdutils._parse_vhd_info(fake_info, fake_info_member)
expected = {'ProviderSubtype': mock.sentinel.provider_subtype}
self.assertEqual(expected, ret_val)
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_info')
def test_get_vhd_size(self, mock_get_vhd_info):
ret_val = self._vhdutils.get_vhd_size(mock.sentinel.vhd_path)
self.assertEqual(mock_get_vhd_info.return_value, ret_val)
mock_get_vhd_info.assert_called_once_with(
mock.sentinel.vhd_path,
[w_const.GET_VIRTUAL_DISK_INFO_SIZE])
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_info')
def test_get_vhd_parent_path(self, mock_get_vhd_info):
mock_get_vhd_info.return_value = {
'ParentPath': mock.sentinel.parent_path}
ret_val = self._vhdutils.get_vhd_parent_path(mock.sentinel.vhd_path)
self.assertEqual(mock.sentinel.parent_path, ret_val)
mock_get_vhd_info.assert_called_once_with(
mock.sentinel.vhd_path,
[w_const.GET_VIRTUAL_DISK_INFO_PARENT_LOCATION])
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_info')
def test_get_vhd_type(self, mock_get_vhd_info):
mock_get_vhd_info.return_value = {
'ProviderSubtype': mock.sentinel.provider_subtype}
ret_val = self._vhdutils.get_vhd_type(mock.sentinel.vhd_path)
self.assertEqual(mock.sentinel.provider_subtype, ret_val)
mock_get_vhd_info.assert_called_once_with(
mock.sentinel.vhd_path,
[w_const.GET_VIRTUAL_DISK_INFO_PROVIDER_SUBTYPE])
@mock.patch.object(vhdutils.VHDUtils, '_open')
@mock.patch('os.remove')
def test_merge_vhd(self, mock_remove, mock_open):
open_params_struct = (
self._vdisk_struct.OPEN_VIRTUAL_DISK_PARAMETERS)
merge_params_struct = (
self._vdisk_struct.MERGE_VIRTUAL_DISK_PARAMETERS)
fake_open_params = open_params_struct.return_value
fake_merge_params = merge_params_struct.return_value
mock_open.return_value = mock.sentinel.handle
self._vhdutils.merge_vhd(mock.sentinel.vhd_path)
self.assertEqual(w_const.OPEN_VIRTUAL_DISK_VERSION_1,
fake_open_params.Version)
self.assertEqual(2,
fake_open_params.Version1.RWDepth)
mock_open.assert_called_once_with(
mock.sentinel.vhd_path,
open_params=self._ctypes.byref(fake_open_params))
self.assertEqual(w_const.MERGE_VIRTUAL_DISK_VERSION_1,
fake_merge_params.Version)
self.assertEqual(1,
fake_merge_params.Version1.MergeDepth)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.MergeVirtualDisk,
mock.sentinel.handle,
0,
self._ctypes.byref(fake_merge_params),
None,
**self._run_args)
mock_remove.assert_called_once_with(
mock.sentinel.vhd_path)
self._mock_close.assert_called_once_with(mock.sentinel.handle)
@mock.patch.object(vhdutils.VHDUtils, '_open')
def test_reconnect_parent_vhd(self, mock_open):
set_vdisk_info_struct = (
self._vdisk_struct.SET_VIRTUAL_DISK_INFO)
open_params_struct = (
self._vdisk_struct.OPEN_VIRTUAL_DISK_PARAMETERS)
fake_set_params = set_vdisk_info_struct.return_value
fake_open_params = open_params_struct.return_value
mock_open.return_value = mock.sentinel.handle
self._vhdutils.reconnect_parent_vhd(mock.sentinel.vhd_path,
mock.sentinel.parent_path)
self.assertEqual(w_const.OPEN_VIRTUAL_DISK_VERSION_2,
fake_open_params.Version)
self.assertFalse(fake_open_params.Version2.GetInfoOnly)
self._vhdutils._open.assert_called_once_with(
mock.sentinel.vhd_path,
open_flag=w_const.OPEN_VIRTUAL_DISK_FLAG_NO_PARENTS,
open_access_mask=0,
open_params=vhdutils.ctypes.byref(fake_open_params))
self.assertEqual(w_const.SET_VIRTUAL_DISK_INFO_PARENT_PATH,
fake_set_params.Version)
self.assertEqual(mock.sentinel.parent_path,
fake_set_params.ParentFilePath)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.SetVirtualDiskInformation,
mock.sentinel.handle,
vhdutils.ctypes.byref(fake_set_params),
**self._run_args)
self._mock_close.assert_called_once_with(mock.sentinel.handle)
@mock.patch.object(vhdutils.VHDUtils, 'get_internal_vhd_size_by_file_size')
@mock.patch.object(vhdutils.VHDUtils, '_resize_vhd')
@mock.patch.object(vhdutils.VHDUtils, '_check_resize_needed')
def _test_resize_vhd(self, mock_check_resize_needed,
mock_resize_helper, mock_get_internal_size,
is_file_max_size=True, resize_needed=True):
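        # Shared, parametrized helper: the public test_resize_vhd_* cases
        # below exercise the internal-size, file-max-size and no-op paths.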
mock_check_resize_needed.return_value = resize_needed
self._vhdutils.resize_vhd(mock.sentinel.vhd_path,
mock.sentinel.new_size,
is_file_max_size,
validate_new_size=True)
if is_file_max_size:
mock_get_internal_size.assert_called_once_with(
mock.sentinel.vhd_path, mock.sentinel.new_size)
expected_new_size = mock_get_internal_size.return_value
else:
expected_new_size = mock.sentinel.new_size
mock_check_resize_needed.assert_called_once_with(
mock.sentinel.vhd_path, expected_new_size)
if resize_needed:
mock_resize_helper.assert_called_once_with(mock.sentinel.vhd_path,
expected_new_size)
else:
self.assertFalse(mock_resize_helper.called)
def test_resize_vhd_specifying_internal_size(self):
self._test_resize_vhd(is_file_max_size=False)
def test_resize_vhd_specifying_file_max_size(self):
self._test_resize_vhd()
def test_resize_vhd_already_having_requested_size(self):
self._test_resize_vhd(resize_needed=False)
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_size')
def _test_check_resize_needed(self, mock_get_vhd_size,
current_size=1, new_size=2):
mock_get_vhd_size.return_value = dict(VirtualSize=current_size)
if current_size > new_size:
self.assertRaises(exceptions.VHDException,
self._vhdutils._check_resize_needed,
mock.sentinel.vhd_path,
new_size)
else:
resize_needed = self._vhdutils._check_resize_needed(
mock.sentinel.vhd_path, new_size)
self.assertEqual(current_size < new_size, resize_needed)
def test_check_resize_needed_smaller_new_size(self):
self._test_check_resize_needed(current_size=2, new_size=1)
def test_check_resize_needed_bigger_new_size(self):
self._test_check_resize_needed()
def test_check_resize_needed_smaller_equal_size(self):
self._test_check_resize_needed(current_size=1, new_size=1)
@mock.patch.object(vhdutils.VHDUtils, '_open')
def test_resize_vhd_helper(self, mock_open):
resize_vdisk_struct = (
self._vdisk_struct.RESIZE_VIRTUAL_DISK_PARAMETERS)
fake_params = resize_vdisk_struct.return_value
mock_open.return_value = mock.sentinel.handle
self._vhdutils._resize_vhd(mock.sentinel.vhd_path,
mock.sentinel.new_size)
self.assertEqual(w_const.RESIZE_VIRTUAL_DISK_VERSION_1,
fake_params.Version)
self.assertEqual(mock.sentinel.new_size,
fake_params.Version1.NewSize)
self._mock_run.assert_called_once_with(
vhdutils.virtdisk.ResizeVirtualDisk,
mock.sentinel.handle,
0,
vhdutils.ctypes.byref(fake_params),
None,
**self._run_args)
self._mock_close.assert_called_once_with(mock.sentinel.handle)
@mock.patch.object(vhdutils.VHDUtils, 'get_vhd_info')
@mock.patch.object(vhdutils.VHDUtils,
'_get_internal_vhd_size_by_file_size')
@mock.patch.object(vhdutils.VHDUtils,
'_get_internal_vhdx_size_by_file_size')
def _test_get_int_sz_by_file_size(
self, mock_get_vhdx_int_size,
mock_get_vhd_int_size, mock_get_vhd_info,
vhd_dev_id=w_const.VIRTUAL_STORAGE_TYPE_DEVICE_VHD,
vhd_type=constants.VHD_TYPE_DYNAMIC):
fake_vhd_info = dict(ProviderSubtype=vhd_type,
ParentPath=mock.sentinel.parent_path,
DeviceId=vhd_dev_id)
mock_get_vhd_info.side_effect = [fake_vhd_info]
        expected_vhd_info_calls = [mock.call(mock.sentinel.vhd_path)]
expected_vhd_checked = mock.sentinel.vhd_path
expected_checked_vhd_info = fake_vhd_info
if vhd_type == constants.VHD_TYPE_DIFFERENCING:
expected_checked_vhd_info = dict(
fake_vhd_info, vhd_type=constants.VHD_TYPE_DYNAMIC)
mock_get_vhd_info.side_effect.append(
expected_checked_vhd_info)
            expected_vhd_info_calls.append(
                mock.call(mock.sentinel.parent_path))
expected_vhd_checked = mock.sentinel.parent_path
is_vhd = vhd_dev_id == w_const.VIRTUAL_STORAGE_TYPE_DEVICE_VHD
expected_helper = (mock_get_vhd_int_size
if is_vhd
else mock_get_vhdx_int_size)
ret_val = self._vhdutils.get_internal_vhd_size_by_file_size(
mock.sentinel.vhd_path, mock.sentinel.vhd_size)
        mock_get_vhd_info.assert_has_calls(expected_vhd_info_calls)
expected_helper.assert_called_once_with(expected_vhd_checked,
mock.sentinel.vhd_size,
expected_checked_vhd_info)
self.assertEqual(expected_helper.return_value, ret_val)
def test_get_int_sz_by_file_size_vhd(self):
self._test_get_int_sz_by_file_size()
def test_get_int_sz_by_file_size_vhdx(self):
self._test_get_int_sz_by_file_size(
vhd_dev_id=w_const.VIRTUAL_STORAGE_TYPE_DEVICE_VHDX)
    def test_get_int_sz_by_file_size_differencing(self):
        self._test_get_int_sz_by_file_size(
            vhd_type=constants.VHD_TYPE_DIFFERENCING)
def _mocked_get_internal_vhd_size(self, root_vhd_size, vhd_type):
fake_vhd_info = dict(ProviderSubtype=vhd_type,
BlockSize=2097152,
ParentPath=mock.sentinel.parent_path)
return self._vhdutils._get_internal_vhd_size_by_file_size(
mock.sentinel.vhd_path, root_vhd_size, fake_vhd_info)
def test_get_internal_vhd_size_by_file_size_fixed(self):
root_vhd_size = 1 << 30
real_size = self._mocked_get_internal_vhd_size(
root_vhd_size=root_vhd_size,
vhd_type=constants.VHD_TYPE_FIXED)
expected_vhd_size = root_vhd_size - 512
self.assertEqual(expected_vhd_size, real_size)
def test_get_internal_vhd_size_by_file_size_dynamic(self):
root_vhd_size = 20 << 30
real_size = self._mocked_get_internal_vhd_size(
root_vhd_size=root_vhd_size,
vhd_type=constants.VHD_TYPE_DYNAMIC)
expected_md_size = 43008
expected_vhd_size = root_vhd_size - expected_md_size
self.assertEqual(expected_vhd_size, real_size)
@mock.patch.object(vhdutils.VHDUtils, '_get_vhdx_block_size')
@mock.patch.object(vhdutils.VHDUtils, '_get_vhdx_log_size')
@mock.patch.object(vhdutils.VHDUtils, '_get_vhdx_metadata_size_and_offset')
def test_get_vhdx_internal_size(self, mock_get_vhdx_md_sz_and_off,
mock_get_vhdx_log_sz,
mock_get_vhdx_block_size):
self._mock_open()
fake_log_sz = 1 << 20
fake_block_sz = 32 << 20
fake_md_sz = 1 << 20
fake_logical_sector_sz = 4096
new_vhd_sz = 1 << 30
# We expect less than a block to be reserved for internal metadata.
expected_max_int_sz = new_vhd_sz - fake_block_sz
fake_vhd_info = dict(SectorSize=fake_logical_sector_sz)
mock_get_vhdx_block_size.return_value = fake_block_sz
mock_get_vhdx_log_sz.return_value = fake_log_sz
mock_get_vhdx_md_sz_and_off.return_value = fake_md_sz, None
internal_size = self._vhdutils._get_internal_vhdx_size_by_file_size(
mock.sentinel.vhd_path, new_vhd_sz, fake_vhd_info)
self.assertIn(type(internal_size), six.integer_types)
self.assertEqual(expected_max_int_sz, internal_size)
def test_get_vhdx_internal_size_exception(self):
mock_open = self._mock_open()
mock_open.side_effect = IOError
func = self._vhdutils._get_internal_vhdx_size_by_file_size
self.assertRaises(exceptions.VHDException,
func,
mock.sentinel.vhd_path,
mock.sentinel.vhd_size,
mock.sentinel.vhd_info)
def _get_mock_file_handle(self, *args):
mock_file_handle = mock.Mock()
mock_file_handle.read.side_effect = args
return mock_file_handle
def test_get_vhdx_current_header(self):
# The current header has the maximum sequence number.
fake_seq_numbers = [
bytearray(b'\x01\x00\x00\x00\x00\x00\x00\x00'),
bytearray(b'\x02\x00\x00\x00\x00\x00\x00\x00')]
mock_handle = self._get_mock_file_handle(*fake_seq_numbers)
offset = self._vhdutils._get_vhdx_current_header_offset(mock_handle)
self.assertEqual(vhdutils.VHDX_HEADER_OFFSETS[1], offset)
@mock.patch.object(vhdutils.VHDUtils, '_get_vhdx_current_header_offset')
def test_get_log_size(self, mock_get_vhdx_curr_hd_offset):
fake_curr_header_offset = vhdutils.VHDX_HEADER_OFFSETS[0]
fake_log_sz = bytearray(b'\x01\x00\x00\x00')
mock_get_vhdx_curr_hd_offset.return_value = fake_curr_header_offset
mock_handle = self._get_mock_file_handle(fake_log_sz)
log_size = self._vhdutils._get_vhdx_log_size(mock_handle)
self.assertEqual(log_size, 1)
def test_get_vhdx_metadata_size(self):
fake_md_offset = bytearray(b'\x01\x00\x00\x00\x00\x00\x00\x00')
fake_md_sz = bytearray(b'\x01\x00\x00\x00')
mock_handle = self._get_mock_file_handle(fake_md_offset,
fake_md_sz)
md_sz, md_offset = self._vhdutils._get_vhdx_metadata_size_and_offset(
mock_handle)
self.assertEqual(1, md_sz)
self.assertEqual(1, md_offset)
@mock.patch.object(vhdutils.VHDUtils,
'_get_vhdx_metadata_size_and_offset')
def test_get_block_size(self, mock_get_md_sz_and_offset):
mock_get_md_sz_and_offset.return_value = (mock.sentinel.md_sz, 1024)
fake_block_size = bytearray(b'\x01\x00\x00\x00')
fake_offset = bytearray(b'\x02\x00\x00\x00')
mock_handle = self._get_mock_file_handle(fake_offset,
fake_block_size)
block_size = self._vhdutils._get_vhdx_block_size(mock_handle)
self.assertEqual(block_size, 1)
@mock.patch.object(vhdutils.VHDUtils, 'convert_vhd')
@mock.patch.object(os, 'unlink')
@mock.patch.object(os, 'rename')
def test_flatten_vhd(self, mock_rename, mock_unlink, mock_convert):
fake_vhd_path = r'C:\test.vhd'
expected_tmp_path = r'C:\test.tmp.vhd'
self._vhdutils.flatten_vhd(fake_vhd_path)
mock_convert.assert_called_once_with(fake_vhd_path, expected_tmp_path)
mock_unlink.assert_called_once_with(fake_vhd_path)
mock_rename.assert_called_once_with(expected_tmp_path, fake_vhd_path)
def test_get_best_supported_vhd_format(self):
fmt = self._vhdutils.get_best_supported_vhd_format()
self.assertEqual(constants.DISK_FORMAT_VHDX, fmt)
| 41.546527 | 79 | 0.672145 |
a9a2f166618e90394f1b3acea8b1edac02eebb9d
| 1,710 |
py
|
Python
|
minn/contrib/functions/activation.py
|
chantera/minn
|
ceb838a5ce6da76eefad270c37137b9c3e6e3240
|
[
"MIT"
] | 1 |
2019-05-27T13:46:06.000Z
|
2019-05-27T13:46:06.000Z
|
minn/contrib/functions/activation.py
|
chantera/minn
|
ceb838a5ce6da76eefad270c37137b9c3e6e3240
|
[
"MIT"
] | null | null | null |
minn/contrib/functions/activation.py
|
chantera/minn
|
ceb838a5ce6da76eefad270c37137b9c3e6e3240
|
[
"MIT"
] | null | null | null |
from minn.contrib.devices import get_device_from_array
from minn.core import FunctionNode
class Sigmoid(FunctionNode):
def forward(self, x):
x, = x
xp = get_device_from_array(x).xp
half = x.dtype.type(0.5)
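        # sigmoid(x) = 0.5 * tanh(0.5 * x) + 0.5; the tanh form is
        # numerically stable, avoiding exp() overflow for large |x|.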
y = xp.tanh(x * half) * half + half
return y,
def backward(self, gy, x, y):
gy, = gy
y, = y
return gy * y * (1. - y),
def sigmoid(x):
return x._g().apply(Sigmoid(), (x,))[0]
class Softmax(FunctionNode):
def __init__(self, axis=1):
self.axis = axis
def forward(self, x):
x, = x
xp = get_device_from_array(x).xp
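        # Subtract the per-row max before exponentiating; softmax is
        # invariant to adding a constant, and this avoids exp() overflow.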
y = x - x.max(axis=self.axis, keepdims=True)
xp.exp(y, out=y)
y /= y.sum(axis=self.axis, keepdims=True)
return y,
def backward(self, gy, x, y):
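        # Jacobian-vector product of softmax:
        # gx_i = y_i * (gy_i - sum_j(y_j * gy_j))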
gx = y[0] * gy[0]
sumdx = gx.sum(axis=self.axis, keepdims=True)
gx -= y[0] * sumdx
return gx,
def softmax(x, axis=1):
return x._g().apply(Softmax(axis), (x,))[0]
def _logsumexp(x, axis):
xp = get_device_from_array(x).xp
m = x.max(axis=axis, keepdims=True)
y = x - m
xp.exp(y, out=y)
s = y.sum(axis=axis, keepdims=True)
xp.log(s, out=s)
m += s
return m
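# Numerical sketch of the identity above (hypothetical values, assuming a
# NumPy backend): for x = [1000., 1000.], a naive log(sum(exp(x))) overflows
# to inf, while m + log(sum(exp(x - m))) = 1000 + log(2) stays finite.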
class LogSoftmax(FunctionNode):
def __init__(self, axis=1):
self.axis = axis
def forward(self, x):
x, = x
log_z = _logsumexp(x, self.axis)
y = x - log_z
return y,
def backward(self, gy, x, y):
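        # With y = log_softmax(x), exp(y) recovers softmax(x), so:
        # gx = gy - softmax(x) * sum(gy)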
xp = get_device_from_array(y[0]).xp
gx = gy[0] - xp.exp(y[0]) * gy[0].sum(axis=self.axis, keepdims=True)
return gx,
def log_softmax(x, axis=1):
return x._g().apply(LogSoftmax(axis), (x,))[0]
| 21.923077 | 76 | 0.549708 |
b535ef992ef54ec5b26dc055d0bc18ae45093f96
| 378 |
py
|
Python
|
utils/callbacks/error_processing_callbacks.py
|
harshitandro/Python-Instrumentator
|
f5185a814c6b7ecda5f2571846397bec2fa1b2f1
|
[
"Apache-2.0"
] | 1 |
2020-03-11T20:56:41.000Z
|
2020-03-11T20:56:41.000Z
|
utils/callbacks/error_processing_callbacks.py
|
harshitandro/Python-Instrumentator
|
f5185a814c6b7ecda5f2571846397bec2fa1b2f1
|
[
"Apache-2.0"
] | null | null | null |
utils/callbacks/error_processing_callbacks.py
|
harshitandro/Python-Instrumentator
|
f5185a814c6b7ecda5f2571846397bec2fa1b2f1
|
[
"Apache-2.0"
] | 1 |
2020-03-05T07:42:21.000Z
|
2020-03-05T07:42:21.000Z
|
import threading
def django_err_processing_callback(source, threadID, type, value, traceback):
pass
def django_static_err_processing_callback(source, threadID, type, value, traceback):
pass
def flask_err_processing_callback(source, threadID, type, value, traceback):
pass
def empty_err_processing_callback(source, threadID, type, value, traceback):
pass
| 21 | 84 | 0.783069 |
a7d44b466712b7ad775216340074571ed9448d5f
| 1,504 |
py
|
Python
|
conftest.py
|
karolinepauls/pytest-kafka
|
9a91408f8de0f841b3da2e077fc50eae47282771
|
[
"MIT"
] | 1 |
2019-10-25T07:12:37.000Z
|
2019-10-25T07:12:37.000Z
|
conftest.py
|
karolinepauls/pytest-kafka
|
9a91408f8de0f841b3da2e077fc50eae47282771
|
[
"MIT"
] | null | null | null |
conftest.py
|
karolinepauls/pytest-kafka
|
9a91408f8de0f841b3da2e077fc50eae47282771
|
[
"MIT"
] | null | null | null |
"""Test setup."""
from typing import Optional
import pytest # type: ignore
from test_pytest_kafka import test_custom_kill
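# Teardown time budget in seconds; anything slower suggests the custom
# kill in test_custom_kill did not take effect.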
EXPECTED_TEARDOWN_ON_KILL = 0.2
test_custom_kill_duration = None # type: Optional[float]
def _test_custom_kill_slow_teardown() -> bool:
return (test_custom_kill_duration is not None
and test_custom_kill_duration > EXPECTED_TEARDOWN_ON_KILL)
def pytest_runtest_makereport(item, call):
"""Record teardown time of `test_custom_kill`."""
if item.function == test_custom_kill and call.when == 'teardown':
global test_custom_kill_duration
test_custom_kill_duration = call.stop - call.start
@pytest.hookimpl(trylast=True)
def pytest_terminal_summary(terminalreporter, exitstatus, config):
"""Report `test_custom_kill` teardown time."""
if _test_custom_kill_slow_teardown():
terminalreporter.write_sep(
'=',
"`test_custom_kill` didn't tear down in time",
bold=True, red=True
)
assert test_custom_kill_duration is not None
terminalreporter.write_line(
'`test_custom_kill` is expected to tear down in under {} but it took {:.2f} sec'.format(
EXPECTED_TEARDOWN_ON_KILL, test_custom_kill_duration),
red=True, bold=True
)
def pytest_sessionfinish(session, exitstatus):
"""Exit with an error if `test_custom_kill` didn't tear down in time."""
if _test_custom_kill_slow_teardown():
session.exitstatus = 1
| 34.181818 | 100 | 0.703457 |
b2ecb9c33f82ac35e4f3349a8f3bce4bd22e4426
| 3,268 |
py
|
Python
|
src/main.py
|
alwayslivid/ShellBot
|
381d7c663ebc365af6069d74b127c52beb7ffbf6
|
[
"MIT"
] | 1 |
2019-01-04T14:37:07.000Z
|
2019-01-04T14:37:07.000Z
|
src/main.py
|
AlwaysLivid/Discord-ShellBot
|
381d7c663ebc365af6069d74b127c52beb7ffbf6
|
[
"MIT"
] | 1 |
2019-03-03T14:25:04.000Z
|
2019-03-03T14:25:04.000Z
|
src/main.py
|
AlwaysLivid/Discord-ShellBot
|
381d7c663ebc365af6069d74b127c52beb7ffbf6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@author: AlwaysLivid
@description: Performs administrative tasks remotely via Discord, without the need for port forwarding or other complicated networking setup.
'''
print("""
.__ .__ .__ ___. __
_____| |__ ____ | | | |\\_ |__ _____/ |_
/ ___/ | \\_/ __ \\| | | | | __ \\ / _ \\ __\\
\\___ \\| Y \\ ___/| |_| |_| \\_\\ ( <_> ) |
/____ >___| /\\___ >____/____/___ /\\____/|__|
\\/ \\/ \\/ \\/
Copyright (C) 2019 AlwaysLivid
=============================================================
======================= DISCLAIMER ==========================
=============================================================
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions; read the LICENSE file for details.
This bot was made for personal use and only the bot owner is
permitted to perform any sort of operation.
Never trust untrusted input.
The official documentation does not recommend the use of the
subprocess module without any sort of input sanitization.
https://docs.python.org/3/library/subprocess.html
This bot currently does not support interactive programs that
require additional user input.
=============================================================
""")
import discord
from discord.ext import commands
import subprocess
import logging, os, random, sys
import config
logging.basicConfig(
level=logging.INFO,
format='%(name)s - %(asctime)s - %(levelname)s - %(message)s',
handlers=[
logging.FileHandler("{0}/{1}.txt".format(os.path.dirname(os.path.realpath(__file__)), "log")),
logging.StreamHandler()
]
)
try:
from private import *
logging.warning("Using private.py file!")
except ImportError:
logging.info("The file private.py was not found.")
logging.warning("Using environment variable instead.")
client_secret = os.environ['CLIENT_SECRET']
bot = commands.Bot(command_prefix=config.prefix, description=config.description)
async def CogLoader():
for file in config.extensions:
try:
bot.load_extension(file)
except Exception as e:
logging.critical("Failed to load {}! ({})".format(file, e))
@bot.event
async def on_ready():
logging.info("Name: {}".format(bot.user.name))
logging.info("Name: {}".format(bot.user.discriminator))
logging.info("ID: {}".format(bot.user.id))
await CogLoader()
activity = random.choice(config.statuses)
await bot.change_presence(activity=discord.Game(name=activity))
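    # os.getenv returns a string or None (never False), so presence of the
    # variable is the reliable CI signal.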
    if os.getenv('CI') is not None or os.getenv('CONTINUOUS_INTEGRATION') is not None:
logging.critical("CI detected!")
logging.critical("Everything seems to be fine. Exiting...")
exit()
logging.info("Bot ready!")
def main():
logging.basicConfig(level=logging.INFO)
logging.info("Logging in to Discord!")
try:
bot.run(client_secret, reconnect = True)
except discord.DiscordException as e:
logging.critical(e)
if __name__ == "__main__":
main()
| 33.690722 | 166 | 0.609241 |
7bc896eb97ebfa8768a40e3b4f3c4cbc5da4d274
| 3,297 |
py
|
Python
|
tests/unit/test_preprocess.py
|
rsadaphule/nlp
|
a1241df90620e6e17e9f322621313ead4aa890f1
|
[
"MIT"
] | 4,407 |
2019-10-29T21:35:19.000Z
|
2022-03-31T13:56:37.000Z
|
tests/unit/test_preprocess.py
|
shubham9g17/nlp-recipes
|
a5cd2303187239799ae0b1597a7c16eb99a97108
|
[
"MIT"
] | 134 |
2019-10-30T23:38:59.000Z
|
2022-03-01T11:42:53.000Z
|
tests/unit/test_preprocess.py
|
shubham9g17/nlp-recipes
|
a5cd2303187239799ae0b1597a7c16eb99a97108
|
[
"MIT"
] | 726 |
2019-10-31T15:21:52.000Z
|
2022-03-31T10:18:22.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pytest
import pandas as pd
import numpy as np
import utils_nlp.dataset.preprocess as preprocess
@pytest.fixture(scope="module")
def df_sentences():
sentences = np.array(
[
"The man is playing the piano.",
"Some men are fighting.",
"A man is spreading shreded cheese on a pizza.",
"A man is playing the cello.",
"A man is spreading shreded cheese on a pizza.",
"A man is playing a large flute.",
"A man is playing the cello.",
"A man is playing on a guitar and singing.",
"The man is playing the piano.",
"Some men are fighting.",
]
).reshape(2, 5)
return pd.DataFrame(sentences, columns=["s1", "s2", "s3", "s4", "s5"])
def test_to_lowercase_all(df_sentences):
ldf = preprocess.to_lowercase_all(df_sentences)
assert sum(map(lambda x: x.islower(), ldf.values.flatten())) == len(
ldf.values.flatten()
)
def test_to_lowercase_subset(df_sentences):
ldf = preprocess.to_lowercase(df_sentences, column_names=["s4"])
assert sum(map(lambda x: x.islower(), ldf.s4.values.flatten())) == len(
ldf.s4.values.flatten()
)
def test_to_spacy_tokens(df_sentences):
sentence_cols = ["s1", "s2"]
token_cols = ["t1", "t2"]
token_df = preprocess.to_spacy_tokens(
df_sentences, sentence_cols=sentence_cols, token_cols=token_cols
)
assert token_df.shape[1] == df_sentences.shape[1] + len(
token_cols
) and sum(
list(
map(lambda x: (token_df[x].apply(type) == list).all(), token_cols)
)
) == len(
token_cols
)
def test_rm_spacy_stopwords(df_sentences):
sentence_cols = ["s1", "s2"]
stop_cols = ["stop1", "stop2"]
stop_df = preprocess.rm_spacy_stopwords(
df_sentences, sentence_cols=sentence_cols, stop_cols=stop_cols
)
assert stop_df.shape[1] == df_sentences.shape[1] + len(stop_cols) and sum(
list(map(lambda x: (stop_df[x].apply(type) == list).all(), stop_cols))
) == len(stop_cols)
def test_to_nltk_tokens(df_sentences):
sentence_cols = ["s1", "s2"]
token_cols = ["t1", "t2"]
token_df = preprocess.to_nltk_tokens(
df_sentences, sentence_cols=sentence_cols, token_cols=token_cols
)
assert token_df.shape[1] == df_sentences.shape[1] + len(
token_cols
) and sum(
list(
map(lambda x: (token_df[x].apply(type) == list).all(), token_cols)
)
) == len(
token_cols
)
def test_rm_nltk_stopwords(df_sentences):
sentence_cols = ["s1", "s2"]
stop_cols = ["stop1", "stop2"]
stop_df = preprocess.rm_nltk_stopwords(
df_sentences, sentence_cols=sentence_cols, stop_cols=stop_cols
)
assert stop_df.shape[1] == df_sentences.shape[1] + len(stop_cols) and sum(
list(map(lambda x: (stop_df[x].apply(type) == list).all(), stop_cols))
) == len(stop_cols)
def test_convert_to_unicode():
test_str = "test"
test_byte = test_str.encode("utf-8")
assert isinstance(preprocess.convert_to_unicode(test_str), str)
assert isinstance(preprocess.convert_to_unicode(test_byte), str)
| 30.813084 | 78 | 0.635729 |
c08f1ae060142be0e1d618a71a902a0ba47ddec5
| 7,563 |
py
|
Python
|
mysite/settings.py
|
mush42/mushy-mezzanine-on-openshift
|
4ceada506fc85920df66c0e13cb2b8815ade4fb9
|
[
"MIT"
] | null | null | null |
mysite/settings.py
|
mush42/mushy-mezzanine-on-openshift
|
4ceada506fc85920df66c0e13cb2b8815ade4fb9
|
[
"MIT"
] | null | null | null |
mysite/settings.py
|
mush42/mushy-mezzanine-on-openshift
|
4ceada506fc85920df66c0e13cb2b8815ade4fb9
|
[
"MIT"
] | null | null | null |
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import socket
#########
# PATHS #
#########
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# OpenShift is our PaaS for now.
ON_PAAS = 'OPENSHIFT_REPO_DIR' in os.environ
if ON_PAAS:
SECRET_KEY = os.environ['OPENSHIFT_SECRET_TOKEN']
else:
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')_7av^!cy(wfx=k#3*7x+(=j^fzv+ot^1@sh9s9t=8$bu@r(z$'
# SECURITY WARNING: don't run with debug turned on in production!
# adjust to turn off when on Openshift, but allow an environment variable to override on PAAS
DEBUG = not ON_PAAS
DEBUG = DEBUG or 'DEBUG' in os.environ
if ON_PAAS and DEBUG:
print("*** Warning - Debug mode is on ***")
TEMPLATE_DEBUG = True
if ON_PAAS:
ALLOWED_HOSTS = [os.environ['OPENSHIFT_APP_DNS'], socket.gethostname()]
else:
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
"mezzanine.twitter",
# "mezzanine.accounts",
# "mezzanine.mobile",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware',
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.TemplateForDeviceMiddleware",
"mezzanine.core.middleware.TemplateForHostMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
# Uncomment the following if using any of the SSL settings:
# "mezzanine.core.middleware.SSLRedirectMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
ROOT_URLCONF = 'mysite.urls'
# Templates
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
TEMPLATE_LOADERS = (
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
if ON_PAAS:
# determine if we are on MySQL or POSTGRESQL
if "OPENSHIFT_POSTGRESQL_DB_USERNAME" in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['OPENSHIFT_APP_NAME'],
'USER': os.environ['OPENSHIFT_POSTGRESQL_DB_USERNAME'],
'PASSWORD': os.environ['OPENSHIFT_POSTGRESQL_DB_PASSWORD'],
'HOST': os.environ['OPENSHIFT_POSTGRESQL_DB_HOST'],
'PORT': os.environ['OPENSHIFT_POSTGRESQL_DB_PORT'],
}
}
elif "OPENSHIFT_MYSQL_DB_USERNAME" in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ['OPENSHIFT_APP_NAME'],
'USER': os.environ['OPENSHIFT_MYSQL_DB_USERNAME'],
'PASSWORD': os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'],
'HOST': os.environ['OPENSHIFT_MYSQL_DB_HOST'],
'PORT': os.environ['OPENSHIFT_MYSQL_DB_PORT'],
}
}
else:
# stock django, local development.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'wsgi', 'static')
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(BASE_DIR, "static"),
)
# Start mezzanine settings.
TIME_ZONE = 'UTC'
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
LANGUAGES = (
('en', 'English'),
)
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
USE_I18N = False
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
MEDIA_URL = STATIC_URL + "media/"
MEDIA_ROOT = os.path.join(BASE_DIR, 'wsgi', *MEDIA_URL.strip("/").split("/"))
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.static",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.core.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
)
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
exec(open(f, "rb").read())
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
| 32.04661 | 93 | 0.709903 |
62b1a9876a61ba0af2219f7e8683df46e856b340
| 606 |
py
|
Python
|
shared-data/python/opentrons_shared_data/__init__.py
|
knownmed/opentrons
|
d02eb3c6cbf9f1c8c05c5e9e1dac30a92a8c5e6c
|
[
"Apache-2.0"
] | 235 |
2017-10-27T20:37:27.000Z
|
2022-03-30T14:09:49.000Z
|
shared-data/python/opentrons_shared_data/__init__.py
|
knownmed/opentrons
|
d02eb3c6cbf9f1c8c05c5e9e1dac30a92a8c5e6c
|
[
"Apache-2.0"
] | 8,425 |
2017-10-26T15:25:43.000Z
|
2022-03-31T23:54:26.000Z
|
shared-data/python/opentrons_shared_data/__init__.py
|
knownmed/opentrons
|
d02eb3c6cbf9f1c8c05c5e9e1dac30a92a8c5e6c
|
[
"Apache-2.0"
] | 130 |
2017-11-09T21:02:37.000Z
|
2022-03-15T18:01:24.000Z
|
"""A Python package wrapping json config definitions for the Opentrons stack.
This package should never be installed on its own, only as a dependency of
the main opentrons package
"""
import os
import json
from .load import get_shared_data_root, load_shared_data
HERE = os.path.abspath(os.path.dirname(__file__))
try:
with open(os.path.join(HERE, 'package.json')) as pkg:
package_json = json.load(pkg)
__version__ = package_json.get('version')
except (FileNotFoundError, OSError):
__version__ = 'unknown'
__all__ = ['__version__', 'get_shared_data_root', 'load_shared_data']
| 26.347826 | 77 | 0.744224 |
b6d7c634a8b64f4dfadb7191136fe4df64c84160
| 3,021 |
py
|
Python
|
workflows/cloudify_system_workflows/snapshots/constants.py
|
ilan-WS/cloudify-manager
|
510d8a277c848db351f38fc5b264806b2cb36d0b
|
[
"Apache-2.0"
] | null | null | null |
workflows/cloudify_system_workflows/snapshots/constants.py
|
ilan-WS/cloudify-manager
|
510d8a277c848db351f38fc5b264806b2cb36d0b
|
[
"Apache-2.0"
] | 2 |
2021-05-31T15:12:21.000Z
|
2021-05-31T19:03:05.000Z
|
workflows/cloudify_system_workflows/snapshots/constants.py
|
ilan-WS/cloudify-manager
|
510d8a277c848db351f38fc5b264806b2cb36d0b
|
[
"Apache-2.0"
] | null | null | null |
########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from cloudify.utils import ManagerVersion
HASH_SALT_FILENAME = 'hash_salt.json'
ADMIN_DUMP_FILE = 'admin_account.json'
LICENSE_DUMP_FILE = 'license.json'
METADATA_FILENAME = 'metadata.json'
M_VERSION = 'snapshot_version'
M_SCHEMA_REVISION = 'schema_revision'
M_STAGE_SCHEMA_REVISION = 'stage_schema_revision'
M_COMPOSER_SCHEMA_REVISION = 'composer_schema_revision'
M_HAS_CLOUDIFY_EVENTS = 'has_cloudify_events'
ARCHIVE_CERT_DIR = 'ssl'
CERT_DIR = '/etc/cloudify/ssl'
INTERNAL_CA_CERT_FILENAME = 'cloudify_internal_ca_cert.pem'
INTERNAL_CA_KEY_FILENAME = 'cloudify_internal_ca_key.pem'
INTERNAL_CERT_FILENAME = 'cloudify_internal_cert.pem'
INTERNAL_KEY_FILENAME = 'cloudify_internal_key.pem'
INTERNAL_P12_FILENAME = 'cloudify_internal.p12'
BROKER_DEFAULT_VHOST = '/'
DEFAULT_TENANT_NAME = 'default_tenant'
SECRET_STORE_AGENT_KEY_PREFIX = 'cfyagent_key__'
STAGE_BASE_FOLDER = '/opt/cloudify-stage'
STAGE_WIDGETS_FOLDER = 'dist/widgets'
STAGE_TEMPLATES_FOLDER = 'dist/templates'
STAGE_USERDATA_FOLDER = 'dist/userData'
STAGE_USER = 'stage_user'
STAGE_APP = 'stage'
# created during bootstrap
STAGE_RESTORE_SCRIPT = '/opt/cloudify/stage/restore-snapshot.py'
MANAGER_PYTHON = '/opt/manager/env/bin/python'
ADMIN_TOKEN_SCRIPT = '/opt/cloudify/mgmtworker/create-admin-token.py'
ALLOW_DB_CLIENT_CERTS_SCRIPT = (
'/opt/cloudify/mgmtworker/allow-snapshot-ssl-client-cert-access'
)
DENY_DB_CLIENT_CERTS_SCRIPT = (
'/opt/cloudify/mgmtworker/deny-snapshot-ssl-client-cert-access'
)
COMPOSER_BASE_FOLDER = '/opt/cloudify-composer'
COMPOSER_BLUEPRINTS_FOLDER = 'backend/dev'
COMPOSER_USER = 'composer_user'
COMPOSER_APP = 'composer'
SECURITY_FILENAME = 'rest-security.conf'
SECURITY_FILE_LOCATION = join('/opt/manager/', SECURITY_FILENAME)
REST_AUTHORIZATION_CONFIG_PATH = '/opt/manager/authorization.conf'
V_4_0_0 = ManagerVersion('4.0.0')
V_4_1_0 = ManagerVersion('4.1.0')
V_4_2_0 = ManagerVersion('4.2.0')
V_4_3_0 = ManagerVersion('4.3.0')
V_4_4_0 = ManagerVersion('4.4.0')
V_4_5_5 = ManagerVersion('4.5.5')
V_4_6_0 = ManagerVersion('4.6.0')
V_5_0_5 = ManagerVersion('5.0.5')
V_5_1_0 = ManagerVersion('5.1.0')
V_5_2_0 = ManagerVersion('5.2.0')
V_5_3_0 = ManagerVersion('5.3.0')
V_6_0_0 = ManagerVersion('6.0.0')
class VisibilityState(object):
PRIVATE = 'private'
TENANT = 'tenant'
GLOBAL = 'global'
STATES = [PRIVATE, TENANT, GLOBAL]
| 36.39759 | 79 | 0.777557 |
1744be46e92ec86e4b62db54f0e5e7e0c1aeee63
| 414 |
py
|
Python
|
handlers/cancel.py
|
Kvm99/Pressure-Bot-
|
4dc5c850f12265dd89150891c2e383c9c32d421f
|
[
"BSD-3-Clause"
] | null | null | null |
handlers/cancel.py
|
Kvm99/Pressure-Bot-
|
4dc5c850f12265dd89150891c2e383c9c32d421f
|
[
"BSD-3-Clause"
] | 208 |
2019-12-06T12:48:58.000Z
|
2022-03-28T21:10:35.000Z
|
handlers/cancel.py
|
Kvm99/Telegram-Pressurebot
|
4dc5c850f12265dd89150891c2e383c9c32d421f
|
[
"BSD-3-Clause"
] | null | null | null |
from buttons import start_markup
from states import States
def cancel(update, context):
"""
close the conversation,
return START possition of the conversation handler
"""
text = (
"Bye! I hope we can talk again some day."
)
context.bot.send_message(
chat_id=update.message.chat_id,
text=text,
reply_markup=start_markup)
return States.START_BUTTON
| 21.789474 | 54 | 0.664251 |
4a5ef6d8249163ebb599e2d31e98a5c96a029955
| 76 |
py
|
Python
|
getnotes/util.py
|
GeneriedJenelle/crossed-cogs-world
|
b31c398a7c4863cd588271a85140144babf25d11
|
[
"MIT"
] | 3 |
2020-04-05T20:29:34.000Z
|
2022-01-05T15:27:53.000Z
|
getnotes/util.py
|
GeneriedJenelle/crossed-cogs-world
|
b31c398a7c4863cd588271a85140144babf25d11
|
[
"MIT"
] | 7 |
2020-01-17T09:44:00.000Z
|
2022-03-06T12:37:23.000Z
|
getnotes/util.py
|
GeneriedJenelle/crossed-cogs-world
|
b31c398a7c4863cd588271a85140144babf25d11
|
[
"MIT"
] | 8 |
2020-04-24T04:31:03.000Z
|
2022-03-09T15:09:46.000Z
|
import re
def key_to_ckey(key):
return re.sub('[^A-Za-z0-9]+', '', key)
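# For example (hypothetical input): key_to_ckey("Foo_Bar-42!") == "FooBar42",
# since every non-alphanumeric character (including "_") is stripped.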
| 19 | 40 | 0.592105 |
ac5eb93cf475b64237dfde5408e2fd066264a859
| 9,472 |
py
|
Python
|
emissions_calculator/phase1_emissions_calculator/subcomp_b_process_emissions_factors.py
|
NW-Demand-Response-Emissions-Impacts/emissions_calculator
|
d4ed609eaf7503c02d83456148f6e0b5ca135050
|
[
"MIT"
] | null | null | null |
emissions_calculator/phase1_emissions_calculator/subcomp_b_process_emissions_factors.py
|
NW-Demand-Response-Emissions-Impacts/emissions_calculator
|
d4ed609eaf7503c02d83456148f6e0b5ca135050
|
[
"MIT"
] | 1 |
2021-11-04T16:00:00.000Z
|
2021-11-04T16:00:00.000Z
|
emissions_calculator/phase1_emissions_calculator/subcomp_b_process_emissions_factors.py
|
NW-Demand-Response-Emissions-Impacts/Main
|
d4ed609eaf7503c02d83456148f6e0b5ca135050
|
[
"MIT"
] | 2 |
2021-12-07T21:22:28.000Z
|
2021-12-09T17:33:13.000Z
|
"""
subcomp_b_process_emissions_factors.py
Read output emissions rates and DR hours from subcomponent a.
For all DR plans (e.g. old bins, new bins),
return seasonal and annual emissions rates averages
only for days with DR, averaged 2022-2041.
Also return seasonal and annual emissions rates averages
for all days in a given year (e.g. 2022),
which will be shown on the general public page.
"""
import pandas as pd
from emissions_parameters import SEASONS_ALLDAYS
def seasonal_ave(dr_name, dr_seasons, emissions_scenario_list,
emissions_rates_df_out, dr_hours_df_dict_out):
"""
Compute seasonal averages of hourly emissions for DR days
for each DR plan and season and each emissions scenario.
Args:
dr_name: list of the names of each DR plan (str)
dr_seasons: array containing a list of seasons (str) with DR hours
for each DR plan
emissions_scenario_list: list of policy scenarios (str)
with emissions rates files
emissions_rates_df_out: the emissions rates dataframe
dr_hours_df_dict_out: dictionary of DR hours dataframes
Returns:
df_seasonal_ave: dictionary of seasonal emissions rates averages
Access output by:
df_seasonal_ave=seasonal_ave()
Output example:
df_seasonal_ave['oldbins_Winter']['Baseline']
"""
df_seasonal_ave = {}
for idx, drname in enumerate(dr_name):
seasons = dr_seasons[idx]
for season in seasons:
dict_key = drname + '_' + season
df_seasonal_ave[dict_key] = {}
for scenario_name in emissions_scenario_list:
column_name = scenario_name + ' Emissions Rate Estimate'
df_seasonal_ave[dict_key][scenario_name] = \
get_hour_ave(emissions_rates_df_out,
dr_hours_df_dict_out[dict_key],
column_name)
return df_seasonal_ave
def annual_ave(dr_name, dr_seasons, emissions_scenario_list,
emissions_rates_df_out, dr_hours_df_dict_out):
"""
Compute annual averages of hourly emissions for DR days
for each DR plan and each emissions scenario.
Args:
dr_name: list of the names of each DR plan (str)
dr_seasons: array containing a list of seasons (str) with DR hours
for each DR plan
emissions_scenario_list: list of policy scenarios (str)
with emissions rates files
emissions_rates_df_out: the emissions rates dataframe
dr_hours_df_dict_out: dictionary of DR hours dataframes
Returns:
df_annual_ave: a dictionary of annual emissions rates averages
Access output by:
df_annual_ave=annual_ave()
Output example:
df_annual_ave['oldbins']['Baseline']
"""
df_annual_ave = {}
for idx, drname in enumerate(dr_name):
seasons = dr_seasons[idx]
df_annual_ave[drname] = {}
# bin_season_name distinguish old/new bins, winter/summer/fall
bin_season_name = []
for season in seasons:
dict_key = drname + '_' + season
bin_season_name.append(dict_key)
# For old bins, combine winter & summer
# For new bins, combine winter, summer & fall
frames = [dr_hours_df_dict_out[x] for x in bin_season_name]
dr_hours_df = pd.concat(frames)
for scenario_name in emissions_scenario_list:
column_name = scenario_name + ' Emissions Rate Estimate'
df_annual_ave[drname][scenario_name] = \
get_hour_ave(emissions_rates_df_out, dr_hours_df, column_name)
return df_annual_ave
def get_hour_ave(emissions_data, dr_hours, column_name):
"""
Select DR hour days and return hourly average emissions rates.
Called in seasonal_ave(), annual_ave()
Args:
emissions_data: dataframe with hourly emissions rates
dr_hours: dataframe with hours of DR implementation
column_name: name (str) of emissions rates column in emissions_data
Returns:
hourly average emissions rates for DR days
"""
df_cp = emissions_data
    # Group by month and day, sum the DVR column, and keep days where the
    # sum >= 1: those are the days with DR events.
df_1 = dr_hours.groupby(['Month', 'Day'])['DVR'].sum().reset_index()
df_1 = df_1[df_1['DVR'] >= 1]
# Combine month and day together
df_1['month_day'] = df_1['Month']*100 + df_1['Day']
df_cp['month_day'] = df_cp['Report_Month']*100 + df_cp['Report_Day']
# Select DR days in emission rates dataset
df_2 = df_cp[df_cp['month_day'].isin(df_1['month_day'])]
# Compute daily average
return df_2.groupby(['Report_Hour'])[column_name].mean().reset_index()
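# Illustration of the month_day key built above (hypothetical values):
# Month=3, Day=7 encodes to 3 * 100 + 7 == 307; the same encoding is built
# from Report_Month/Report_Day, so isin() selects exactly the DR days.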
def alldays_oneyear_seasonal_ave(emissions_scenario_list,
emissions_rates_df_out, year):
"""
Compute seasonal and annual emissions rates averages
for all days for one year.
Args:
emissions_scenario_list: list of policy scenarios (str)
with emissions rates files
emissions_rates_df_out: the emissions rates dataframe
year: the year (int) to average emissions rates over
Returns:
df_oneyear_seasonal_ave: dictionary of average emissions rates
for each season and emissions scenario
Access output by:
df_oneyear_seasonal_ave=alldays_oneyear_seasonal_ave()
Output example:
df_oneyear_seasonal_ave['Winter']['Baseline']
"""
    if year not in emissions_rates_df_out['Report_Year'].tolist():
        raise ValueError('Year unavailable!')
df_oneyear_seasonal_ave = {}
for season in SEASONS_ALLDAYS:
df_oneyear_seasonal_ave[season] = {}
for scenario_name in emissions_scenario_list:
column_name = scenario_name + ' Emissions Rate Estimate'
df_oneyear_seasonal_ave[season][scenario_name] = \
get_oneyear_hour_ave(emissions_rates_df_out, season, column_name, year)
return df_oneyear_seasonal_ave
def get_oneyear_hour_ave(emissions_data, season, column_name, year):
"""
Select all days according to season (including all seasons)
and return hourly average emissions rates.
Called in alldays_oneyear_seasonal_ave()
Args:
emissions_data: dataframe with hourly emissions rates
season: season (str) to calculate average over
column_name: name (str) of emissions rates column in emissions_data
year: year (int) to calculate average over
Returns:
hourly average emissions rates for the given season
"""
df_cp = emissions_data
    # Month ranges for the different seasons are defined as follows:
if season == 'Winter':
month = [1, 2, 3]
elif season == 'Spring':
month = [4, 5, 6]
elif season == 'Summer':
month = [7, 8, 9]
elif season == 'Fall':
month = [10, 11, 12]
elif season == 'Annual':
month = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
else:
raise ValueError('Time period unavailable!')
    if year not in df_cp['Report_Year'].tolist():
        raise ValueError('Year unavailable!')
df_2 = df_cp[df_cp['Report_Month'].isin(month)]
df_2 = df_2[df_2['Report_Year'] == year]
return df_2.groupby(['Report_Hour'])[column_name].mean().reset_index()
def subcomp_b_runall(dr_name, dr_seasons, emissions_scenario_list,
emissions_rates_df_out, dr_hours_df_dict_out, year):
"""
Runs through all of the above functions.
Args:
dr_name: list of the names of each DR plan (str)
dr_seasons: array containing a list of seasons (str) with DR hours
for each DR plan
emissions_scenario_list: list of policy scenarios (str)
with emissions rates files
emissions_rates_df_out: the emissions rates dataframe
dr_hours_df_dict_out: dictionary of DR hours dataframes
year: year (int) to output emissions rates averages for all days
for general info page of dashboard
Returns:
df_seasonal_ave: dictionary of seasonally averaged hourly emissions rates
for days with DR averaged over full period (2022-2041)
df_annual_ave: dictionary of annually averaged hourly emissions rates
for days with DR averaged over full period (2022-2041)
df_oneyear_seasonal_ave: dictionary of seasonally, annually averaged hourly
emissions rates for all days of a given year
"""
    if year not in emissions_rates_df_out['Report_Year'].tolist():
        raise ValueError('Year unavailable!')
df_seasonal_ave = seasonal_ave(dr_name, dr_seasons, emissions_scenario_list,
emissions_rates_df_out, dr_hours_df_dict_out)
df_annual_ave = annual_ave(dr_name, dr_seasons, emissions_scenario_list,
emissions_rates_df_out, dr_hours_df_dict_out)
df_oneyear_seasonal_ave = alldays_oneyear_seasonal_ave(emissions_scenario_list,
emissions_rates_df_out, year)
return df_seasonal_ave, df_annual_ave, df_oneyear_seasonal_ave
| 36.015209 | 88 | 0.656883 |
85cb3c7c7f94a6f1a7f88e229c9fd88167e7c60b
| 9,680 |
py
|
Python
|
3 calcGUI/calcGUI.py
|
rbrownsr/pythonteachingcode
|
ba2724da9059877a16711452024d366740e35940
|
[
"MIT"
] | null | null | null |
3 calcGUI/calcGUI.py
|
rbrownsr/pythonteachingcode
|
ba2724da9059877a16711452024d366740e35940
|
[
"MIT"
] | null | null | null |
3 calcGUI/calcGUI.py
|
rbrownsr/pythonteachingcode
|
ba2724da9059877a16711452024d366740e35940
|
[
"MIT"
] | null | null | null |
from tkinter import *
from math import sqrt as sqr
from math import sin as Sin
class Application(Frame):
"""
An example of a calculator app developed using the
Tkinter GUI.
"""
def __init__(self, master):
"""
Initializes the frame.
:param master: root.Tk()
"""
Frame.__init__(self, master)
self.entry = Entry(master, width=24, font=("Arial",25))
self.entry.grid(row=0, column=0, columnspan=6, sticky="w")
self.entry.focus_set()
self.entry.configure(state="disabled", disabledbackground="white", disabledforeground="black")
self.create_widgets()
self.bind_buttons(master)
self.grid()
def add_chr(self, char, btn=None):
"""
Concatenates a character passed from a button press (or key type)
to a string.
:param char: string to add passed from a button
:param btn: button name to use if key is pressed (to flash)
:return: None
"""
self.entry.configure(state="normal")
        self.flash(btn)  # Flash the button corresponding to the keystroke
if self.entry.get() == "Invalid Input":
self.entry.delete(0,END)
self.entry.insert(END, char)
self.entry.configure(state="disabled")
def clear(self):
"""
Allows user to backspace their entry.
:return: None
"""
self.entry.configure(state="normal")
if self.entry.get() != "Invalid Input":
# Clears full entry when "Invalid Input"
text = self.entry.get()[:-1]
self.entry.delete(0,END)
self.entry.insert(0,text)
else:
self.entry.delete(0, END)
self.entry.configure(state="disabled")
def clear_all(self):
"""
Allows user to clear the full entry.
:return: None
"""
self.entry.configure(state="normal")
self.entry.delete(0, END)
self.entry.configure(state="disabled")
def calculate(self):
"""
Changes the operation symbols to their mathematical representation used in
the eval() method.
:return: None
"""
self.entry.configure(state="normal")
e = self.entry.get()
e = e.replace("√","sqr")
e = e.replace("×", "*")
e = e.replace("²", "**2")
e = e.replace("^", "**")
e = e.replace("÷", "/")
e = e.replace("sin", "Sin")
try:
ans = eval(e)
except Exception as ex:
self.entry.delete(0,END)
self.entry.insert(0, "Invalid Input")
else:
self.entry.delete(0,END)
if len(str(ans)) > 20: # Alleviates problem of large numbers
self.entry.insert(0, '{:.10e}'.format(ans))
else:
self.entry.insert(0, ans)
self.entry.configure(state="disabled")
def flash(self,btn):
"""
Flashes a corresponding button when key is pressed.
:param btn: button
:return: None
"""
if btn != None:
btn.config(bg="yellow")
if btn == self.c_bttn:
self.clear()
self.master.after(100, lambda: btn.config(bg="SystemButtonFace"))
elif btn == self.eq_bttn:
self.master.after(100, lambda: btn.config(bg="lightgrey"))
self.calculate()
elif btn == self.ac_bttn:
self.clear_all()
self.master.after(100, lambda: btn.config(bg="SystemButtonFace"))
else:
self.master.after(100, lambda: btn.config(bg="SystemButtonFace"))
else:
pass
def bind_buttons(self, master):
"""
Binds keys to their appropriate input
:param master: root.Tk()
:return: None
"""
master.bind("<Return>", lambda event, btn=self.eq_bttn: self.flash(btn))
master.bind("<BackSpace>", lambda event, btn=self.c_bttn: self.flash(btn))
master.bind("9", lambda event, char="9", btn=self.nine_bttn: self.add_chr(char, btn))
master.bind("8", lambda event, char="8", btn=self.eight_bttn: self.add_chr(char, btn))
master.bind("7", lambda event, char="7", btn=self.seven_bttn: self.add_chr(char, btn))
master.bind("6", lambda event, char="6", btn=self.six_bttn: self.add_chr(char, btn))
master.bind("5", lambda event, char="5", btn=self.five_bttn: self.add_chr(char, btn))
master.bind("4", lambda event, char="4", btn=self.four_bttn: self.add_chr(char, btn))
master.bind("3", lambda event, char="3", btn=self.three_bttn: self.add_chr(char, btn))
master.bind("2", lambda event, char="2", btn=self.two_bttn: self.add_chr(char, btn))
master.bind("1", lambda event, char="1", btn=self.one_bttn: self.add_chr(char, btn))
master.bind("0", lambda event, char="0", btn=self.zero_bttn: self.add_chr(char, btn))
master.bind("*", lambda event, char="×", btn=self.mult_bttn: self.add_chr(char, btn))
master.bind("/", lambda event, char="÷", btn=self.div_bttn: self.add_chr(char, btn))
master.bind("^", lambda event, char="^", btn=self.sqr_bttn: self.add_chr(char, btn))
master.bind("%", lambda event, char="%", btn=self.mod_bttn: self.add_chr(char, btn))
master.bind(".", lambda event, char=".", btn=self.dec_bttn: self.add_chr(char, btn))
master.bind("-", lambda event, char="-", btn=self.sub_bttn: self.add_chr(char, btn))
master.bind("+", lambda event, char="+", btn=self.add_bttn: self.add_chr(char, btn))
master.bind("(", lambda event, char="(", btn=self.lpar_bttn: self.add_chr(char, btn))
master.bind(")", lambda event, char=")", btn=self.rpar_bttn: self.add_chr(char, btn))
master.bind("c", lambda event, btn=self.ac_bttn: self.flash(btn), self.clear_all)
def create_widgets(self):
"""
Creates the widgets to be used in the grid.
:return: None
"""
self.sin_bttn = Button(self, text="sin", width=9, height=3, command=lambda: self.add_chr('sin'))
self.sin_bttn.grid(row=1, column=6)
self.eq_bttn = Button(self, text="=", width=20, height=3, bg="lightgrey", command=lambda: self.calculate())
self.eq_bttn.grid(row=4, column=4, columnspan=2)
self.ac_bttn = Button(self, text='CE', width=9, height=3, command=lambda: self.clear_all())
self.ac_bttn.grid(row=1, column=4)
self.c_bttn = Button(self, text='←', width=9, height=3, command=lambda: self.clear())
self.c_bttn.grid(row=1, column=5 )
self.add_bttn = Button(self, text="+", width=9, height=3, command=lambda: self.add_chr('+'))
self.add_bttn.grid(row=4, column=3)
self.mult_bttn = Button(self, text="×", width=9, height=3, command=lambda: self.add_chr('×'))
self.mult_bttn.grid(row=2, column=3)
self.sub_bttn = Button(self, text="-", width=9, height=3, command=lambda: self.add_chr('-'))
self.sub_bttn.grid(row=3, column=3)
self.div_bttn = Button(self, text="÷", width=9, height=3, command=lambda: self.add_chr('/'))
self.div_bttn.grid(row=1, column=3)
self.mod_bttn = Button(self, text="%", width=9, height=3, command=lambda: self.add_chr('%'))
self.mod_bttn.grid(row=4, column=2)
self.seven_bttn = Button(self, text="7", width=9, height=3, command=lambda: self.add_chr(7))
self.seven_bttn.grid(row=1, column=0)
self.eight_bttn = Button(self, text="8", width=9, height=3, command=lambda: self.add_chr(8))
self.eight_bttn.grid(row=1, column=1)
self.nine_bttn = Button(self, text="9", width=9, height=3, command=lambda: self.add_chr(9))
self.nine_bttn.grid(row=1, column=2)
self.four_bttn = Button(self, text="4", width=9, height=3, command=lambda: self.add_chr(4))
self.four_bttn.grid(row=2, column=0)
self.five_bttn = Button(self, text="5", width=9, height=3, command=lambda: self.add_chr(5))
self.five_bttn.grid(row=2, column=1)
self.six_bttn = Button(self, text="6", width=9, height=3, command=lambda: self.add_chr(6))
self.six_bttn.grid(row=2, column=2)
self.one_bttn = Button(self, text="1", width=9, height=3, command=lambda: self.add_chr(1))
self.one_bttn.grid(row=3, column=0)
self.two_bttn = Button(self, text="2", width=9, height=3, command=lambda: self.add_chr(2))
self.two_bttn.grid(row=3, column=1)
self.three_bttn = Button(self, text="3", width=9, height=3, command=lambda: self.add_chr(3))
self.three_bttn.grid(row=3, column=2)
self.zero_bttn = Button(self, text="0", width=9, height=3, command=lambda: self.add_chr(0))
self.zero_bttn.grid(row=4, column=0)
self.dec_bttn = Button(self, text=".", width=9, height=3, command=lambda: self.add_chr('.'))
self.dec_bttn.grid(row=4, column=1)
self.lpar_bttn = Button(self, text="(", width=9, height=3, command=lambda: self.add_chr('('))
self.lpar_bttn.grid(row=2, column=4)
self.rpar_bttn = Button(self, text=")", width=9, height=3, command=lambda: self.add_chr(')'))
self.rpar_bttn.grid(row=2, column=5)
self.sq_bttn = Button(self, text="√", width=9, height=3, command=lambda: self.add_chr('√('))
self.sq_bttn.grid(row=3, column=4)
self.sqr_bttn = Button(self, text="^", width=9, height=3, command=lambda: self.add_chr('^'))
self.sqr_bttn.grid(row=3, column=5)
root = Tk()
root.geometry()
root.title("Exciting GUI Calculator")
app = Application(root)
root.mainloop()
| 43.214286 | 115 | 0.596178 |
75c7b768139308e683ae232a5ddf90814bc58f46
| 3,390 |
py
|
Python
|
third_party/gsutil/gslib/tests/test_file_part.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 2,151 |
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
third_party/gsutil/gslib/tests/test_file_part.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 395 |
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
third_party/gsutil/gslib/tests/test_file_part.py
|
tingshao/catapult
|
a8fe19e0c492472a8ed5710be9077e24cc517c5c
|
[
"BSD-3-Clause"
] | 338 |
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for FilePart class."""
from __future__ import absolute_import
import os
from gslib.file_part import FilePart
import gslib.tests.testcase as testcase
# pylint: disable=protected-access
class TestFilePart(testcase.GsUtilUnitTestCase):
"""Unit tests for FilePart class."""
def test_tell(self):
filename = 'test_tell'
contents = 100 * 'x'
fpath = self.CreateTempFile(file_name=filename, contents=contents)
part_length = 23
start_pos = 50
fp = FilePart(fpath, start_pos, part_length)
self.assertEqual(start_pos, fp._fp.tell())
self.assertEqual(0, fp.tell())
def test_seek(self):
"""Tests seeking in a FilePart."""
filename = 'test_seek'
contents = 100 * 'x'
part_length = 23
start_pos = 50
fpath = self.CreateTempFile(file_name=filename, contents=contents)
fp = FilePart(fpath, start_pos, part_length)
offset = 10
# Absolute positioning.
fp.seek(offset)
self.assertEqual(start_pos + offset, fp._fp.tell())
self.assertEqual(offset, fp.tell())
# Relative positioning.
fp.seek(offset, whence=os.SEEK_CUR)
self.assertEqual(start_pos + 2 * offset, fp._fp.tell())
self.assertEqual(2 * offset, fp.tell())
# Absolute positioning from EOF.
fp.seek(-offset, whence=os.SEEK_END)
self.assertEqual(start_pos + part_length - offset, fp._fp.tell())
self.assertEqual(part_length - offset, fp.tell())
# Seek past EOF.
fp.seek(1, whence=os.SEEK_END)
self.assertEqual(start_pos + part_length + 1, fp._fp.tell())
self.assertEqual(part_length + 1, fp.tell())
def test_read(self):
"""Tests various reaad operations with FilePart."""
filename = 'test_read'
contents = ''
for i in range(1, 256):
contents += str(i)
part_length = 23
start_pos = 50
fpath = self.CreateTempFile(file_name=filename, contents=contents)
# Read in the whole file.
fp = FilePart(fpath, start_pos, part_length)
whole_file = fp.read()
self.assertEqual(contents[start_pos:(start_pos + part_length)], whole_file)
# Read in a piece of the file from the beginning.
fp.seek(0)
offset = 10
partial_file = fp.read(offset)
self.assertEqual(
contents[start_pos:(start_pos + offset)],
partial_file)
# Read in the rest of the file.
remaining_file = fp.read(part_length - offset)
self.assertEqual(
contents[(start_pos + offset):(start_pos + part_length)],
remaining_file)
self.assertEqual(
contents[start_pos:(start_pos + part_length)],
partial_file + remaining_file)
# Try to read after reaching EOF.
empty_file = fp.read(100)
self.assertEqual('', empty_file)
empty_file = fp.read()
self.assertEqual('', empty_file)
| 31.682243 | 79 | 0.689971 |
50dd05cc0b92785186bd922ca788d23124c3d45b
| 380 |
py
|
Python
|
dataworkspace/dataworkspace/apps/applications/migrations/0009_auto_20200610_1318.py
|
uktrade/jupyterhub-data-auth-admin
|
91544f376209a201531f4dbfb8faad1b8ada18c9
|
[
"MIT"
] | 1 |
2019-06-10T08:22:56.000Z
|
2019-06-10T08:22:56.000Z
|
dataworkspace/dataworkspace/apps/applications/migrations/0009_auto_20200610_1318.py
|
uktrade/jupyterhub-data-auth-admin
|
91544f376209a201531f4dbfb8faad1b8ada18c9
|
[
"MIT"
] | 2 |
2019-05-17T13:10:42.000Z
|
2019-06-17T10:48:46.000Z
|
dataworkspace/dataworkspace/apps/applications/migrations/0009_auto_20200610_1318.py
|
uktrade/jupyterhub-data-auth-admin
|
91544f376209a201531f4dbfb8faad1b8ada18c9
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-06-10 13:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("applications", "0008_auto_20200519_1245")]
operations = [
migrations.RemoveField(model_name="applicationtemplate", name="user_access_type"),
migrations.DeleteModel(name="ApplicationTemplateUserPermission"),
]
| 27.142857 | 90 | 0.736842 |
b26f4ab2516db7b179a1808e7c21aa0fe0065369
| 279 |
py
|
Python
|
game/main.py
|
PictElm/Kaafhet
|
f9e2a1fac1db1f5ebb377eef91c938db0b997770
|
[
"Apache-2.0"
] | null | null | null |
game/main.py
|
PictElm/Kaafhet
|
f9e2a1fac1db1f5ebb377eef91c938db0b997770
|
[
"Apache-2.0"
] | null | null | null |
game/main.py
|
PictElm/Kaafhet
|
f9e2a1fac1db1f5ebb377eef91c938db0b997770
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: latin-1 -*-
"""
Created on Mon Feb 22 15:37:08 2016
@author: Celestin
"""
from Kaafhet.Inter import register, run, makeDefaultWorld, counter, display
from sac import sac
from gro import gro
register(sac)
register(gro)
run(makeDefaultWorld(), counter, display)
| 16.411765 | 75 | 0.727599 |
c91c833a5199b9948338ce062c1148a7d6877f84
| 468 |
py
|
Python
|
mytest/annoPropertyTest.py
|
liangjie18430/flask_test_myself
|
8923e058d834d6ab7326f869b945601c13674105
|
[
"BSD-3-Clause"
] | null | null | null |
mytest/annoPropertyTest.py
|
liangjie18430/flask_test_myself
|
8923e058d834d6ab7326f869b945601c13674105
|
[
"BSD-3-Clause"
] | null | null | null |
mytest/annoPropertyTest.py
|
liangjie18430/flask_test_myself
|
8923e058d834d6ab7326f869b945601c13674105
|
[
"BSD-3-Clause"
] | null | null | null |
class Student(object):
@property
def birth(self):
return self._birth
@birth.setter
def birth(self, value):
self._birth = value
@property
def age(self):
return 2015 - self._birth
def __call__(self, *args, **kwargs):
pass
if __name__ == '__main__':
s = Student()
s.birth=1992
print(s.birth)
print(s.age)
try:
s.age=10
except Exception:
print("can't not set the age")
| 18.72 | 40 | 0.566239 |
2f84eaef856a77cda610aac50eaebe5e45f8f08e
| 10,054 |
py
|
Python
|
wjn-source/contrib/spendfrom/spendfrom.py
|
WJNLLC/WJN
|
39475229d0ab1239b31bdfd7f2ea7672e08b70a0
|
[
"MIT"
] | null | null | null |
wjn-source/contrib/spendfrom/spendfrom.py
|
WJNLLC/WJN
|
39475229d0ab1239b31bdfd7f2ea7672e08b70a0
|
[
"MIT"
] | null | null | null |
wjn-source/contrib/spendfrom/spendfrom.py
|
WJNLLC/WJN
|
39475229d0ab1239b31bdfd7f2ea7672e08b70a0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the bitcoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Bitcoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Bitcoin")
return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
"""Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
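    # bitcoin.conf has no [section] headers, which SafeConfigParser requires;
    # FakeSecHead wraps the file and injects a fake "[all]" section on the
    # first readline() (it also strips '#' comments) so parsing succeeds.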
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a bitcoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 47014 if testnet else 37014
connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the bitcoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(bitcoind):
info = bitcoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
bitcoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = bitcoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
address_summary = dict()
address_to_account = dict()
for info in bitcoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = bitcoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = bitcoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-bitcoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
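    # Greedy selection: take unspent outputs in order until 'needed' is covered,
    # returning (selected outputs, change). Illustrative example with hypothetical
    # amounts: inputs of 0.5 and 0.7 with needed=1.0 select both outputs and
    # leave change of Decimal("0.2").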
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(bitcoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to bitcoind.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = bitcoind.createrawtransaction(inputs, outputs)
signed_rawtx = bitcoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(bitcoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = bitcoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = bitcoind.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(bitcoind, txinfo)
total_out = compute_amount_out(txinfo)
        fee = total_in - total_out  # the implied fee for this transaction
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
        tx_size = len(txdata_hex)/2
        kb = tx_size/1000 # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get bitcoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send bitcoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of bitcoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
bitcoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(bitcoind)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(bitcoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = bitcoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| 37.514925 | 111 | 0.632286 |
8c510aa6fbc27d5872a2121bfaeb8db8ebbdb5ec
| 1,140 |
py
|
Python
|
theia/ide/admin/cli/setup.py
|
ShubhamGG/Anubis
|
2c538ef258a1edf5463596a33bc66caa2ef7e35b
|
[
"MIT"
] | 87 |
2021-11-08T10:58:26.000Z
|
2022-03-31T19:02:47.000Z
|
theia/ide/admin/cli/setup.py
|
efaraz27/Anubis
|
40a12933877df7f39dd75ca26148858774fcda7b
|
[
"MIT"
] | 114 |
2021-06-27T08:37:43.000Z
|
2021-10-24T00:51:01.000Z
|
theia/ide/admin/cli/setup.py
|
efaraz27/Anubis
|
40a12933877df7f39dd75ca26148858774fcda7b
|
[
"MIT"
] | 15 |
2021-06-27T07:26:51.000Z
|
2021-10-06T18:42:39.000Z
|
#!/usr/bin/env python
"""The setup script."""
from setuptools import setup, find_packages
requirements = ['Click>=7.0', 'requests', 'pyyaml']
setup_requirements = []
test_requirements = []
setup(
author="John McCann Cunniff Jr.",
python_requires='>=3.5',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
description="CLI component to the Anubis autograder.",
entry_points={
'console_scripts': [
'anubis=anubis.cli:main',
],
},
install_requires=requirements,
include_package_data=True,
keywords='anubis',
name='anubis',
packages=find_packages(include=['anubis', 'anubis.*']),
setup_requires=setup_requirements,
test_suite='tests',
tests_require=test_requirements,
version='v2.0.0',
zip_safe=False,
)
| 27.142857 | 59 | 0.621053 |
75afc6d68dad97d51c48c478fcaba0828167f015
| 10,300 |
py
|
Python
|
netmiko/snmp_autodetect.py
|
soaliou1/Automation
|
86fa8eed5ffa7af8f21ca7b503f44fc0d3d5ed2c
|
[
"MIT"
] | null | null | null |
netmiko/snmp_autodetect.py
|
soaliou1/Automation
|
86fa8eed5ffa7af8f21ca7b503f44fc0d3d5ed2c
|
[
"MIT"
] | null | null | null |
netmiko/snmp_autodetect.py
|
soaliou1/Automation
|
86fa8eed5ffa7af8f21ca7b503f44fc0d3d5ed2c
|
[
"MIT"
] | null | null | null |
"""
This module is used to auto-detect the type of a device in order to automatically create a
Netmiko connection.
This avoids having to hard-code the 'device_type' when using the ConnectHandler factory
function from Netmiko.
Example:
------------------
from netmiko.snmp_autodetect import SNMPDetect
my_snmp = SNMPDetect(hostname='1.1.1.70', user='pysnmp', auth_key='key1', encrypt_key='key2')
device_type = my_snmp.autodetect()
------------------
autodetect will return None if no match.
SNMPDetect class defaults to SNMPv3
Note, pysnmp is a required dependency for SNMPDetect and is intentionally not included in
netmiko requirements. So installation of pysnmp might be required.
"""
from __future__ import unicode_literals
import re
try:
from pysnmp.entity.rfc3413.oneliner import cmdgen
except ImportError:
raise ImportError("pysnmp not installed; please install it: 'pip install pysnmp'")
from netmiko.ssh_dispatcher import CLASS_MAPPER
# Higher priority indicates a better match.
SNMP_MAPPER_BASE = {
'arista_eos': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*Arista Networks EOS.*", re.IGNORECASE),
"priority": 99},
'hp_comware': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*HP Comware.*", re.IGNORECASE),
"priority": 99},
'cisco_ios': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*Cisco IOS Software,.*", re.IGNORECASE),
"priority": 60},
'cisco_xe': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*IOS-XE Software,.*", re.IGNORECASE),
"priority": 99},
'cisco_asa': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*Cisco Adaptive Security Appliance.*", re.IGNORECASE),
"priority": 99},
'cisco_nxos': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*Cisco NX-OS.*", re.IGNORECASE),
"priority": 99},
'cisco_wlc': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r".*Cisco Controller.*", re.IGNORECASE),
"priority": 99},
'f5_ltm': {"oid": ".1.3.6.1.4.1.3375.2.1.4.1.0",
"expr": re.compile(r".*BIG-IP.*", re.IGNORECASE),
"priority": 99},
'fortinet': {"oid": ".1.3.6.1.2.1.1.1.0",
"expr": re.compile(r"Forti.*", re.IGNORECASE),
"priority": 80},
'checkpoint': {"oid": ".1.3.6.1.4.1.2620.1.6.16.9.0",
"expr": re.compile(r"CheckPoint"),
"priority": 79},
}
# Ensure all SNMP device types are supported by Netmiko
SNMP_MAPPER = {}
std_device_types = list(CLASS_MAPPER.keys())
for device_type in std_device_types:
if SNMP_MAPPER_BASE.get(device_type):
SNMP_MAPPER[device_type] = SNMP_MAPPER_BASE[device_type]
class SNMPDetect(object):
"""
The SNMPDetect class tries to automatically determine the device type.
Typically this will use the MIB-2 SysDescr and regular expressions.
Parameters
----------
hostname: str
        The name or IP address of the device whose type we want to guess
snmp_version : str, optional ('v1', 'v2c' or 'v3')
The SNMP version that is running on the device (default: 'v3')
snmp_port : int, optional
The UDP port on which SNMP is listening (default: 161)
community : str, optional
The SNMP read community when using SNMPv2 (default: None)
user : str, optional
The SNMPv3 user for authentication (default: '')
auth_key : str, optional
The SNMPv3 authentication key (default: '')
encrypt_key : str, optional
The SNMPv3 encryption key (default: '')
    auth_proto : str, optional ('sha', 'md5')
        The SNMPv3 authentication protocol (default: 'sha')
    encrypt_proto : str, optional ('des', '3des', 'aes128', 'aes192', 'aes256')
        The SNMPv3 encryption protocol (default: 'aes128')
Attributes
----------
hostname: str
The name or IP address of the device we want to guess the type
snmp_version : str
The SNMP version that is running on the device
snmp_port : int
The UDP port on which SNMP is listening
community : str
The SNMP read community when using SNMPv2
user : str
The SNMPv3 user for authentication
auth_key : str
The SNMPv3 authentication key
encrypt_key : str
The SNMPv3 encryption key
auth_proto : str
The SNMPv3 authentication protocol
encrypt_proto : str
The SNMPv3 encryption protocol
Methods
-------
autodetect()
Try to determine the device type.
"""
def __init__(self, hostname, snmp_version="v3", snmp_port=161, community=None, user="",
auth_key="", encrypt_key="", auth_proto="sha", encrypt_proto="aes128"):
        # Check that the SNMP version matches a predefined type or raise ValueError
if snmp_version == "v1" or snmp_version == "v2c":
if not community:
raise ValueError("SNMP version v1/v2c community must be set.")
elif snmp_version == "v3":
if not user:
raise ValueError("SNMP version v3 user and password must be set")
else:
raise ValueError("SNMP version must be set to 'v1', 'v2c' or 'v3'")
# Check that the SNMPv3 auth & priv parameters match allowed types
self._snmp_v3_authentication = {"sha": cmdgen.usmHMACSHAAuthProtocol,
"md5": cmdgen.usmHMACMD5AuthProtocol}
self._snmp_v3_encryption = {"des": cmdgen.usmDESPrivProtocol,
"3des": cmdgen.usm3DESEDEPrivProtocol,
"aes128": cmdgen.usmAesCfb128Protocol,
"aes192": cmdgen.usmAesCfb192Protocol,
"aes256": cmdgen.usmAesCfb256Protocol}
if auth_proto not in self._snmp_v3_authentication.keys():
raise ValueError("SNMP V3 'auth_proto' argument must be one of the following: {}"
.format(self._snmp_v3_authentication.keys()))
if encrypt_proto not in self._snmp_v3_encryption.keys():
raise ValueError("SNMP V3 'encrypt_proto' argument must be one of the following: {}"
.format(self._snmp_v3_encryption.keys()))
self.hostname = hostname
self.snmp_version = snmp_version
self.snmp_port = snmp_port
self.community = community
self.user = user
self.auth_key = auth_key
self.encrypt_key = encrypt_key
self.auth_proto = self._snmp_v3_authentication[auth_proto]
self.encryp_proto = self._snmp_v3_encryption[encrypt_proto]
self._response_cache = {}
def _get_snmpv3(self, oid):
"""
Try to send an SNMP GET operation using SNMPv3 for the specified OID.
Parameters
----------
oid : str
The SNMP OID that you want to get.
Returns
-------
string : str
            The value retrieved for the OID, as a string.
"""
snmp_target = (self.hostname, self.snmp_port)
cmd_gen = cmdgen.CommandGenerator()
(error_detected, error_status, error_index, snmp_data) = cmd_gen.getCmd(
cmdgen.UsmUserData(self.user, self.auth_key, self.encrypt_key,
authProtocol=self.auth_proto,
privProtocol=self.encryp_proto),
cmdgen.UdpTransportTarget(snmp_target, timeout=1.5, retries=2),
oid, lookupNames=True, lookupValues=True)
if not error_detected and snmp_data[0][1]:
return str(snmp_data[0][1])
return ""
def _get_snmpv2c(self, oid):
"""
Try to send an SNMP GET operation using SNMPv2 for the specified OID.
Parameters
----------
oid : str
The SNMP OID that you want to get.
Returns
-------
string : str
            The value retrieved for the OID, as a string.
"""
snmp_target = (self.hostname, self.snmp_port)
cmd_gen = cmdgen.CommandGenerator()
(error_detected, error_status, error_index, snmp_data) = cmd_gen.getCmd(
cmdgen.CommunityData(self.community),
cmdgen.UdpTransportTarget(snmp_target, timeout=1.5, retries=2),
oid, lookupNames=True, lookupValues=True)
if not error_detected and snmp_data[0][1]:
return str(snmp_data[0][1])
return ""
def _get_snmp(self, oid):
"""Wrapper for generic SNMP call."""
if self.snmp_version in ["v1", "v2c"]:
return self._get_snmpv2c(oid)
else:
return self._get_snmpv3(oid)
def autodetect(self):
"""
        Try to guess the device_type using SNMP GET based on the SNMP_MAPPER dict. The returned
        type directly matches a name in the *netmiko.ssh_dispatcher.CLASS_MAPPER_BASE*
        dict.
        Thus you can use this name to automatically retrieve the right ConnectionClass
Returns
-------
potential_type : str
The name of the device_type that must be running.
"""
# Convert SNMP_MAPPER to a list and sort by priority
snmp_mapper_list = []
for k, v in SNMP_MAPPER.items():
snmp_mapper_list.append({k: v})
snmp_mapper_list = sorted(snmp_mapper_list, key=lambda x: list(x.values())[0]['priority'])
snmp_mapper_list.reverse()
for entry in snmp_mapper_list:
for device_type, v in entry.items():
oid = v['oid']
regex = v['expr']
                # Use cached data if we already queried this OID
if self._response_cache.get(oid):
snmp_response = self._response_cache.get(oid)
else:
snmp_response = self._get_snmp(oid)
self._response_cache[oid] = snmp_response
# See if we had a match
if re.search(regex, snmp_response):
return device_type
return None
| 38.432836 | 98 | 0.597476 |
8290fb4e3e1ffa574a24249511242b82a913a4ca
| 10,009 |
py
|
Python
|
code/rnn_seq2seq/src/args.py
|
arkilpatel/SVAMP
|
6f09ab516ab06c18e948c0325236e84e80b5d4bd
|
[
"MIT"
] | 39 |
2021-04-08T01:24:36.000Z
|
2022-03-12T06:51:33.000Z
|
code/rnn_seq2seq/src/args.py
|
intflow/SVAMP
|
10731d8ea489f4eb8e12e35c6c2781f8d837866a
|
[
"MIT"
] | 8 |
2021-04-12T08:02:05.000Z
|
2022-03-07T06:36:39.000Z
|
code/rnn_seq2seq/src/args.py
|
intflow/SVAMP
|
10731d8ea489f4eb8e12e35c6c2781f8d837866a
|
[
"MIT"
] | 11 |
2021-04-08T01:24:41.000Z
|
2021-12-15T22:51:51.000Z
|
import argparse
### Add Early Stopping ###
def build_parser():
# Data loading parameters
parser = argparse.ArgumentParser(description='Run Single sequence model')
# Mode specifications
parser.add_argument('-mode', type=str, default='train', choices=['train', 'test', 'conf'], help='Modes: train, test, conf')
parser.add_argument('-debug', dest='debug', action='store_true', help='Operate in debug mode')
parser.add_argument('-no-debug', dest='debug', action='store_false', help='Operate in normal mode')
parser.set_defaults(debug=False)
# Run Config
parser.add_argument('-run_name', type=str, default='debug', help='run name for logs')
parser.add_argument('-dataset', type=str, default='mawps', help='Dataset')
parser.add_argument('-display_freq', type=int, default= 10000, help='number of batches after which to display samples')
parser.add_argument('-outputs', dest='outputs', action='store_true', help='Show full validation outputs')
parser.add_argument('-no-outputs', dest='outputs', action='store_false', help='Do not show full validation outputs')
parser.set_defaults(outputs=True)
parser.add_argument('-results', dest='results', action='store_true', help='Store results')
parser.add_argument('-no-results', dest='results', action='store_false', help='Do not store results')
parser.set_defaults(results=True)
# Meta Attributes
parser.add_argument('-vocab_size', type=int, default=30000, help='Vocabulary size to consider')
parser.add_argument('-histogram', dest='histogram', action='store_true', help='Operate in debug mode')
parser.add_argument('-no-histogram', dest='histogram', action='store_false', help='Operate in normal mode')
parser.set_defaults(histogram=True)
parser.add_argument('-save_writer', dest='save_writer',action='store_true', help='To write tensorboard')
    parser.add_argument('-no-save_writer', dest='save_writer', action='store_false', help='Do not write tensorboard')
parser.set_defaults(save_writer=False)
# Device Configuration
parser.add_argument('-gpu', type=int, default=2, help='Specify the gpu to use')
    parser.add_argument('-early_stopping', type=int, default=50, help='Early stopping after n epochs')
parser.add_argument('-seed', type=int, default=6174, help='Default seed to set')
parser.add_argument('-logging', type=int, default=1, help='Set to 0 if you do not require logging')
parser.add_argument('-ckpt', type=str, default='model', help='Checkpoint file name')
parser.add_argument('-save_model', dest='save_model',action='store_true', help='To save the model')
    parser.add_argument('-no-save_model', dest='save_model', action='store_false', help='Do not save the model')
parser.set_defaults(save_model=False)
# parser.add_argument('-log_fmt', type=str, default='%(asctime)s | %(levelname)s | %(name)s | %(message)s', help='Specify format of the logger')
# LSTM parameters
parser.add_argument('-emb2_size', type=int, default=16, help='Embedding dimensions of inputs')
parser.add_argument('-cell_type', type=str, default='lstm', help='RNN cell for encoder and decoder, default: lstm')
parser.add_argument('-use_attn', dest='use_attn',action='store_true', help='To use attention mechanism?')
parser.add_argument('-no-attn', dest='use_attn', action='store_false', help='Not to use attention mechanism?')
parser.set_defaults(use_attn=True)
parser.add_argument('-attn_type', type=str, default='general', help='Attention mechanism: (general, concat), default: general')
parser.add_argument('-hidden_size', type=int, default=256, help='Number of hidden units in each layer')
parser.add_argument('-depth', type=int, default=1, help='Number of layers in each encoder and decoder')
parser.add_argument('-dropout', type=float, default=0.1, help= 'Dropout probability for input/output/state units (0.0: no dropout)')
parser.add_argument('-max_length', type=int, default=100, help='Specify max decode steps: Max length string to output')
parser.add_argument('-init_range', type=float, default=0.08, help='Initialization range for seq2seq model')
parser.add_argument('-bidirectional', dest='bidirectional', action='store_true', help='Bidirectionality in LSTMs')
parser.add_argument('-no-bidirectional', dest='bidirectional', action='store_false', help='Bidirectionality in LSTMs')
parser.set_defaults(bidirectional=True)
parser.add_argument('-lr', type=float, default=0.0005, help='Learning rate')
    # parser.add_argument('-bert_lr', type=float, default=5e-5, help='Learning rate to train BERT embeddings')
parser.add_argument('-warmup', type=float, default=0.1, help='Proportion of training to perform linear learning rate warmup for')
parser.add_argument('-max_grad_norm', type=float, default=0.25, help='Clip gradients to this norm')
parser.add_argument('-batch_size', type=int, default=8, help='Batch size')
parser.add_argument('-epochs', type=int, default=50, help='Maximum # of training epochs')
parser.add_argument('-opt', type=str, default='adam', choices=['adam', 'adadelta', 'sgd', 'asgd'], help='Optimizer for training')
parser.add_argument('-separate_opt', dest='separate_opt', action='store_true', help='Separate Optimizers for Embedding and model - AdamW for emb and Adam for model')
parser.add_argument('-no-separate_opt', dest='separate_opt', action='store_false', help='Common optimizer for Embedding and model')
parser.set_defaults(separate_opt=False)
parser.add_argument('-teacher_forcing_ratio', type=float, default=0.9, help='Teacher forcing ratio')
# Embeddings
parser.add_argument('-embedding', type=str, default='roberta', choices=['bert', 'roberta', 'word2vec', 'random'], help='Embeddings')
# parser.add_argument('-use_word2vec', dest='use_word2vec', action='store_true', help='use word2vec')
# parser.add_argument('-no-use_word2vec', dest='use_word2vec', action='store_false', help='Do not use word2vec')
# parser.set_defaults(use_word2vec=False)
# parser.add_argument('-word2vec_bin', type=str, default='/datadrive/satwik/global_data/glove.840B.300d.txt', help='Binary file of word2vec')
parser.add_argument('-word2vec_bin', type=str, default='/datadrive/global_files/GoogleNews-vectors-negative300.bin', help='Binary file of word2vec')
# parser.add_argument('-train_word2vec', dest='train_word2vec', action='store_true', help='train word2vec')
# parser.add_argument('-no-train_word2vec', dest='train_word2vec', action='store_false', help='Do not train word2vec')
# parser.set_defaults(train_word2vec=True)
parser.add_argument('-emb1_size', type=int, default=768, help='Embedding dimensions of inputs')
parser.add_argument('-emb_name', type=str, default='roberta-base', choices=['bert-base-uncased', 'roberta-base'], help='Which pre-trained model')
# parser.add_argument('-bert_size', type=int, default = 768, help = 'Size of BERT\'s last layer representations')
    parser.add_argument('-emb_lr', type=float, default=1e-5, help='Learning rate to train embeddings')
parser.add_argument('-freeze_emb', dest='freeze_emb', action='store_true', help='Freeze embedding weights')
parser.add_argument('-no-freeze_emb', dest='freeze_emb', action='store_false', help='Train embedding weights')
parser.set_defaults(freeze_emb=False)
parser.add_argument('-grade_disp', dest='grade_disp', action='store_true', help='Display grade information in validation outputs')
parser.add_argument('-no-grade_disp', dest='grade_disp', action='store_false', help='Don\'t display grade information')
parser.set_defaults(grade_disp=False)
parser.add_argument('-type_disp', dest='type_disp', action='store_true', help='Display Type information in validation outputs')
parser.add_argument('-no-type_disp', dest='type_disp', action='store_false', help='Don\'t display Type information')
parser.set_defaults(type_disp=False)
parser.add_argument('-challenge_disp', dest='challenge_disp', action='store_true', help='Display information in validation outputs')
parser.add_argument('-no-challenge_disp', dest='challenge_disp', action='store_false', help='Don\'t display information')
parser.set_defaults(challenge_disp=False)
parser.add_argument('-nums_disp', dest='nums_disp', action='store_true', help='Display number of numbers information in validation outputs')
parser.add_argument('-no-nums_disp', dest='nums_disp', action='store_false', help='Don\'t display number of numbers information')
parser.set_defaults(nums_disp=True)
parser.add_argument('-more_nums', dest='more_nums', action='store_true', help='More numbers in Voc2')
parser.add_argument('-no-more_nums', dest='more_nums', action='store_false', help='Usual numbers in Voc2')
parser.set_defaults(more_nums=False)
parser.add_argument('-mawps_vocab', dest='mawps_vocab', action='store_true', help='Custom Numbers in Voc2')
parser.add_argument('-no-mawps_vocab', dest='mawps_vocab', action='store_false', help='No Custom Numbers in Voc2')
parser.set_defaults(mawps_vocab=False)
parser.add_argument('-show_train_acc', dest='show_train_acc', action='store_true', help='Calculate the train accuracy')
parser.add_argument('-no-show_train_acc', dest='show_train_acc', action='store_false', help='Don\'t calculate the train accuracy')
parser.set_defaults(show_train_acc=True)
parser.add_argument('-full_cv', dest='full_cv', action='store_true', help='5-fold CV')
parser.add_argument('-no-full_cv', dest='full_cv', action='store_false', help='No 5-fold CV')
parser.set_defaults(full_cv=False)
#Conf parameters
    parser.add_argument('-conf', type=str, default='posterior', choices=['posterior', 'similarity'], help='Confidence estimation criteria to use, ["posterior", "similarity"]')
    parser.add_argument('-sim_criteria', type=str, default='bleu_score', choices=['bert_score', 'bleu_score'], help='Only applicable if similarity based criteria is selected for confidence.')
parser.add_argument('-adv', action = 'store_true', help = 'If dealing with out of distribution examples')
return parser
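# Usage sketch (illustrative; the flag values below are hypothetical):
#
#     parser = build_parser()
#     args = parser.parse_args(['-mode', 'train', '-dataset', 'mawps', '-gpu', '0'])
#     assert args.embedding == 'roberta'  # the default embedding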
| 79.436508 | 191 | 0.748726 |
991bcc19ddfadf3c69f56dfbb98d0a50d0d9e05a
| 47 |
py
|
Python
|
learning_to_adapt/utils/__init__.py
|
ondrejklejch/learning_to_adapt
|
6de0b98370769596da16a1688582925ea2e1fa29
|
[
"Apache-2.0"
] | 18 |
2019-10-24T04:42:16.000Z
|
2021-11-24T03:07:59.000Z
|
learning_to_adapt/utils/__init__.py
|
choko/learning_to_adapt
|
6de0b98370769596da16a1688582925ea2e1fa29
|
[
"Apache-2.0"
] | null | null | null |
learning_to_adapt/utils/__init__.py
|
choko/learning_to_adapt
|
6de0b98370769596da16a1688582925ea2e1fa29
|
[
"Apache-2.0"
] | 4 |
2018-08-31T01:08:50.000Z
|
2019-05-10T12:12:57.000Z
|
from data_generator import *
from lda import *
| 15.666667 | 28 | 0.787234 |
b1bd38b6a3205832d684b4ab52cc2c88d09a773c
| 3,596 |
py
|
Python
|
Python/linked_list.py
|
Rohit01-pro/All_Program_helper
|
86b75ecc4ecb095f11e46e6f80c660e27dd22f27
|
[
"MIT"
] | 16 |
2021-10-03T11:15:49.000Z
|
2021-10-31T04:40:24.000Z
|
Python/linked_list.py
|
Rohit01-pro/All_Program_helper
|
86b75ecc4ecb095f11e46e6f80c660e27dd22f27
|
[
"MIT"
] | 232 |
2021-10-02T14:51:43.000Z
|
2021-11-14T08:23:27.000Z
|
Python/linked_list.py
|
Rohit01-pro/All_Program_helper
|
86b75ecc4ecb095f11e46e6f80c660e27dd22f27
|
[
"MIT"
] | 166 |
2021-10-02T13:56:34.000Z
|
2021-10-31T17:56:34.000Z
|
#Structure for Node
class Node:
def __init__(self, data):
self.data = data
self.next = None
#Class representing different methods of Linked List
class LinkedList:
def __init__(self):
self.head = None
def display(self):
if self.head == None:
print('No elements in LinkedList to display')
else:
p = self.head
while p.next != None:
print(p.data, end='->')
p = p.next
print(p.data)
def insertAtBeginning(self):
data = input('Insert data into LinkedList:')
new = Node(data)
if self.head == None:
self.head = new
else:
new.next = self.head
self.head = new
print('After inserting At Beginning:')
self.display()
def insertAtEnd(self):
data = input('Insert data into LinkedList:')
new = Node(data)
if self.head == None:
self.head = new
else:
p = self.head
while p.next != None:
p = p.next
p.next = new
print('After inserting At End:')
self.display()
def getLength(self):
count = 0
p = self.head
while p != None:
p = p.next
count += 1
return count
def insertAtPosition(self):
pos = int(input('Enter the position to insert:'))
if self.head == None:
self.insertAtBeginning()
elif pos == 0:
self.insertAtBeginning()
elif pos == self.getLength()-1:
self.insertAtEnd()
else:
data = input('Insert data into LinkedList:')
new = Node(data)
p = q = self.head
count = 1
while count != pos:
p = p.next
count += 1
while q.next != p:
q = q.next
new.next = p
q.next = new
print('After inserting At Position:', pos)
self.display()
def deleteAtBeginning(self):
if self.head == None:
print('Nothing to delete')
else:
p = self.head
self.head = self.head.next
del p
print('After deleting an element at Beginning:')
self.display()
def deleteAtEnd(self):
if self.head == None:
print('Nothing to delete')
        elif self.head.next == None:
            # A single-node list has no p.next.next; empty the list directly.
            p = self.head
            self.head = None
            del p
        else:
            p = self.head
            while p.next.next:
p = p.next
q = p.next
p.next = None
del q
print('After deleting an element at End:')
self.display()
def deleteAtPosition(self):
print('Number of elements present:', self.getLength())
pos = int(input('Enter the position you want to delete:'))
if pos == 0:
self.deleteAtBeginning()
elif pos == self.getLength():
self.deleteAtEnd()
else:
count = 1
p = q = self.head
while count != pos:
count += 1
p = p.next
while q.next != p:
q = q.next
q.next = p.next
p.next = None
del p
            print('After deleting an element at Position:', pos)
self.display()
if __name__ == '__main__':
ll = LinkedList()
for i in range(5):
ll.insertAtBeginning()
for i in range(5):
ll.insertAtEnd()
ll.insertAtPosition()
for i in range(5):
ll.deleteAtBeginning()
ll.deleteAtEnd()
ll.deleteAtPosition()
| 27.037594 | 66 | 0.486096 |
7ff59e3d355415ace866a9d97d846fa789ea99c5
| 451 |
py
|
Python
|
training/callbacks/early_stopping.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
training/callbacks/early_stopping.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
training/callbacks/early_stopping.py
|
Jav1d/Generative_Models
|
b7578b5277488ebd212a50d1c22a5c9708f4c311
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
EarlyStopping = lambda: tf.keras.callbacks.EarlyStopping(
monitor='loss',
min_delta=1e-12,
patience=50,
verbose=1,
restore_best_weights=False
)
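# Minimal usage sketch (assumes a compiled tf.keras model and training data
# named `model`, `x_train`, `y_train` -- hypothetical names, not part of this
# module):
#
#     model.fit(x_train, y_train, epochs=1000, callbacks=[EarlyStopping()])
#
# Each call to EarlyStopping() builds a fresh callback, so one factory can
# serve several training runs without sharing state between them.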
| 56.375 | 74 | 0.299335 |
35dce234aff5d4c8f3d978fe12d6449e759b7fea
| 2,834 |
py
|
Python
|
networks/base.py
|
ypotdevin/randomized-defenses
|
34bc3cbe8e3178dd8a6f7abae927b10c9ae4bad9
|
[
"MIT"
] | null | null | null |
networks/base.py
|
ypotdevin/randomized-defenses
|
34bc3cbe8e3178dd8a6f7abae927b10c9ae4bad9
|
[
"MIT"
] | 2 |
2021-10-06T12:33:54.000Z
|
2021-10-06T12:35:16.000Z
|
networks/base.py
|
ypotdevin/randomized-defenses
|
34bc3cbe8e3178dd8a6f7abae927b10c9ae4bad9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module consists of network class definitions, used to hide implementation
details of used deep learning libraries.
"""
from abc import ABC, abstractmethod
import numpy as np
class Network(ABC):
"""
This class hides the functional API of (e.g.) Keras models and reduces the
available features to the bare minimum. This eases adapting to other deep
learning libraries.
"""
@abstractmethod
def predict(self, x):#pylint: disable=C0103
"""
Parameters
----------
x : array_like
The input (batch of inputs) which should be processed by this
network.
Returns
-------
y : array_like
The predicted class confidence(s) belonging to input `x`. This is
the (raw) output of the network, before applying argmax -- but
(depending on the network's topology) after softmax.
"""
raise NotImplementedError
def labels(self, x):
"""
Parameters
----------
x : array_like
The input (batch of inputs) which should be processed by this
network.
Returns
-------
labels : array_like of int
The predicted class labels (determined by numpy's argmax function)
belonging to `x`.
"""
predictions = self.predict(x)
labels = np.argmax(predictions, axis = 1)
return labels
@staticmethod
@abstractmethod
def bounds():
"""
Returns
-------
bounds : (float, float)
Lower and upper bound of input scalars (e. g. pixel values).
The default implementation assumes the network to be trained on the
channel centered ImageNet training data set, using BGR encoding.
Notes
-----
Although this methods provides a default implementation, it is tagged as
abstractmethod, to force inheriting classes to define their bounds
explicitly.
"""
bgr_mean_pixel = [103.939, 116.779, 123.68]
bnds = (np.subtract(0, max(bgr_mean_pixel), dtype = np.float32),
np.subtract(255, min(bgr_mean_pixel), dtype = np.float32) )
return bnds
@abstractmethod
    def name(self):
"""
Returns
-------
name : str
A class-unique human readable identifier of the network.
"""
class KerasNetwork(Network):#pylint: disable=W0223
"""
An intermediate class to make it easier to lift Keras applications to the
`Network` interface.
"""
def __init__(self, model):
self._wrapped_model = model
def predict(self, x):
return self._wrapped_model.predict(x)
def wrapped_model(self):
return self._wrapped_model
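# Minimal sketch of a concrete Network subclass (illustrative only; the VGG16
# model choice and its preprocessing are assumptions, not part of this module):
#
#     import tensorflow as tf
#
#     class VGG16Network(KerasNetwork):
#         def __init__(self):
#             super().__init__(tf.keras.applications.VGG16(weights='imagenet'))
#
#         @staticmethod
#         def bounds():
#             return Network.bounds()  # channel-centered ImageNet BGR bounds
#
#         def name(self):
#             return 'vgg16'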
| 29.216495 | 80 | 0.591743 |
496276200bc5975052e93ec3c8d6cdc1f0684df6
| 807 |
py
|
Python
|
siteframe/migrations/0002_article.py
|
Tian-rg/easysite
|
6a34cb373e43c263e98dceae47f41c99b28803f1
|
[
"MIT"
] | null | null | null |
siteframe/migrations/0002_article.py
|
Tian-rg/easysite
|
6a34cb373e43c263e98dceae47f41c99b28803f1
|
[
"MIT"
] | null | null | null |
siteframe/migrations/0002_article.py
|
Tian-rg/easysite
|
6a34cb373e43c263e98dceae47f41c99b28803f1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-03 17:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('siteframe', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('article_title', models.CharField(max_length=50)),
('article_author', models.CharField(max_length=50)),
('article_body', models.CharField(max_length=5000)),
('article_pubdate', models.DateTimeField(verbose_name='date published')),
],
),
]
| 31.038462 | 114 | 0.60223 |
c174d6009cc6841aa09f4cf656ebd7862d5e7f1f
| 501 |
py
|
Python
|
web_app/code_analysis/migrations/0014_problem_date_submitted.py
|
Lockers13/codagio
|
cfe9325cb3c207f7728db3c287439ce761ffea14
|
[
"MIT"
] | 2 |
2021-01-16T13:42:14.000Z
|
2021-03-03T19:36:47.000Z
|
web_app/code_analysis/migrations/0014_problem_date_submitted.py
|
Lockers13/codagio
|
cfe9325cb3c207f7728db3c287439ce761ffea14
|
[
"MIT"
] | null | null | null |
web_app/code_analysis/migrations/0014_problem_date_submitted.py
|
Lockers13/codagio
|
cfe9325cb3c207f7728db3c287439ce761ffea14
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.5 on 2021-03-21 18:49
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('code_analysis', '0013_remove_problem_category'),
]
operations = [
migrations.AddField(
model_name='problem',
name='date_submitted',
field=models.DateField(default=datetime.datetime(2021, 3, 21, 18, 49, 41, 215294)),
preserve_default=False,
),
]
| 23.857143 | 95 | 0.626747 |
d6437947723e675d7cb009e138963bc7ad380ed4
| 362 |
py
|
Python
|
config.py
|
LdyOn/12306
|
d9824231acdf268a308762489eb33feff02b01c6
|
[
"Apache-2.0"
] | 2 |
2021-04-16T02:40:58.000Z
|
2021-04-17T04:11:47.000Z
|
config.py
|
LdyOn/12306
|
d9824231acdf268a308762489eb33feff02b01c6
|
[
"Apache-2.0"
] | 2 |
2021-04-17T08:37:24.000Z
|
2021-05-10T09:10:51.000Z
|
config.py
|
LdyOn/12306
|
d9824231acdf268a308762489eb33feff02b01c6
|
[
"Apache-2.0"
] | 1 |
2021-04-16T07:56:52.000Z
|
2021-04-16T07:56:52.000Z
|
import re
class Config():
"""配置文件"""
def __init__(self):
self.config = {}
self.read_setting()
    # Read settings from the configuration file
def read_setting(self):
s_file = open("setting.ini", encoding='UTF-8')
lines = s_file.readlines()
for x in lines:
if re.match(";", x) == None :
x = x.strip('\n')
s = x.split("=")
self.config[s[0].strip()] = s[1].strip()
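# Expected setting.ini format (illustrative; the keys are hypothetical):
#
#     ; lines starting with ';' are treated as comments and skipped
#     username = your_account
#     password = your_password
#
# Usage sketch:
#
#     cfg = Config()
#     print(cfg.config.get('username'))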
| 17.238095 | 48 | 0.574586 |
712e367553d38eb89864bd7855267afad1040608
| 26,024 |
py
|
Python
|
idtt/wp2tt/__init__.py
|
ErezVolk/evstuff
|
a79ba23773c41ed445107cffd235747a84bb4ff1
|
[
"MIT"
] | 1 |
2018-11-05T08:52:41.000Z
|
2018-11-05T08:52:41.000Z
|
idtt/wp2tt/__init__.py
|
ErezVolk/evstuff
|
a79ba23773c41ed445107cffd235747a84bb4ff1
|
[
"MIT"
] | null | null | null |
idtt/wp2tt/__init__.py
|
ErezVolk/evstuff
|
a79ba23773c41ed445107cffd235747a84bb4ff1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""A utility to convert word processor files (.docx, .odt) to InDesign's Tagged Text."""
import argparse
import collections
import configparser
import contextlib
import itertools
import logging
import os
import re
import shlex
import shutil
import sys
import attr
from wp2tt.version import WP2TT_VERSION
from wp2tt.ini import ini_fields
from wp2tt.styles import Style
from wp2tt.styles import Rule
from wp2tt.docx import DocxInput
from wp2tt.odt import OdtInput
from wp2tt.output import WhitespaceStripper
from wp2tt.tagged_text import InDesignTaggedTextOutput
def main():
WordProcessorToInDesignTaggedText().run()
class StopMarkerFound(Exception):
"""We raise this to stop the presses."""
pass
class BadReferenceInRule(Exception):
"""We raise this for bad ruels."""
pass
class ParseDict(argparse.Action):
"""Helper class to convert KEY=VALUE pairs to a dict."""
def __call__(self, parser, namespace, values, option_string):
setattr(namespace, self.dest, dict(val.split('=', 1) for val in values))
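# For example, "--style-to-variable Heading1=ChapterTitle" stores
# {'Heading1': 'ChapterTitle'} on the namespace (the style and variable
# names here are hypothetical).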
@attr.s
class State(object):
curr_char_style = attr.ib(default=None)
prev_para_style = attr.ib(default=None)
curr_para_text = attr.ib(default='')
prev_para_text = attr.ib(default=None)
class WordProcessorToInDesignTaggedText(object):
"""Read a word processor file. Write an InDesign Tagged Text file. What's not to like?"""
SETTING_FILE_ENCODING = 'UTF-8'
CONFIG_SECTION_NAME = 'General'
SPECIAL_GROUP = '(autogenerated)'
DEFAULT_BASE = SPECIAL_GROUP + '/(Basic Style)'
FOOTNOTE_REF_STYLE = SPECIAL_GROUP + '/(Footnote Reference in Text)'
COMMENT_REF_STYLE = SPECIAL_GROUP + '/(Comment Reference)'
IGNORED_STYLES = {
'character': ['annotation reference'],
}
SPECIAL_STYLE = {
'character': {
COMMENT_REF_STYLE: {
                'idtt': '<pShadingColor:Cyan><pShadingOn:1><pShadingTint:100>',
}
},
'paragraph': {
'annotation text': {
'name': SPECIAL_GROUP + '/(Comment Text)',
'idtt': '<cSize:6><cColor:Cyan><cColorTint:100>',
}
},
}
def run(self):
"""Main entry point."""
self.parse_command_line()
self.configure_logging()
self.read_settings()
self.load_docx()
self.write_idtt()
self.report_statistics()
self.write_settings()
self.write_rerunner()
def parse_command_line(self):
"""Find out what we're supposed to do."""
self.parser = argparse.ArgumentParser(
description='Word Processor to InDesign Tagged Text Converter, v' + WP2TT_VERSION
)
self.parser.add_argument('input', help='Input .docx file')
self.parser.add_argument('output', nargs='?',
help='InDesign Tagged Text file')
self.parser.add_argument('-s', '--stop-at', metavar='TEXT',
required=False,
help='Stop importing when TEXT is found')
self.parser.add_argument('-c', '--base-character-style', metavar='NAME',
default=self.DEFAULT_BASE,
help='Base all character styles on this.')
self.parser.add_argument('-p', '--base-paragraph-style', metavar='NAME',
default=self.DEFAULT_BASE,
help='Base all paragraph styles on this.')
self.parser.add_argument('-v', '--style-to-variable', metavar='STYLE=VARIABLE', nargs='+',
action=ParseDict,
help='Map paragraph styles to document variables.')
self.parser.add_argument('-f', '--fresh-start', action='store_true',
help='Do not read any existing settings.')
self.parser.add_argument('-d', '--debug', action='store_true',
help='Print interesting debug information.')
self.parser.add_argument('--no-rerunner', action='store_true',
                                 help='Do not (over)write the rerunner script.')
self.args = self.parser.parse_args()
if self.args.output:
self.output_fn = self.args.output
else:
basename, dummy_ext = os.path.splitext(self.args.input)
self.output_fn = basename + '.tagged.txt'
self.settings_fn = self.output_fn + '.ini'
self.rerunner_fn = self.output_fn + '.rerun'
self.stop_marker = self.args.stop_at
def configure_logging(self):
"""Set logging level and format."""
logging.basicConfig(
format='%(asctime)s %(message)s',
level=logging.DEBUG if self.args.debug else logging.INFO
)
def read_settings(self):
"""Read and parse the ini file."""
self.settings = configparser.ConfigParser()
self.settings_touched = False
self.style_sections_used = set()
if os.path.isfile(self.settings_fn) and not self.args.fresh_start:
logging.info('Reading %r', self.settings_fn)
self.settings.read(self.settings_fn, encoding=self.SETTING_FILE_ENCODING)
self.load_rules()
self.config = self.ensure_setting_section(self.CONFIG_SECTION_NAME)
if self.stop_marker:
self.config['stop_marker'] = self.stop_marker
else:
self.stop_marker = self.config.get('stop_marker')
def load_rules(self):
"""Convert Rule sections into Rule objects."""
self.rules = []
for section_name in self.settings.sections():
if not section_name.lower().startswith('rule:'):
continue
section = self.settings[section_name]
self.rules.append(Rule(
mnemonic='R%s' % (len(self.rules) + 1),
description=section_name[5:],
**{
name: section[ini_name]
for name, ini_name in ini_fields(Rule)
if ini_name in section
}
))
logging.debug(self.rules[-1])
def write_settings(self):
"""When done, write the settings file for the next time."""
if self.settings_touched and os.path.isfile(self.settings_fn):
logging.debug('Backing up %r', self.settings_fn)
shutil.copy(self.settings_fn, self.settings_fn + '.bak')
logging.info('Writing %r', self.settings_fn)
with open(self.settings_fn, 'w', encoding=self.SETTING_FILE_ENCODING) as fo:
self.settings.write(fo)
def write_rerunner(self):
"""Write a script to regenerate the output."""
if self.args.no_rerunner:
return
logging.info('Writing %r', self.rerunner_fn)
with open(self.rerunner_fn, 'w', encoding=self.SETTING_FILE_ENCODING) as fo:
fo.write(
'#!/bin/bash\n'
'# AUTOGENERATED FILE, DO NOT EDIT.\n'
'\n'
)
cli = [
shlex.quote(os.path.abspath(sys.argv[0])),
shlex.quote(os.path.abspath(self.args.input)),
]
if self.args.output:
cli.append(shlex.quote(os.path.abspath(self.output_fn)))
cli.append('"$@"') # Has to come before the dashes
if self.stop_marker:
cli.extend(['--stop-at', shlex.quote(self.stop_marker)])
if self.args.base_character_style != self.DEFAULT_BASE:
cli.extend(['--base-character-style', shlex.quote(self.args.base_character_style)])
if self.args.base_paragraph_style != self.DEFAULT_BASE:
cli.extend(['--base-paragraph-style', shlex.quote(self.args.base_paragraph_style)])
if self.args.style_to_variable:
cli.append('--style-to-variable')
cli.extend(
shlex.quote('%s=%s' % (k, v))
for k, v in self.args.style_to_variable.items()
)
if self.args.debug:
cli.append('--debug')
cli.extend(['2>&1', '|tee', os.path.abspath(self.rerunner_fn + '.output')])
fo.write(' '.join(cli))
fo.write('\n')
os.chmod(self.rerunner_fn, 0o755)
def load_docx(self):
"""Unzip and parse a .docx file."""
logging.info('Reading %r', self.args.input)
with self.create_reader() as self.doc:
self.scan_style_definitions()
self.scan_style_mentions()
self.link_styles()
self.link_rules()
def create_reader(self):
path = self.args.input
_, ext = os.path.splitext(path)
ext = ext.lower()
if ext == '.docx':
return DocxInput(path)
if ext == '.odt':
return OdtInput(path)
        raise RuntimeError('Unknown file extension for %r' % path)
def scan_style_definitions(self):
"""Create a Style object for everything in the document."""
self.styles = {}
self.create_special_styles()
counts = collections.defaultdict(lambda: itertools.count(start=1))
for style_kwargs in self.doc.styles_defined():
if style_kwargs.get('automatic'):
style_kwargs['name'] = '%s/automatic-%u' % (
self.SPECIAL_GROUP, next(counts[style_kwargs['realm']])
)
self.found_style_definition(**style_kwargs)
def create_special_styles(self):
"""Add any internal styles (i.e., not imported from the doc)."""
self.base_names = {
'character': self.args.base_character_style,
'paragraph': self.args.base_paragraph_style,
}
self.base_styles = {
realm: self.found_style_definition(
realm=realm,
internal_name=name,
wpid=name,
used=True,
automatic=True,
)
for realm, name in self.base_names.items()
}
self.footnote_ref_style = self.found_style_definition(
realm='character',
internal_name=self.FOOTNOTE_REF_STYLE,
wpid=self.FOOTNOTE_REF_STYLE,
idtt='<cColor:Magenta><cColorTint:100><cPosition:Superscript>',
automatic=True,
)
self.comment_ref_style = self.found_style_definition(
realm='character',
internal_name=self.COMMENT_REF_STYLE,
wpid=self.COMMENT_REF_STYLE,
parent_wpid=self.FOOTNOTE_REF_STYLE,
idtt='<cColor:Cyan><cColorTint:100>',
automatic=True,
)
def scan_style_mentions(self):
"""Mark which styles are actually used."""
for realm, wpid in self.doc.styles_in_use():
style_key = self.style_key(realm=realm, wpid=wpid)
if style_key not in self.styles:
logging.debug('Used but not defined? %r', style_key)
elif not self.styles[style_key].used:
logging.debug('Style used: %r', style_key)
self.styles[style_key].used = True
def link_styles(self):
"""A sort of alchemy-relationship thing."""
for style in self.styles.values():
style.parent_style = self.style_or_none(style.realm, style.parent_wpid)
style.next_style = self.style_or_none(style.realm, style.next_wpid)
def style_or_none(self, realm, wpid):
if not wpid:
return None
return self.styles[self.style_key(realm=realm, wpid=wpid)]
def link_rules(self):
"""A sort of alchemy-relationship thing."""
for rule in self.rules:
try:
rule.turn_this_style = self.find_style_by_ini_ref(
rule.turn_this,
required=True
)
rule.into_this_style = self.find_style_by_ini_ref(
rule.into_this,
required=True,
inherit_from=rule.turn_this_style,
)
if rule.when_following is not None:
rule.when_following_styles = [
self.find_style_by_ini_ref(ini_ref)
for ini_ref in
re.findall(r'\[.*?\]', rule.when_following)
]
except BadReferenceInRule:
                logging.warning('Ignoring rule with bad references: %s', rule)
rule.valid = False
def find_style_by_ini_ref(self, ini_ref, required=False, inherit_from=None):
"""Returns a style, given type of string we use for ini file section names."""
if not ini_ref:
if required:
logging.debug('MISSING REQUIRED SOMETHING')
raise BadReferenceInRule()
return None
m = re.match(
r'^\[(?P<realm>\w+):(?P<internal_name>.+)\]$',
ini_ref,
re.IGNORECASE
)
if not m:
logging.debug('Malformed %r', ini_ref)
raise BadReferenceInRule()
try:
realm = m.group('realm').lower()
internal_name = m.group('internal_name')
return next(
style for style in self.styles.values()
if style.realm.lower() == realm and style.internal_name == internal_name
)
except StopIteration:
if not inherit_from:
logging.debug('ERROR: Unknown %r', ini_ref)
raise BadReferenceInRule()
return self.add_style(
realm=realm,
wpid=ini_ref,
internal_name=internal_name,
parent_wpid=inherit_from.wpid,
parent_style=inherit_from,
)
def found_style_definition(self, realm, internal_name, wpid, **kwargs):
if realm not in self.base_names:
logging.debug('What about %s:%r [%r]?', realm, wpid, internal_name)
return
if wpid != self.base_names.get(realm):
kwargs.setdefault('parent_wpid', self.base_names.get(realm))
# Allow any special overrides (color, name, etc.)
kwargs.update(self.SPECIAL_STYLE.get(realm, {}).get(internal_name, {}))
section = self.get_setting_section(realm=realm, internal_name=internal_name)
if not kwargs.get('name'):
kwargs['name'] = internal_name
kwargs.update({
name: section[ini_name]
for name, ini_name in ini_fields(Style, writeable=True)
if ini_name in section
})
return self.add_style(
realm=realm,
wpid=wpid,
internal_name=internal_name,
**kwargs
)
def add_style(self, **kwargs):
"""Create a new Style object and add to our map."""
if self.args.style_to_variable:
if kwargs['realm'] == 'paragraph':
kwargs.setdefault('variable', self.args.style_to_variable.get(kwargs['internal_name']))
kwargs.setdefault('name', kwargs['internal_name'])
style = Style(**kwargs)
self.styles[self.style_key(style=style)] = style
return style
def style_key(self, style=None, realm=None, wpid=None):
"""The string which we use for `self.styles`.
Note that this is based on the wpid, because
that's the key docx files use for cross-reference.
"""
if style:
realm = style.realm
wpid = style.wpid
return '%s:%s' % (realm, wpid)
def section_name(self, realm=None, internal_name=None, style=None):
"""The name of the ini section for a given style.
This uses `internal_name`, rather than `wpid` or `name`,
because `wpid` can get ugly ("a2") and `name` should be
        modifiable.
"""
if style:
realm = style.realm
internal_name = style.internal_name
return '%s:%s' % (realm.capitalize(), internal_name)
def write_idtt(self):
logging.info('Writing %r', self.output_fn)
self.set_state(State())
with InDesignTaggedTextOutput(self.output_fn, self.args.debug, self.doc.properties) as self.writer:
self.convert_document()
def convert_document(self):
try:
self.stop_marker_found = False
for p in self.doc.paragraphs():
self.convert_paragraph(p)
if self.stop_marker:
logging.info('Note: Stop marker was never found')
logging.debug('In other words, no %r', self.stop_marker)
except StopMarkerFound as marker:
logging.info(marker)
def convert_paragraph(self, p):
if self.stop_marker:
self.check_for_stop_paragraph(p)
style = self.apply_rules_to(self.style('paragraph', p.style_wpid()))
with self.ParagraphContext(self, style):
for r in p.spans():
self.convert_range(r)
if style and style.variable:
self.define_variable_from_paragraph(style.variable, p)
def apply_rules_to(self, style):
for rule in self.rules:
if self.rule_applies_to(rule, style):
rule.applied += 1
return rule.into_this_style
return style
def rule_applies_to(self, rule, style):
if not rule.valid:
return False
if rule.turn_this_style is not style:
return False
if rule.when_first_in_doc:
if style.count > 1:
return False
if rule.when_following_styles:
if self.state.prev_para_style not in rule.when_following_styles:
return False
return True
def check_for_stop_paragraph(self, p):
text = ''
for chunk in p.text():
text += chunk
if text.startswith(self.stop_marker):
raise StopMarkerFound('Stop marker found at the beginning of a paragraph')
if len(text) >= len(self.stop_marker):
return
def define_variable_from_paragraph(self, variable, p):
self.writer.define_text_variable(variable, ''.join(chunk for chunk in p.text()))
def set_state(self, state):
prev = getattr(self, 'state', None)
self.state = state
return prev
class ParagraphContext(contextlib.AbstractContextManager):
def __init__(self, outer, style):
super().__init__()
self.outer = outer
self.writer = self.outer.writer
self.style = style
self.writer.enter_paragraph(self.style)
def __exit__(self, *args):
self.writer.leave_paragraph()
self.outer.set_state(State(
prev_para_style=self.style,
prev_para_text=self.outer.state.curr_para_text,
))
def convert_range(self, r):
self.switch_character_style(self.style('character', r.style_wpid()))
self.convert_range_text(r)
for fn in r.footnotes():
self.convert_footnote(fn)
for cmt in r.comments():
self.convert_comment(cmt)
def convert_range_text(self, r):
for t in r.text():
self.write_text(t)
def switch_character_style(self, style):
prev = self.state.curr_char_style
if style is not prev:
self.writer.set_character_style(style)
self.state.curr_char_style = style
return prev
def write_text(self, text):
"""Add some plain text."""
with contextlib.ExitStack() as stack:
stack.callback(lambda: self.do_write_text(text))
if not self.stop_marker:
return
offset = text.find(self.stop_marker)
if offset < 0:
return
text = text[:offset]
raise StopMarkerFound('Stop marker found')
def do_write_text(self, text):
self.writer.write_text(text)
self.state.curr_para_text += text
def convert_footnote(self, fn, ref_style=None):
with self.FootnoteContext(self, ref_style):
for p in fn.paragraphs():
self.convert_paragraph(p)
def convert_comment(self, cmt):
"""Tagged Text doesn't support notes, so we convert them to footnotes."""
return self.convert_footnote(cmt, ref_style=self.comment_ref_style)
class FootnoteContext(contextlib.AbstractContextManager):
def __init__(self, outer, ref_style=None):
super().__init__()
self.outer = outer
self.writer = outer.writer
self.outer_character_style = outer.state.curr_char_style
ref_style = ref_style or outer.footnote_ref_style
self.outer.activate_style(ref_style)
self.writer.set_character_style(ref_style)
self.writer.enter_footnote()
self.outer.writer = WhitespaceStripper(self.writer)
self.outer_state = self.outer.set_state(State())
def __exit__(self, *args):
self.outer.set_state(self.outer_state)
self.writer.leave_footnote()
self.writer.set_character_style(self.outer_character_style)
self.outer.writer = self.writer
def report_statistics(self):
realms = {s.realm for s in self.styles.values()}
for realm in realms:
logging.info('Number of %s styles used: %u',
realm.capitalize(),
sum(1 for s in self.styles.values() if s.realm == realm and s.used))
for rule in self.rules:
if rule.applied:
logging.info('%s application(s) of %s', rule.applied, rule)
def style(self, realm, wpid):
if not wpid:
return None
style = self.styles[self.style_key(realm=realm, wpid=wpid)]
if realm in self.IGNORED_STYLES:
if style.internal_name in self.IGNORED_STYLES[realm]:
return None
self.activate_style(style)
style.count += 1
return style
def activate_style(self, style):
section_name = self.section_name(style.realm, style.internal_name)
if section_name in self.style_sections_used:
return
if style.parent_style:
if not style.parent_style.used:
logging.debug('[%s] leads to missing %r', section_name, style.parent_wpid)
style.parent_style = self.base_styles[style.realm]
style.parent_wpid = style.parent_style.wpid
self.activate_style(style.parent_style)
self.update_setting_section(section_name, style)
self.style_sections_used.add(section_name)
if style.next_style and style.next_style.used:
self.activate_style(style.next_style)
elif style.next_wpid:
logging.debug('[%s] leads to missing %r', section_name, style.next_wpid)
def get_setting_section(self, section_name=None, realm=None, internal_name=None, style=None):
"""Return a section from the ini file.
If no such section exists, return a new dict, but don't add it
to the configuration.
"""
if not section_name:
section_name = self.section_name(realm=realm, internal_name=internal_name, style=style)
if self.settings.has_section(section_name):
return self.settings[section_name]
return {}
def ensure_setting_section(self, section_name=None, realm=None, internal_name=None, style=None):
"""Return a section from the ini file, adding one if needed."""
if not section_name:
section_name = self.section_name(realm=realm, internal_name=internal_name, style=style)
if not self.settings.has_section(section_name):
self.settings[section_name] = {}
return self.settings[section_name]
def update_setting_section(self, section_name, style):
"""Update key:value pairs in an ini section.
Sets `self.settings_touched` if anything was changed.
"""
section = self.ensure_setting_section(section_name)
for name, ini_name in ini_fields(Style):
value = getattr(style, name)
self.settings_touched |= section.get(ini_name) != value
if value:
section[ini_name] = str(value or '')
else:
section.pop(ini_name, None)
# TODO:
# - ODT: footnotes
# - ODT: comments
# - PUB: Support non-ME docs
# - PUB: Manual
# - many-to-one wp_name -> name
# - mapping csv
# - [paragraph rule] when_first_in_doc
# - [paragraph rule] when_matches_re
# - [paragraph style] keep_last_n_chars
# - character style rule (grep)
# - Non-unicode when not required?
# - Import MarkDown
# - Paragraph direction (w:r/w:rPr/w:rtl -> <pParaDir:1>; but what about the basic dir?)
# - For post edit/proof: Manual formatting consolidation, TBD
# - Para: global base -> body base, heading base
# - More rule context: after same, after different, first since...
# - Really need a test suite of some sort.
# - Manual format: autogenerate styles
# - Manual format: collapse with existing styles
# - A flag to only create/update the ini file
# - Maybe add front matter (best done in Id? either that or the template thingy (ooh, jinja2!))
# - Something usable for those balloons (footnote+hl? endnote? convert to note in jsx?)
# - Autocreate character styles from manual combos
# bold: w:b (w:bCs?); italic: w:i (w:iCs?); undeline w:u
# font: <w:rFonts w:ascii="Courier New" w:hAnsi="Courier New" w:cs="Courier New">
# override style: <w:i w:val="0">
# - (f)odt import
# - Convert editing marks
# - idml import
# - Automatic header group
# - More complex BiDi
# - Endnotes
# - Linked styles?
# - Derivation rules?
# - Latent styles?
# - Digraph kerning (probably better in InDesign?)
| 38.158358 | 107 | 0.595566 |
f1878dcd10c27f22a30cb339a07fa33921ada3b8
| 433 |
py
|
Python
|
plotly/validators/sankey/_name.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 12 |
2020-04-18T18:10:22.000Z
|
2021-12-06T10:11:15.000Z
|
plotly/validators/sankey/_name.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 27 |
2020-04-28T21:23:12.000Z
|
2021-06-25T15:36:38.000Z
|
plotly/validators/sankey/_name.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 6 |
2020-04-18T23:07:08.000Z
|
2021-11-18T07:53:06.000Z
|
import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name='name', parent_name='sankey', **kwargs):
super(NameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'style'),
role=kwargs.pop('role', 'info'),
**kwargs
)
| 30.928571 | 75 | 0.644342 |
ce1ca6eb08c6d2dfbe9ca41e2607b1b10f9d00ff
| 2,746 |
py
|
Python
|
line/line/inverse4.py
|
liuboyan122/working-line
|
abd68e7e19fd341d5f90d42fd25f7ec83c75383b
|
[
"Apache-2.0"
] | null | null | null |
line/line/inverse4.py
|
liuboyan122/working-line
|
abd68e7e19fd341d5f90d42fd25f7ec83c75383b
|
[
"Apache-2.0"
] | null | null | null |
line/line/inverse4.py
|
liuboyan122/working-line
|
abd68e7e19fd341d5f90d42fd25f7ec83c75383b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import rclpy
from rclpy.node import Node
from builtin_interfaces.msg import Duration
from trajectory_msgs.msg import JointTrajectory , JointTrajectoryPoint
from ament_index_python.packages import get_package_share_directory
import ikpy.chain
import sys
import numpy as np
import os
## Near to the ground to check grab
#2.1 0 1.94
## full stretch right
#3.33 0.05 0.66
## Full stretch up
#0.47 0 3.78
## Random
# 0.63 -0.148 2.39
class Trajectory_publisher4(Node):
def __init__(self):
super().__init__('trajectory_publisher_node')
publish_topic = "/joint_trajectory_controller4/joint_trajectory"
self.trajectory_publisher = self.create_publisher(JointTrajectory, publish_topic, 10)
timer_period = 1
self.timer= self.create_timer(timer_period, self.timer_callback)
self.joints = ['joint_8']
package_share_dir = get_package_share_directory("line")
urdf_file= os.path.join(package_share_dir, "urdf", "press1.urdf")
## Toolbox interface
self.robot_initialize(urdf_file)
def timer_callback(self):
bazu_trajectory_msg = JointTrajectory()
bazu_trajectory_msg.joint_names = self.joints
point = JointTrajectoryPoint()
global i
if i <16:
point.positions = [0.0]
point.time_from_start = Duration(sec=1)
bazu_trajectory_msg.points.append(point)
self.trajectory_publisher.publish(bazu_trajectory_msg)
print("\nTrajectory Sent !\n")
i += 1
elif i< 17:
point.positions = [-0.5]
point.time_from_start = Duration(sec=1)
bazu_trajectory_msg.points.append(point)
self.trajectory_publisher.publish(bazu_trajectory_msg)
print("\nTrajectory Sent !\n")
i += 1
elif i< 18:
point.positions = [0.0]
point.time_from_start = Duration(sec=1)
bazu_trajectory_msg.points.append(point)
self.trajectory_publisher.publish(bazu_trajectory_msg)
print("\nTrajectory Sent !\n")
i += 1
else:
i=0
def robot_initialize(self,urdf_file):
self.kuka_robot = ikpy.chain.Chain.from_urdf_file(urdf_file)
def main(args=None):
global i
i=0
rclpy.init(args=args)
joint_trajectory_object4 = Trajectory_publisher4()
rclpy.spin(joint_trajectory_object4)
joint_trajectory_object4.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
| 29.847826 | 110 | 0.615805 |
0e180a8fc5561a83a5ecb59ee88f64279bcaee94
| 1,712 |
py
|
Python
|
marl_coop/utils/plot.py
|
PierreMsy/DRL_cooperation
|
0385f4c88857659f44ddd5fc8c5c6c33344a38cc
|
[
"MIT"
] | 1 |
2022-01-05T14:04:29.000Z
|
2022-01-05T14:04:29.000Z
|
marl_coop/utils/plot.py
|
PierreMsy/DRL_cooperation
|
0385f4c88857659f44ddd5fc8c5c6c33344a38cc
|
[
"MIT"
] | null | null | null |
marl_coop/utils/plot.py
|
PierreMsy/DRL_cooperation
|
0385f4c88857659f44ddd5fc8c5c6c33344a38cc
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
def plot_scores(dic_scores, window_size=20, target_score=None, axe=None, colors=None, title=None):
"""
Plot the global scores of the agents as a function of the number of episodes.
Args:
dic_scores (dict): pandas DataFrame of the scores by run key.
window_size (int, defaults to 20): The size of the window for the rolling average.
target_score (float, optional): Display the given target score as a dotted horizontal line.
axe (AxesSubplot, optional): axe of the subplot when this plot is embedded in a subplot.
colors (list[mplt color], optional): list of color when default colors are not suitable.
title (str, optional): title of the plot to display.
"""
if axe:
fig = plt.gcf()
else:
fig, axe = plt.subplots(1,1,figsize=(12,6), dpi=175)
if not colors:
colors = [None] * len(dic_scores)
max_len = 0
for idx,(key, result) in enumerate(dic_scores.items()):
score = np.array(result.score)
score_averaged = []
for i in range(len(score)):
score_averaged.append(
np.mean(
score[max(0, i-window_size//2): min(len(score)-1, i+window_size//2)]))
max_len = max(max_len, len(score_averaged))
axe.plot(score_averaged, label=key, color=colors[idx])
if target_score:
axe.hlines(target_score, 0, max_len, 'k', linestyle=':', label='target score')
axe.set_ylabel('Score')
axe.set_xlabel('Episode #')
if title:
axe.set_title(title, fontdict={'fontsize': 14})
fig.legend(bbox_to_anchor=(.985, .98), loc='upper left')
plt.tight_layout()
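# Minimal usage sketch (hypothetical data; per the docstring each value is a
# DataFrame with a `score` column):
# import pandas as pd
# runs = {'run-a': pd.DataFrame({'score': [0.1, 0.3, 0.6, 0.9]})}
# plot_scores(runs, window_size=2, target_score=0.5, title='Training scores')
# plt.show()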
| 39.813953 | 99 | 0.639603 |
5cc046a5eb3ee3f0a2923da120e6f5492763effb
| 4,801 |
py
|
Python
|
lnbits/wallets/opennode.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 258 |
2020-04-27T21:36:21.000Z
|
2021-10-30T23:24:48.000Z
|
lnbits/wallets/opennode.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 283 |
2020-04-27T17:23:12.000Z
|
2021-11-01T10:07:20.000Z
|
lnbits/wallets/opennode.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 109 |
2020-04-28T06:00:17.000Z
|
2021-10-13T02:48:28.000Z
|
import trio
import hmac
import httpx
from http import HTTPStatus
from os import getenv
from typing import Optional, AsyncGenerator
from quart import request, url_for
from .base import (
StatusResponse,
InvoiceResponse,
PaymentResponse,
PaymentStatus,
Wallet,
Unsupported,
)
class OpenNodeWallet(Wallet):
"""https://developers.opennode.com/"""
def __init__(self):
endpoint = getenv("OPENNODE_API_ENDPOINT")
self.endpoint = endpoint[:-1] if endpoint.endswith("/") else endpoint
key = (
getenv("OPENNODE_KEY")
or getenv("OPENNODE_ADMIN_KEY")
or getenv("OPENNODE_INVOICE_KEY")
)
self.auth = {"Authorization": key}
async def status(self) -> StatusResponse:
try:
async with httpx.AsyncClient() as client:
r = await client.get(
f"{self.endpoint}/v1/account/balance",
headers=self.auth,
timeout=40,
)
except (httpx.ConnectError, httpx.RequestError):
return StatusResponse(f"Unable to connect to '{self.endpoint}'", 0)
data = r.json()["data"]
if r.is_error:
return StatusResponse(data["message"], 0)
return StatusResponse(None, data["balance"]["BTC"] / 100_000_000_000)
async def create_invoice(
self,
amount: int,
memo: Optional[str] = None,
description_hash: Optional[bytes] = None,
) -> InvoiceResponse:
if description_hash:
raise Unsupported("description_hash")
async with httpx.AsyncClient() as client:
r = await client.post(
f"{self.endpoint}/v1/charges",
headers=self.auth,
json={
"amount": amount,
"description": memo or "",
"callback_url": url_for("webhook_listener", _external=True),
},
timeout=40,
)
if r.is_error:
error_message = r.json()["message"]
return InvoiceResponse(False, None, None, error_message)
data = r.json()["data"]
checking_id = data["id"]
payment_request = data["lightning_invoice"]["payreq"]
return InvoiceResponse(True, checking_id, payment_request, None)
async def pay_invoice(self, bolt11: str) -> PaymentResponse:
async with httpx.AsyncClient() as client:
r = await client.post(
f"{self.endpoint}/v2/withdrawals",
headers=self.auth,
json={"type": "ln", "address": bolt11},
timeout=180,
)
if r.is_error:
error_message = r.json()["message"]
return PaymentResponse(False, None, 0, None, error_message)
data = r.json()["data"]
checking_id = data["id"]
fee_msat = data["fee"] * 1000
return PaymentResponse(True, checking_id, fee_msat, None, None)
async def get_invoice_status(self, checking_id: str) -> PaymentStatus:
async with httpx.AsyncClient() as client:
r = await client.get(
f"{self.endpoint}/v1/charge/{checking_id}", headers=self.auth
)
if r.is_error:
return PaymentStatus(None)
statuses = {"processing": None, "paid": True, "unpaid": False}
return PaymentStatus(statuses[r.json()["data"]["status"]])
async def get_payment_status(self, checking_id: str) -> PaymentStatus:
async with httpx.AsyncClient() as client:
r = await client.get(
f"{self.endpoint}/v1/withdrawal/{checking_id}", headers=self.auth
)
if r.is_error:
return PaymentStatus(None)
statuses = {
"initial": None,
"pending": None,
"confirmed": True,
"error": False,
"failed": False,
}
return PaymentStatus(statuses[r.json()["data"]["status"]])
async def paid_invoices_stream(self) -> AsyncGenerator[str, None]:
self.send, receive = trio.open_memory_channel(0)
async for value in receive:
yield value
async def webhook_listener(self):
data = await request.form
if "status" not in data or data["status"] != "paid":
return "", HTTPStatus.NO_CONTENT
charge_id = data["id"]
x = hmac.new(self.auth["Authorization"].encode("ascii"), digestmod="sha256")
x.update(charge_id.encode("ascii"))
if x.hexdigest() != data["hashed_order"]:
print("invalid webhook, not from opennode")
return "", HTTPStatus.NO_CONTENT
await self.send.send(charge_id)
return "", HTTPStatus.NO_CONTENT
| 32.883562 | 84 | 0.570923 |
7edb5b16ce77ada6f248403d2ed93f04155d06a3
| 10,549 |
py
|
Python
|
pandas/core/util/hashing.py
|
vimalromeo/pandas
|
7c14e4f14aff216be558bf5d4d2d00b4838c2360
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 69 |
2020-03-31T06:40:17.000Z
|
2022-02-25T11:48:18.000Z
|
venv/lib/python3.7/site-packages/pandas/core/util/hashing.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 8 |
2019-12-04T23:44:11.000Z
|
2022-02-10T08:31:40.000Z
|
venv/lib/python3.7/site-packages/pandas/core/util/hashing.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 28 |
2020-04-15T15:24:17.000Z
|
2021-12-26T04:05:02.000Z
|
"""
data hash pandas / numpy objects
"""
import itertools
import numpy as np
from pandas._libs import hashing, tslib
from pandas.core.dtypes.generic import (
ABCMultiIndex,
ABCIndexClass,
ABCSeries,
ABCDataFrame)
from pandas.core.dtypes.common import (
is_categorical_dtype, is_list_like)
from pandas.core.dtypes.missing import isna
from pandas.core.dtypes.cast import infer_dtype_from_scalar
# 16 byte long hashing key
_default_hash_key = '0123456789123456'
def _combine_hash_arrays(arrays, num_items):
"""
Parameters
----------
arrays : generator
num_items : int
    must match the number of arrays yielded by `arrays`; the mixing
    scheme is the same as CPython's tupleobject.c tuple hash
"""
try:
first = next(arrays)
except StopIteration:
return np.array([], dtype=np.uint64)
arrays = itertools.chain([first], arrays)
mult = np.uint64(1000003)
out = np.zeros_like(first) + np.uint64(0x345678)
for i, a in enumerate(arrays):
inverse_i = num_items - i
out ^= a
out *= mult
mult += np.uint64(82520 + inverse_i + inverse_i)
assert i + 1 == num_items, 'Fed in wrong num_items'
out += np.uint64(97531)
return out
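# Illustrative use (values omitted): combining two per-row hash arrays the way
# CPython combines tuple element hashes.
# a = np.array([1, 2], dtype='uint64'); b = np.array([3, 4], dtype='uint64')
# _combine_hash_arrays(iter([a, b]), 2)  # -> one combined uint64 per row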
def hash_pandas_object(obj, index=True, encoding='utf8', hash_key=None,
categorize=True):
"""
Return a data hash of the Index/Series/DataFrame
.. versionadded:: 0.19.2
Parameters
----------
index : boolean, default True
include the index in the hash (if Series/DataFrame)
encoding : string, default 'utf8'
encoding for data & key when strings
hash_key : string key to encode, default to _default_hash_key
categorize : bool, default True
Whether to first categorize object arrays before hashing. This is more
efficient when the array contains duplicate values.
.. versionadded:: 0.20.0
Returns
-------
Series of uint64, same length as the object
"""
from pandas import Series
if hash_key is None:
hash_key = _default_hash_key
if isinstance(obj, ABCMultiIndex):
return Series(hash_tuples(obj, encoding, hash_key),
dtype='uint64', copy=False)
if isinstance(obj, ABCIndexClass):
h = hash_array(obj.values, encoding, hash_key,
categorize).astype('uint64', copy=False)
h = Series(h, index=obj, dtype='uint64', copy=False)
elif isinstance(obj, ABCSeries):
h = hash_array(obj.values, encoding, hash_key,
categorize).astype('uint64', copy=False)
if index:
# a one-element generator, so _combine_hash_arrays sees exactly two arrays
index_iter = (hash_pandas_object(obj.index,
index=False,
encoding=encoding,
hash_key=hash_key,
categorize=categorize).values
for _ in [None])
arrays = itertools.chain([h], index_iter)
h = _combine_hash_arrays(arrays, 2)
h = Series(h, index=obj.index, dtype='uint64', copy=False)
elif isinstance(obj, ABCDataFrame):
hashes = (hash_array(series.values) for _, series in obj.iteritems())
num_items = len(obj.columns)
if index:
index_hash_generator = (hash_pandas_object(obj.index,
index=False,
encoding=encoding,
hash_key=hash_key,
categorize=categorize).values # noqa
for _ in [None])
num_items += 1
hashes = itertools.chain(hashes, index_hash_generator)
h = _combine_hash_arrays(hashes, num_items)
h = Series(h, index=obj.index, dtype='uint64', copy=False)
else:
raise TypeError("Unexpected type for hashing %s" % type(obj))
return h
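# Usage sketch (hash values omitted; they are deterministic for a fixed hash_key):
# >>> import pandas as pd
# >>> hash_pandas_object(pd.Series(['a', 'b']), index=False)
# ... a uint64 Series with one hash per row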
def hash_tuples(vals, encoding='utf8', hash_key=None):
"""
Hash an MultiIndex / list-of-tuples efficiently
.. versionadded:: 0.20.0
Parameters
----------
vals : MultiIndex, list-of-tuples, or single tuple
encoding : string, default 'utf8'
hash_key : string key to encode, default to _default_hash_key
Returns
-------
ndarray of hashed values array
"""
is_tuple = False
if isinstance(vals, tuple):
vals = [vals]
is_tuple = True
elif not is_list_like(vals):
raise TypeError("must be convertible to a list-of-tuples")
from pandas import Categorical, MultiIndex
if not isinstance(vals, ABCMultiIndex):
vals = MultiIndex.from_tuples(vals)
# create a list-of-Categoricals
vals = [Categorical(vals.labels[level],
vals.levels[level],
ordered=False,
fastpath=True)
for level in range(vals.nlevels)]
# hash the list-of-ndarrays
hashes = (_hash_categorical(cat,
encoding=encoding,
hash_key=hash_key)
for cat in vals)
h = _combine_hash_arrays(hashes, len(vals))
if is_tuple:
h = h[0]
return h
def hash_tuple(val, encoding='utf8', hash_key=None):
"""
Hash a single tuple efficiently
Parameters
----------
val : single tuple
encoding : string, default 'utf8'
hash_key : string key to encode, default to _default_hash_key
Returns
-------
hash
"""
hashes = (_hash_scalar(v, encoding=encoding, hash_key=hash_key)
for v in val)
h = _combine_hash_arrays(hashes, len(val))[0]
return h
def _hash_categorical(c, encoding, hash_key):
"""
Hash a Categorical by hashing its categories, and then mapping the codes
to the hashes
Parameters
----------
c : Categorical
encoding : string, default 'utf8'
hash_key : string key to encode, default to _default_hash_key
Returns
-------
ndarray of hashed values array, same size as len(c)
"""
hashed = hash_array(c.categories.values, encoding, hash_key,
categorize=False)
# we have uint64, as we don't directly support missing values
# we don't want to use take_nd which will coerce to float
# instead, directly construct the result with a
# max(np.uint64) as the missing value indicator
#
# TODO: GH 15362
mask = c.isna()
if len(hashed):
result = hashed.take(c.codes)
else:
result = np.zeros(len(mask), dtype='uint64')
if mask.any():
result[mask] = np.iinfo(np.uint64).max
return result
def hash_array(vals, encoding='utf8', hash_key=None, categorize=True):
"""
Given a 1d array, return an array of deterministic integers.
.. versionadded:: 0.19.2
Parameters
----------
vals : ndarray, Categorical
encoding : string, default 'utf8'
encoding for data & key when strings
hash_key : string key to encode, default to _default_hash_key
categorize : bool, default True
Whether to first categorize object arrays before hashing. This is more
efficient when the array contains duplicate values.
.. versionadded:: 0.20.0
Returns
-------
1d uint64 numpy array of hash values, same length as the vals
"""
if not hasattr(vals, 'dtype'):
raise TypeError("must pass a ndarray-like")
dtype = vals.dtype
if hash_key is None:
hash_key = _default_hash_key
# For categoricals, we hash the categories, then remap the codes to the
# hash values. (This check is above the complex check so that we don't ask
# numpy if categorical is a subdtype of complex, as it will choke).
if is_categorical_dtype(dtype):
return _hash_categorical(vals, encoding, hash_key)
# we'll be working with everything as 64-bit values, so handle this
# 128-bit value early
elif np.issubdtype(dtype, np.complex128):
return hash_array(vals.real) + 23 * hash_array(vals.imag)
# First, turn whatever array this is into unsigned 64-bit ints, if we can
# manage it.
elif dtype == np.bool_:
vals = vals.astype('u8')
elif issubclass(dtype.type, (np.datetime64, np.timedelta64)):
vals = vals.view('i8').astype('u8', copy=False)
elif issubclass(dtype.type, np.number) and dtype.itemsize <= 8:
vals = vals.view('u{}'.format(vals.dtype.itemsize)).astype('u8')
else:
# With repeated values, its MUCH faster to categorize object dtypes,
# then hash and rename categories. We allow skipping the categorization
# when the values are known/likely to be unique.
if categorize:
from pandas import factorize, Categorical, Index
codes, categories = factorize(vals, sort=False)
cat = Categorical(codes, Index(categories),
ordered=False, fastpath=True)
return _hash_categorical(cat, encoding, hash_key)
try:
vals = hashing.hash_object_array(vals, hash_key, encoding)
except TypeError:
# we have mixed types
vals = hashing.hash_object_array(vals.astype(str).astype(object),
hash_key, encoding)
# Then, redistribute these 64-bit ints within the space of 64-bit ints
vals ^= vals >> 30
vals *= np.uint64(0xbf58476d1ce4e5b9)
vals ^= vals >> 27
vals *= np.uint64(0x94d049bb133111eb)
vals ^= vals >> 31
return vals
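# Usage sketch: hashing a raw ndarray directly; `categorize` only matters for
# object-dtype arrays with many repeated values.
# >>> hash_array(np.array([1, 2, 2]))  # -> three uint64 values, rows 1 and 2 equal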
def _hash_scalar(val, encoding='utf8', hash_key=None):
"""
Hash scalar value
Returns
-------
1d uint64 numpy array of hash value, of length 1
"""
if isna(val):
# this is to be consistent with the _hash_categorical implementation
return np.array([np.iinfo(np.uint64).max], dtype='u8')
if getattr(val, 'tzinfo', None) is not None:
# for tz-aware datetimes, we need the underlying naive UTC value and
# not the tz aware object or pd extension type (as
# infer_dtype_from_scalar would do)
if not isinstance(val, tslib.Timestamp):
val = tslib.Timestamp(val)
val = val.tz_convert(None)
dtype, val = infer_dtype_from_scalar(val)
vals = np.array([val], dtype=dtype)
return hash_array(vals, hash_key=hash_key, encoding=encoding,
categorize=False)
| 31.678679 | 92 | 0.604133 |
81deff619312d45862113f15252dbb408e17e49a
| 20,607 |
py
|
Python
|
niftynet/layer/crf.py
|
alanpeixinho/NiftyNet
|
9a17022a71985974f9e5ca992c765d55860fdd7d
|
[
"Apache-2.0"
] | null | null | null |
niftynet/layer/crf.py
|
alanpeixinho/NiftyNet
|
9a17022a71985974f9e5ca992c765d55860fdd7d
|
[
"Apache-2.0"
] | null | null | null |
niftynet/layer/crf.py
|
alanpeixinho/NiftyNet
|
9a17022a71985974f9e5ca992c765d55860fdd7d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Re-implementation of [1] in Tensorflow for volumetric image processing.
[1] Zheng et al.
"Conditional random fields as recurrent neural networks." ICCV 2015.
https://arxiv.org/abs/1502.03240
"""
from __future__ import absolute_import, print_function
import numpy as np
import tensorflow as tf
from niftynet.layer.base_layer import TrainableLayer
from niftynet.layer.layer_util import infer_spatial_rank, expand_spatial_params
class CRFAsRNNLayer(TrainableLayer):
"""
This class defines a layer implementing CRFAsRNN described in [1] using
a bilateral and a spatial kernel as in [2].
Essentially, this layer smooths its input based on a distance in a feature
space comprising spatial and feature dimensions.
High-dimensional Gaussian filtering adapted from [3].
[1] Zheng et al., https://arxiv.org/abs/1502.03240
[2] Krahenbuhl and Koltun, https://arxiv.org/pdf/1210.5644.pdf
[3] Adam et al., https://graphics.stanford.edu/papers/permutohedral/
"""
def __init__(self,
alpha=5.,
beta=5.,
gamma=5.,
T=5,
aspect_ratio=None,
mu_init=None,
w_init=None,
name="crf_as_rnn"):
"""
Currently this layer supports spatial ND dense CRF with CPU only.
To place the layer on CPU::
with tf.device('/cpu:0'):
crf_layer = CRFAsRNNLayer()
crf_output = crf_layer(features, raw_logits)
To ensure backpropagations during training are placed on CPU as well,
the optimiser should be used with argument
``colocate_gradients_with_ops=True``, e.g.,::
train_op = tf.train.GradientDescentOptimizer(.5).minimize(
training_loss, colocate_gradients_with_ops=True)
:param alpha: bandwidth for spatial coordinates in bilateral kernel.
Higher values cause more spatial blurring
:param beta: bandwidth for feature coordinates in bilateral kernel
Higher values cause more feature blurring
:param gamma: bandwidth for spatial coordinates in spatial kernel
Higher values cause more spatial blurring
:param T: number of stacked layers in the RNN
:param aspect_ratio: spacing of adjacent voxels
(allows isotropic spatial smoothing when voxels are not isotropic)
:param mu_init: initial compatibility matrix [n_classes x n_classes]
default value: `-1.0 * eye(n_classes)`
:param w_init: initial kernel weights [2 x n_classes]
where w_init[0] are the weights for the bilateral kernel,
w_init[1] are the weights for the spatial kernel.
default value: `[ones(n_classes), ones(n_classes)]`
:param name:
"""
super(CRFAsRNNLayer, self).__init__(name=name)
self._alpha = alpha
self._beta = beta
self._gamma = gamma
self._T = T
self._aspect_ratio = aspect_ratio
self._mu_init = mu_init
self._w_init = w_init
assert self._alpha > 0, 'alpha should be positive'
assert self._beta > 0, 'beta should be positive'
assert self._gamma > 0, 'gamma should be positive'
def layer_op(self, I, U):
"""
Compute `T` iterations of mean field update given a dense CRF.
This layer maintains trainable CRF model parameters
(a compatibility function and `m` kernel weights).
:param I: feature maps used in the dense pairwise term of CRF
:param U: activation maps used in the unary term of CRF (before softmax)
:return: Maximum a posteriori labeling (before softmax)
"""
spatial_rank = infer_spatial_rank(U)
all_shape = U.shape.as_list()
batch_size, spatial_shape, n_ch = \
all_shape[0], all_shape[1:-1], all_shape[-1]
n_feat = I.shape.as_list()[-1]
if self._aspect_ratio is None:
self._aspect_ratio = [1.] * spatial_rank
self._aspect_ratio = expand_spatial_params(
self._aspect_ratio, spatial_rank, float)
# constructing the scaled regular grid
spatial_grid = tf.meshgrid(
*[np.arange(i, dtype=np.float32) * a
for i, a in zip(spatial_shape, self._aspect_ratio)],
indexing='ij')
spatial_coords = tf.stack(spatial_grid[::-1], spatial_rank)
spatial_coords = tf.tile(
tf.expand_dims(spatial_coords, 0),
[batch_size] + [1] * spatial_rank + [1])
# concatenating spatial coordinates and features
# (and squeeze spatially)
# for the bilateral kernel
bilateral_coords = tf.reshape(
tf.concat([spatial_coords / self._alpha, I / self._beta], -1),
[batch_size, -1, n_feat + spatial_rank])
# for the spatial kernel
spatial_coords = tf.reshape(
spatial_coords / self._gamma,
[batch_size, -1, spatial_rank])
# Build permutohedral structures for smoothing
permutohedrals = [
permutohedral_prepare(coords)
for coords in (bilateral_coords, spatial_coords)]
# squeeze the spatial shapes and recover them in the end
U = tf.reshape(U, [batch_size, -1, n_ch])
n_voxels = U.shape.as_list()[1]
# normalisation factor
norms = []
for idx, permutohedral in enumerate(permutohedrals):
spatial_norm = _permutohedral_gen(
permutohedral,
tf.ones((batch_size, n_voxels, 1)),
'spatial_norms' + str(idx))
spatial_norm.set_shape([batch_size, n_voxels, 1])
spatial_norm = 1.0 / tf.sqrt(spatial_norm + 1e-20)
norms.append(spatial_norm)
# trainable compatibility matrix mu (initialised as identity * -1)
mu_shape = [n_ch, n_ch]
if self._mu_init is None:
self._mu_init = -np.eye(n_ch)
self._mu_init = np.reshape(self._mu_init, mu_shape)
mu = tf.compat.v1.get_variable(
'Compatibility',
initializer=tf.constant(self._mu_init, dtype=tf.float32))
# trainable kernel weights
weight_shape = [n_ch]
if self._w_init is None:
self._w_init = [np.ones(n_ch), np.ones(n_ch)]
self._w_init = [
np.reshape(_w, weight_shape) for _w in self._w_init]
kernel_weights = [tf.compat.v1.get_variable(
'FilterWeights{}'.format(idx),
initializer=tf.constant(self._w_init[idx], dtype=tf.float32))
for idx, k in enumerate(permutohedrals)]
H1 = U
for t in range(self._T):
H1 = ftheta(U, H1, permutohedrals, mu, kernel_weights, norms,
name='{}{}'.format(self.name, t))
return tf.reshape(H1, all_shape)
def ftheta(U, H1, permutohedrals, mu, kernel_weights, norms, name):
"""
A mean-field update
:param U: the unary potentials (before softmax)
:param H1: the previous mean-field approximation to be updated
:param permutohedrals: fixed position vectors for fast filtering
:param mu: compatibility function
:param kernel_weights: weights bilateral/spatial kernels
:param norms: precomputed normalisation factor
:param name: layer name
:return: updated mean-field distribution
"""
unary_shape = U.shape.as_list()
n_ch = unary_shape[-1]
H1 = tf.nn.softmax(H1)
Q1 = 0
for idx, permutohedral in enumerate(permutohedrals):
# Message Passing
Q = _permutohedral_gen(permutohedral, H1 * norms[idx], name + str(idx))
Q.set_shape(unary_shape)
# Weighting Filtered Outputs
Q1 += Q * kernel_weights[idx] * norms[idx]
# Compatibility Transform, Adding Unary Potentials
# output logits, not the softmax
Q1 = tf.reshape(tf.matmul(tf.reshape(Q1, [-1, n_ch]), mu), unary_shape)
return U - Q1
def permutohedral_prepare(position_vectors):
"""
Embedding the position vectors in a high-dimensional space,
the lattice points are stored in hash tables.
The function computes:
- translation by the nearest reminder-0
- ranking permutation to the canonical simplex
- barycentric weights in the canonical simplex
:param position_vectors: N x d position
:return: barycentric weights, blur neighbours points in the hyperplane
"""
batch_size, n_voxels, n_ch = position_vectors.shape.as_list()
n_ch_1 = n_ch + 1
# reshaping batches and voxels into one dimension
# means we can use 1D gather and hashing easily
position_vectors = tf.reshape(position_vectors, [-1, n_ch])
# Generate position vectors in lattice space
# first rotate position into the (n_ch+1)-dimensional hyperplane
inv_std_dev = np.sqrt(2 / 3.) * n_ch_1
scale_factor = tf.constant([
inv_std_dev / np.sqrt((i + 1) * (i + 2)) for i in range(n_ch)])
Ex = [None] * n_ch_1
cum_sum = 0.0
for dit in range(n_ch, 0, -1):
scaled_vectors = position_vectors[:, dit - 1] * scale_factor[dit - 1]
Ex[dit] = cum_sum - scaled_vectors * dit
cum_sum += scaled_vectors
Ex[0] = cum_sum
Ex = tf.stack(Ex, -1)
# Compute coordinates
# Get closest remainder-0 point
v = tf.cast(tf.round(Ex / float(n_ch_1)), dtype=tf.int32)
rem0 = v * n_ch_1
# Find the simplex we are in and store it in rank
# (where rank describes what position coordinate i has
# in the sorted order of the feature values).
# This can be done more efficiently
# if necessary following the permutohedral paper.
index = tf.nn.top_k(Ex - tf.cast(rem0, dtype=tf.float32), n_ch_1, sorted=True).indices
rank = tf.nn.top_k(-index, n_ch_1, sorted=True).indices
# if the point doesn't lie on the plane (sum != 0) bring it back
# (sum(v) != 0) meaning off the plane
rank = rank + tf.reduce_sum(input_tensor=v, axis=1, keepdims=True)
add_minus_sub = tf.cast(rank < 0, dtype=tf.int32) - tf.cast(rank > n_ch, dtype=tf.int32)
add_minus_sub *= n_ch_1
rem0 = rem0 + add_minus_sub
rank = rank + add_minus_sub
# Compute the barycentric coordinates (p.10 in [Adams et al 2010])
v2 = (Ex - tf.cast(rem0, dtype=tf.float32)) / float(n_ch_1)
# CRF2RNN uses the calculated ranks to get v2 sorted in O(n_ch) time
# We cheat here by using the easy to implement
# but slower method of sorting again in O(n_ch log n_ch)
# we might get this even more efficient
# if we correct the original sorted data above
v_sorted = -tf.nn.top_k(-v2, n_ch_1, sorted=True).values
# weighted against the canonical simplex vertices
barycentric = \
v_sorted - tf.concat([v_sorted[:, -1:] - 1., v_sorted[:, :-1]], 1)
# Compute all vertices and their offset
def _simple_hash(key):
# WARNING: This hash function does not guarantee
# uniqueness of different position_vectors
hash_vector = np.power(
int(np.floor(np.power(tf.int64.max, 1. / (n_ch + 2)))),
[range(1, n_ch_1)])
hash_vector = tf.constant(hash_vector, dtype=tf.int64)
return tf.reduce_sum(input_tensor=tf.cast(key, dtype=tf.int64) * hash_vector, axis=1)
# This is done so the code does not break whether the user has TF 1.12.1 or a
# newer version: the first branch of the try is for TF 1.12.1, where the
# deleted_key keyword was missing, while the second is the normal usage for
# TF >= 1.13.1.
try:
hash_table = tf.contrib.lookup.MutableDenseHashTable(
tf.int64, tf.int64,
default_value=tf.constant([-1] * 100, dtype=tf.int64),
empty_key=-2,
initial_num_buckets=8,
checkpoint=False
)
except TypeError:
hash_table = tf.contrib.lookup.MutableDenseHashTable(
tf.int64, tf.int64,
default_value=tf.constant([-1] * n_ch, dtype=tf.int64),
empty_key=-3,
deleted_key=-2,
initial_num_buckets=8,
checkpoint=False
)
try:
index_table = tf.contrib.lookup.MutableDenseHashTable(
tf.int64, tf.int64,
default_value=0,
empty_key=-1,
initial_num_buckets=8,
checkpoint=False
)
except TypeError:
index_table = tf.contrib.lookup.MutableDenseHashTable(
tf.int64, tf.int64,
default_value=0,
empty_key=-2,
deleted_key=-1,
initial_num_buckets=8,
checkpoint=False
)
# canonical simplex (p.4 in [Adams et al 2010])
canonical = \
[[i] * (n_ch_1 - i) + [i - n_ch - 1] * i for i in range(n_ch_1)]
insert_ops = []
loc = [None] * n_ch_1
loc_hash = [None] * n_ch_1
for scit in range(n_ch_1):
# Compute the location of the lattice point explicitly
# (all but the last coordinate -
# it's redundant because they sum to zero)
loc[scit] = tf.gather(canonical[scit], rank[:, :-1]) + rem0[:, :-1]
loc_hash[scit] = _simple_hash(loc[scit])
insert_ops.append(
hash_table.insert(loc_hash[scit], tf.cast(loc[scit], dtype=tf.int64)))
with tf.control_dependencies(insert_ops):
fused_loc_hash, fused_loc = hash_table.export()
is_good_key = tf.compat.v1.where(tf.not_equal(fused_loc_hash, -2))[:, 0]
fused_loc = tf.gather(fused_loc, is_good_key)
fused_loc_hash = tf.gather(fused_loc_hash, is_good_key)
# The additional index hash table is used to
# linearise the hash table so that we can `tf.scatter` and `tf.gather`
# (range_id 0 reserved for the indextable's default value)
range_id = tf.range(
1, tf.size(input=fused_loc_hash, out_type=tf.int64) + 1, dtype=tf.int64)
range_id = tf.expand_dims(range_id, 1)
insert_indices = index_table.insert(fused_loc_hash, range_id)
# linearised [batch, spatial_dim] indices
# where in the splat variable each simplex vertex is
batch_index = tf.range(batch_size, dtype=tf.int32)
batch_index = tf.expand_dims(batch_index, 1)
batch_index = tf.tile(batch_index, [1, n_voxels])
batch_index = tf.cast(tf.reshape(batch_index, [-1]), dtype=tf.int64)
indices = [None] * n_ch_1
blur_neighbours1 = [None] * n_ch_1
blur_neighbours2 = [None] * n_ch_1
with tf.control_dependencies([insert_indices]):
for dit in range(n_ch_1):
# the neighbors along each axis.
offset = [n_ch if i == dit else -1 for i in range(n_ch)]
offset = tf.constant(offset, dtype=tf.int64)
blur_neighbours1[dit] = \
index_table.lookup(_simple_hash(fused_loc + offset))
blur_neighbours2[dit] = \
index_table.lookup(_simple_hash(fused_loc - offset))
indices[dit] = tf.stack([
index_table.lookup(loc_hash[dit]), batch_index], 1)
return barycentric, blur_neighbours1, blur_neighbours2, indices
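# Standalone numpy sketch (illustrative only, not used by the layer): the same
# hyperplane embedding and barycentric weights as permutohedral_prepare, for a
# single d-dimensional position, omitting the off-plane rank correction and the
# hash tables for brevity.
def _embed_single_position_sketch(position):
    d = len(position)
    d1 = d + 1
    inv_std_dev = np.sqrt(2 / 3.) * d1
    scale = [inv_std_dev / np.sqrt((i + 1) * (i + 2)) for i in range(d)]
    Ex = [0.0] * d1
    cum_sum = 0.0
    for dit in range(d, 0, -1):
        scaled = position[dit - 1] * scale[dit - 1]
        Ex[dit] = cum_sum - scaled * dit
        cum_sum += scaled
    Ex[0] = cum_sum
    Ex = np.array(Ex)                 # lies on the sum-zero hyperplane
    rem0 = np.round(Ex / d1) * d1     # nearest remainder-0 lattice point
    v_sorted = np.sort((Ex - rem0) / d1)   # ascending, as in the code above
    # barycentric coordinates w.r.t. the canonical simplex; they sum to 1
    barycentric = v_sorted - np.concatenate([v_sorted[-1:] - 1., v_sorted[:-1]])
    return rem0, barycentric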
def permutohedral_compute(data_vectors,
barycentric,
blur_neighbours1,
blur_neighbours2,
indices,
name,
reverse):
"""
Splat, Gaussian blur, and slice
:param data_vectors: value map to be filtered
:param barycentric: embedding coordinates
:param blur_neighbours1: first neighbours' coordinates relative to indices
:param blur_neighbours2: second neighbours' coordinates relative to indices
:param indices: corresponding locations of data_vectors
:param name: layer name
:param reverse: transpose the Gaussian kernel if True
:return: filtered data_vectors (sliced to the original space)
"""
num_simplex_corners = barycentric.shape.as_list()[-1]
n_ch = num_simplex_corners - 1
batch_size, n_voxels, n_ch_data = data_vectors.shape.as_list()
data_vectors = tf.reshape(data_vectors, [-1, n_ch_data])
# Splatting
with tf.compat.v1.variable_scope(name):
splat = tf.contrib.framework.local_variable(
tf.constant(0.0), validate_shape=False, name='splatbuffer')
# with tf.control_dependencies([splat.initialized_value()]):
initial_splat = tf.zeros(
[tf.shape(input=blur_neighbours1[0])[0] + 1, batch_size, n_ch_data])
reset_splat = tf.compat.v1.assign(splat, initial_splat, validate_shape=False)
with tf.control_dependencies([reset_splat]):
for scit in range(num_simplex_corners):
data = data_vectors * barycentric[:, scit:scit + 1]
splat = tf.compat.v1.scatter_nd_add(splat, indices[scit], data)
# Blur with 1D kernels
for dit in range(n_ch, -1, -1) if reverse else range(n_ch + 1):
b1 = tf.gather(splat, blur_neighbours1[dit])
b3 = tf.gather(splat, blur_neighbours2[dit])
splat = tf.concat([
splat[:1, ...], splat[1:, ...] + 0.5 * (b1 + b3)], 0)
# Slice
sliced = 0.0
# Alpha is a magic scaling constant from CRFAsRNN code
alpha = 1. / (1. + np.power(2., -n_ch))
for scit in range(0, num_simplex_corners):
sliced += tf.gather_nd(splat, indices[scit]) * \
barycentric[:, scit:scit + 1] * alpha
sliced = tf.reshape(sliced, [batch_size, n_voxels, n_ch_data])
return sliced
def _py_func_with_grads(func, inp, Tout, stateful=True, name=None, grad=None):
"""
To get this to work with automatic differentiation
we use a hack attributed to Sergey Ioffe
mentioned here: http://stackoverflow.com/questions/36456436
Define custom _py_func_with_grads which takes also a grad op as argument:
from https://gist.github.com/harpone/3453185b41d8d985356cbe5e57d67342
:param func:
:param inp:
:param Tout:
:param stateful:
:param name:
:param grad:
:return:
"""
# Need to generate a unique name to avoid duplicates:
import uuid
rnd_name = 'PyFuncGrad' + str(uuid.uuid4())
# tf.logging.info('CRFasRNN layer iteration {}'.format(rnd_name))
tf.RegisterGradient(rnd_name)(grad) # see _MySquareGrad for grad example
with tf.compat.v1.get_default_graph().gradient_override_map({"PyFunc": rnd_name}):
return tf.compat.v1.py_func(func, inp, Tout, stateful=stateful, name=name)[0]
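# Minimal sketch of the same trick (TF 1.x graph mode; `x_tensor` is a
# hypothetical float32 tensor): giving a py_func the gradient of squaring.
# def _square(x):
#     return x * x
# def _square_grad(op, grad):
#     return 2. * tf.cast(op.inputs[0], tf.float32) * grad
# y = _py_func_with_grads(_square, [x_tensor], [tf.float32], grad=_square_grad)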
def _gradient_stub(data_vectors,
barycentric,
blur_neighbours1,
blur_neighbours2,
indices,
name):
"""
This is a stub operator whose purpose is
to allow us to overwrite the gradient.
The forward pass gives zeros and
the backward pass gives the correct gradients
for the permutohedral_compute function
:param data_vectors:
:param barycentric:
:param blur_neighbours1:
:param blur_neighbours2:
:param indices:
:param name:
:return:
"""
def _dummy_wrapper(*_unused):
return np.float32(0)
def _permutohedral_grad_wrapper(op, grad):
# Differentiation can be done using permutohedral lattice
# with Gaussian filter order reversed
filtering_grad = permutohedral_compute(
grad, op.inputs[1], op.inputs[2], op.inputs[3], op.inputs[4],
name, reverse=True)
return [filtering_grad] + [None for i in op.inputs[1:]]
_inputs = [
data_vectors, barycentric, blur_neighbours1, blur_neighbours2, indices]
partial_grads_func = _py_func_with_grads(
_dummy_wrapper,
_inputs,
[tf.float32],
name=name,
grad=_permutohedral_grad_wrapper)
partial_grads_func.set_shape(data_vectors.shape.as_list())
return partial_grads_func
def _permutohedral_gen(permutohedral, data_vectors, name):
"""
a wrapper combines permutohedral_compute and a customised gradient op.
:param permutohedral:
:param data_vectors:
:param name:
:return:
"""
barycentric, blur_neighbours1, blur_neighbours2, indices = permutohedral
backward_branch = _gradient_stub(
data_vectors,
barycentric,
blur_neighbours1,
blur_neighbours2,
indices,
name)
forward_branch = permutohedral_compute(
data_vectors,
barycentric,
blur_neighbours1,
blur_neighbours2,
indices,
name,
reverse=False)
return backward_branch + tf.stop_gradient(forward_branch)
| 38.662289 | 93 | 0.637647 |
2ab698b9ee69870da9d70ee982fff61a7efee14a
| 2,200 |
py
|
Python
|
rlgraph/agents/random_agent.py
|
RLGraph/RLGraph
|
428fc136a9a075f29a397495b4226a491a287be2
|
[
"Apache-2.0"
] | 290 |
2018-07-29T15:30:57.000Z
|
2022-03-19T02:46:53.000Z
|
rlgraph/agents/random_agent.py
|
RLGraph/RLGraph
|
428fc136a9a075f29a397495b4226a491a287be2
|
[
"Apache-2.0"
] | 76 |
2018-10-19T08:42:01.000Z
|
2020-05-03T08:34:21.000Z
|
rlgraph/agents/random_agent.py
|
RLGraph/RLGraph
|
428fc136a9a075f29a397495b4226a491a287be2
|
[
"Apache-2.0"
] | 41 |
2018-10-30T07:05:05.000Z
|
2022-03-01T08:28:24.000Z
|
# Copyright 2018/2019 The RLgraph authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function
from rlgraph.agents import Agent
class RandomAgent(Agent):
"""
An Agent that picks random actions from the action Space.
"""
def __init__(self, state_space, action_space, name="random-agent", **kwargs):
super(RandomAgent, self).__init__(
update_spec=dict(do_updates=False), state_space=state_space, action_space=action_space, name=name, **kwargs
)
self.action_space_batched = self.action_space.with_batch_rank()
def get_action(self, states, internals=None, use_exploration=False, apply_preprocessing=True, extra_returns=None,
time_percentage=None):
a = self.action_space_batched.sample(size=len(states[0]))
if extra_returns is not None and "preprocessed_states" in extra_returns:
return a, states
else:
return a
def update(self, batch=None, time_percentage=None, **kwargs):
# Return fake loss and loss-per-item.
return 0.0, 0.0
def _observe_graph(self, preprocessed_states, actions, internals, rewards, next_states, terminals):
pass
# Override these with pass so we can use them when testing distributed strategies.
def set_weights(self, policy_weights, value_function_weights=None):
pass
def get_weights(self):
pass
def call_api_method(self, op, inputs=None, return_ops=None):
pass
def __repr__(self):
return "RandomAgent()"
| 37.931034 | 119 | 0.683636 |
d3008e8dc00832476b28d24880a822d71170e5c7
| 666 |
py
|
Python
|
src/test/directory_lister_test.py
|
pgecsenyi/fst
|
1d4f579fb3cccd022fe1ab0e61aa00693e7234c1
|
[
"MIT"
] | 1 |
2019-12-04T20:35:34.000Z
|
2019-12-04T20:35:34.000Z
|
src/test/directory_lister_test.py
|
pgecsenyi/router-fs
|
1d4f579fb3cccd022fe1ab0e61aa00693e7234c1
|
[
"MIT"
] | null | null | null |
src/test/directory_lister_test.py
|
pgecsenyi/router-fs
|
1d4f579fb3cccd022fe1ab0e61aa00693e7234c1
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import patch
from filesystem.transformation.directory_lister import DirectoryLister
class DirectoryListerTest(unittest.TestCase):
@patch('os.walk')
def test_list_directory(self, mock_walk):
dirpath = '/home/root/doc/fruits'
expected_files = [dirpath + '/apple.txt', dirpath + '/banana.txt']
mock_walk.return_value = [(dirpath, [], ['apple.txt', 'banana.txt'])]
directory_lister = DirectoryLister(dirpath)
result = [i for i in directory_lister.list_directory()]
mock_walk.assert_called_once_with(dirpath)
self.assertEqual(sorted(expected_files), sorted(result))
| 33.3 | 77 | 0.71021 |
381e76294379ee6653bae33ffd292b0674b44964
| 6,228 |
py
|
Python
|
renderer-process/py/script3.py
|
SenorProject/Fileception
|
f94c92187d9d60d4168e708a1bba436442ff2adc
|
[
"MIT"
] | null | null | null |
renderer-process/py/script3.py
|
SenorProject/Fileception
|
f94c92187d9d60d4168e708a1bba436442ff2adc
|
[
"MIT"
] | null | null | null |
renderer-process/py/script3.py
|
SenorProject/Fileception
|
f94c92187d9d60d4168e708a1bba436442ff2adc
|
[
"MIT"
] | 1 |
2021-08-10T20:54:20.000Z
|
2021-08-10T20:54:20.000Z
|
#!/usr/bin/python
#
# Fileception: Angecryption CLI tool
#
# Written for Python 3
#
# Pedro Sicilia, Mustafa Mohamed, Jacob Frank, Kevin Thomas, Omar Nasir
# CIS4914 Senior Project
#
#
# Usage: ./script3 [hidden.pdf] [cover.pdf] [combined.pdf] [Generate key]
# -Output file combined.pdf will contain encrypted data from hidden.pdf
# but will display as cover.pdf, can be decrypted to show hidden.pdf
# -Will generate key if "yes" is inputted under [Generate key], otherwise will
# use default key
#
# Encrypt: ./script3 -e [combined.pdf] [key.txt]
# -Intakes user-specified file and encrypts to reveal the other pdf
# -If no key path is given, will use default key
#
# Decrypt: ./script3 -d [combined.pdf] [key.txt]
# -Intakes user-specified file and decrypts to reveal the other pdf
# -If no key path is given, will use default key
#
import sys
import os
from Crypto.Cipher import AES
import Crypto.Util.py3compat as cryutil
import secrets
c0 = "%PDF-obj\nstream\n"
chunk_end = "\nendstream\nendobj\n"
CHUNK_END_SIZE = len(chunk_end)
cphr = AES
pdfmagic = "%PDF-"
sym_key = b'ABCDEFGHIJKLMNOP'
EOF = "%%EOF"
EOF_SIZE = len(EOF)
# takes argument vector as parameter
def checkArgs(v):
if(len(v) == 4 or len(v) == 5):
return True
elif(len(v) > 5):
print ("Error: excess arguments\n")
elif(len(v) < 4):
print ("Error: missing arguments\n")
return False
# takes argument vector as parameter
def checkFlag(v):
if(len(v) > 1 and v[1][0] != '-'):
return False
else:
if(len(v) < 3 or len(v) > 4):
print ("Error: number of arguments\n")
return False
else:
if(v[1] == "-e"):
enc(v[2])
elif(v[1] == "-d"):
dec(v[2])
else:
print ("Error: unknown flag\n")
return False
return True
# encrypts result file
def enc(file):
# choice = input("Would you like to use a keyfile to encrypt [Y/n]? ")
if len(sys.argv) == 4:
key_file_path = sys.argv[3]
key_file = open(key_file_path, 'rb')
key = key_file.read()
else:
key = sym_key
with open(file, "rb") as stream:
data = stream.read()
print ("Encrypting file", file, "\nWriting output to encrypted.pdf\n")
#retrieve and remove IV
iv = data[(-cphr.block_size):]
data = data[:(-cphr.block_size)]
cbc_e = cphr.new(key, cphr.MODE_CBC, iv)
enc = cbc_e.encrypt(data)
i = (enc.find(cryutil.b(pdfmagic), cphr.block_size))
# insert dummy chunk ending and IV
enc = enc[:i] + cryutil.b(chunk_end) + enc[i:] + iv
if os.name == 'nt':
with open(file.rsplit('\\',1)[0]+'\\'+"encrypted.pdf", "wb") as o:
o.write(enc)
else:
with open(file.rsplit('/',1)[0]+"/"+"encrypted.pdf", "wb") as o:
o.write(enc)
# decrypts result file
def dec(cfile):
if len(sys.argv) == 4:
key_file_path = sys.argv[3]
key_file = open(key_file_path,"rb")
key = key_file.read()
else:
key = sym_key
with open(cfile, "rb") as stream:
data = stream.read()
print ("Decrypting file", cfile, "\nWriting output to file\n")
# remove dummy chunk ending
i = (data.find(cryutil.b(pdfmagic), cphr.block_size))
dec = data[:(i - CHUNK_END_SIZE)] + data[i:]
# retrieve and remove IV
iv = dec[(-cphr.block_size):]
dec = dec[:(-cphr.block_size)]
cbc_d = cphr.new(key, cphr.MODE_CBC, iv)
dec = cbc_d.decrypt(dec)
header = str(dec[:14])
#print(header)
magics = {'PDF', 'PNG', 'JFIF', 'MZ', 'PK'}
fileType = ''
for t in magics:
if t in header:
fileType = t.lower()
break
#print(fileType)
if fileType == 'jfif':
fileType = 'jpg'
elif fileType == 'mz':
fileType = 'exe'
elif fileType == 'pk':
fileType = 'zip'
if fileType != '':
dec += iv
fileName = "decrypted." + fileType
print(cfile.rsplit('/',1)[0] + '/' + fileName)
if os.name == 'nt':
with open(cfile.rsplit('\\',1)[0]+'\\'+fileName, "wb") as o:
o.write(dec)
else:
with open(cfile.rsplit('/',1)[0]+"/"+fileName, "wb") as o:
o.write(dec)
print("File Saved as:",fileName)
else:
print("Filetype not recognized. You may be using the wrong key, or are attempting to convert an unsupported file.")
sys.exit(0)
# pads data to a multiple of cipher block size
def pad(fdata):
return fdata + ((cphr.block_size - len(fdata) % cphr.block_size) * chr(cphr.block_size - len(fdata) % cphr.block_size)).encode()
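# pad() is PKCS#7-style: with a 16-byte block size, pad(b'A' * 15) appends
# b'\x01' and pad(b'A' * 16) appends sixteen b'\x10' bytes, so the padding
# length is always recoverable from the last byte.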
#----------Begin main script----------
if(checkFlag(sys.argv)):
print ("Successfully converted file!")
elif(checkArgs(sys.argv)):
infile1, infile2, outfile = sys.argv[1:4]
generate = False
if len(sys.argv) == 5:
if sys.argv[4] == "yes":
generate = True
if generate:
key = secrets.token_bytes(32) # 32 byte AES key
if os.name == 'nt':
key_file = open(outfile.rsplit('\\',1)[0]+"\\key.txt", "wb")
else:
key_file = open(outfile.rsplit('/',1)[0]+"/key.txt", "wb")
key_file.write(key)
print(key)
else:
key = sym_key
print(key)
with open(infile1, "rb") as data:
infile1 = pad(data.read())
with open(infile2, "rb") as data:
infile2 = pad(data.read())
ptxt = infile1[:cphr.block_size]
ecb = cphr.new(key, cphr.MODE_ECB)
c0 = ecb.decrypt(c0.encode())
initV = ""
for i in range(cphr.block_size):
# x = ord(c0[i]) ^ ord(ptxt[i])
x = c0[i] ^ ptxt[i]
initV += chr(x)
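# Why this IV works (sketch): CBC encrypts the first block as C0 = E_k(IV XOR P0).
# Setting IV = D_k(c0) XOR P0 gives C0 = E_k(D_k(c0)) = c0, so the combined file
# starts with the literal "%PDF-obj\nstream\n" block regardless of infile1.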
cbc_init = cphr.new(key, cphr.MODE_CBC, cryutil.tobytes(initV))
combo = cbc_init.encrypt(infile1)
combo = combo + cryutil.tobytes(chunk_end) + infile2 + cryutil.tobytes(initV)
with open(outfile, "wb") as o:
o.write(combo)
print ("\nSuccessfully wrote combined file", outfile)
print("\nExiting...\n")
#-----------------End-----------------
| 26.615385 | 132 | 0.566635 |
75ec090fc9f80765b7bd28889944deeb4de930c2
| 205 |
py
|
Python
|
setup.py
|
alchermd/headlines
|
bb19459f570e05691f05654633e7615ed2d84085
|
[
"MIT"
] | 1 |
2017-12-20T01:27:49.000Z
|
2017-12-20T01:27:49.000Z
|
setup.py
|
alchermd/headlines
|
bb19459f570e05691f05654633e7615ed2d84085
|
[
"MIT"
] | 4 |
2017-10-22T15:18:30.000Z
|
2017-10-27T18:48:56.000Z
|
setup.py
|
alchermd/headlines
|
bb19459f570e05691f05654633e7615ed2d84085
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name="headlines",
version="0.1",
packages=["headlines"],
install_requires=[
"flask",
"requests"
],
include_package_data=True,
)
| 17.083333 | 30 | 0.595122 |
9b2214994ebbabce9640a0c5054f817efc819add
| 6,066 |
py
|
Python
|
pibooth/config/menu.py
|
chapipo/pibooth
|
a1a77e03b383f94c2a4c0406afee8221a75b3090
|
[
"MIT"
] | null | null | null |
pibooth/config/menu.py
|
chapipo/pibooth
|
a1a77e03b383f94c2a4c0406afee8221a75b3090
|
[
"MIT"
] | 1 |
2019-12-13T18:29:47.000Z
|
2019-12-13T18:29:47.000Z
|
pibooth/config/menu.py
|
chapipo/pibooth
|
a1a77e03b383f94c2a4c0406afee8221a75b3090
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Pibooth config menu.
"""
import pygame
import pygameMenu as pgm
from pygameMenu import controls as pgmctrl
from pygameMenu import events as pgmevt
from pibooth import fonts
from pibooth.config.parser import DEFAULT
pgmctrl.KEY_BACK = pygame.K_ESCAPE
def _find(choices, value):
"""Find index for the given value in choices.
"""
i = 0
for val in choices:
if val[0] == value:
return i
i += 1
return 0
class PiConfigMenu(object):
def __init__(self, window, config, fps):
self.window = window
self.config = config
self._main_menu = None
width = self.window.get_rect().width
height = self.window.get_rect().height
self._main_menu = pgm.Menu(self.window.surface,
width,
height,
fonts.get_filename("Amatic-Bold"),
'Settings',
draw_region_y=55,
font_color=(255, 255, 255),
font_title=fonts.get_filename("Amatic-Bold"),
color_selected=(38, 139, 210),
menu_color=(0, 0, 51),
menu_color_title=(90, 90, 140),
enabled=False,
onclose=self._on_close,
dopause=False,
)
for name in ('GENERAL', 'WINDOW', 'PICTURE', 'PRINTER'):
submenu = self._build_submenu(name, width, height)
self._main_menu.add_option(submenu.get_title(), submenu)
self._main_menu.add_option('Exit Pibooth', pgmevt.EXIT)
self._main_menu.set_fps(fps)
def _build_submenu(self, section, width, height):
"""Build sub-menu"""
menu = pgm.Menu(self.window.surface,
width,
height,
fonts.get_filename("Amatic-Bold"),
section.capitalize(),
font_size=30,
font_color=(255, 255, 255),
font_title=fonts.get_filename("Amatic-Bold"),
color_selected=(38, 139, 100),
menu_color=(0, 0, 51),
menu_color_title=(60, 90, 140),
dopause=False,
)
for name, option in DEFAULT[section].items():
if option[2]:
if isinstance(option[3], str):
menu.add_text_input(option[2],
cursor_color=(255, 255, 255),
onchange=self._on_text_changed,
default=self.config.get(section, name).strip('"'),
# Additional parameters:
section=section,
option=name)
else:
values = [(v,) for v in option[3]]
menu.add_selector(option[2],
values,
onchange=self._on_selector_changed,
default=_find(values, self.config.get(section, name)),
# Additional parameters:
section=section,
option=name)
return menu
def _on_selector_changed(self, value, **kwargs):
"""Called after each option changed.
"""
if self._main_menu.is_enabled():
self.config.set(kwargs['section'], kwargs['option'], str(value[0]))
def _on_text_changed(self, value, **kwargs):
"""Called after each text input changed.
"""
if self._main_menu.is_enabled():
self.config.set(kwargs['section'], kwargs['option'], '"{}"'.format(str(value)))
def _on_close(self):
"""Called when the menu is closed.
"""
self._main_menu.disable()
self.config.save()
def show(self):
"""Show the menu.
"""
self._main_menu.enable()
def is_shown(self):
"""Return True if the menu is shown.
"""
return self._main_menu.is_enabled()
def create_click_event(self):
"""Create a pygame event to click on the currently selected
widget on the menu. If the widget is a button, ENTER event
is created, else LEFT event is created.
"""
if isinstance(self._main_menu.get_selected_widget(), pgm.widgets.Button):
return pygame.event.Event(pygame.KEYDOWN, key=pgmctrl.KEY_APPLY,
unicode='\r', mod=0, scancode=36,
window=None, test=True)
else:
return pygame.event.Event(pygame.KEYDOWN, key=pgmctrl.KEY_RIGHT,
unicode='\uf703', mod=0, scancode=124,
window=None, test=True)
def create_next_event(self):
"""Create a pygame event to select the next widget.
"""
return pygame.event.Event(pygame.KEYDOWN, key=pgmctrl.KEY_MOVE_UP,
unicode='\uf701', mod=0, scancode=125,
window=None, test=True)
def create_back_event(self):
"""Create a pygame event to back to the previous menu.
"""
return pygame.event.Event(pygame.KEYDOWN, key=pgmctrl.KEY_BACK,
unicode='\x1b', mod=0, scancode=53,
window=None, test=True)
def process(self, events):
"""Process the events related to the menu.
"""
self._main_menu.mainloop(events)  # returns immediately: the menus are created with dopause=False
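# Usage sketch (hypothetical main loop): feed the synthesized events back
# through pygame so a hardware button can drive the menu.
# if menu.is_shown():
#     pygame.event.post(menu.create_click_event())
#     menu.process(pygame.event.get())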
| 38.150943 | 92 | 0.479558 |
1fa4870af0ebd3fc3d849b446e62127c9ecbb433
| 1,469 |
py
|
Python
|
src/ospeople/scrape/nc.py
|
Rypo/people
|
34cf10bf10313f154f5065dde578bed9b4127ae8
|
[
"CC0-1.0"
] | null | null | null |
src/ospeople/scrape/nc.py
|
Rypo/people
|
34cf10bf10313f154f5065dde578bed9b4127ae8
|
[
"CC0-1.0"
] | null | null | null |
src/ospeople/scrape/nc.py
|
Rypo/people
|
34cf10bf10313f154f5065dde578bed9b4127ae8
|
[
"CC0-1.0"
] | null | null | null |
from spatula import HtmlPage, HtmlListPage, CSS
from ..models.committees import ScrapeCommittee
class CommitteeDetail(HtmlPage):
example_source = "https://www.ncleg.gov/Committees/CommitteeInfo/SenateStanding/1162"
def get_role(self, text):
if text.endswith("s"):
text = text[:-1]
return text.lower()
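# get_role() simply singularizes and lower-cases the heading text,
# e.g. "Chairs" -> "chair", "Members" -> "member" (illustrative inputs).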
def process_page(self):
com = self.input
com.add_source(self.source.url)
for membership_type in CSS("div#Membership h5").match(self.root):
role = self.get_role(membership_type.text_content())
# sibling div contains members
members = [p.text_content() for p in CSS("a p").match(membership_type.getnext())]
for member in members:
member = member.replace("Rep.", "").replace("Sen.", "").strip()
com.add_member(member, role)
return com
class CommitteeList(HtmlListPage):
source = "https://www.ncleg.gov/committees"
def process_item(self, item):
return CommitteeDetail(
ScrapeCommittee(
name=item.text_content(),
parent=self.chamber,
),
source=item.get("href"),
)
class HouseCommitteeList(CommitteeList):
selector = CSS("#houseStandingSection a.list-group-item")
chamber = "lower"
class SenateCommitteeList(CommitteeList):
selector = CSS("#senateStandingSection a.list-group-item")
chamber = "upper"
| 29.979592 | 93 | 0.627638 |
d4f3f8df2f1080c4e6ff34a135c097aa88a362ba
| 12,282 |
py
|
Python
|
src/cuteSV/cuteSV_resolveINDEL.py
|
bnoyvert/cuteSV
|
58ca0fa051f80f716ef69a39924102abdd4249a0
|
[
"MIT"
] | null | null | null |
src/cuteSV/cuteSV_resolveINDEL.py
|
bnoyvert/cuteSV
|
58ca0fa051f80f716ef69a39924102abdd4249a0
|
[
"MIT"
] | null | null | null |
src/cuteSV/cuteSV_resolveINDEL.py
|
bnoyvert/cuteSV
|
58ca0fa051f80f716ef69a39924102abdd4249a0
|
[
"MIT"
] | null | null | null |
import sys
import numpy as np
from collections import Counter
from cuteSV.cuteSV_genotype import cal_GL, cal_CIPOS, threshold_ref_count, count_coverage
import time
'''
*******************************************
TO DO LIST
*******************************************
1. Identify DP with samfile pointer;
2. Add CIPOS, CILEN and/or CIEND;
3. Determine (IM)PRECISE type.
*******************************************
'''
def resolution_DEL(path, chr, svtype, read_count, threshold_gloab, max_cluster_bias,
minimum_support_reads, bam_path, action, gt_round):
'''
cluster DEL
********************************************************************************************
path: DEL.sigs
chr: chromosome id
svtype: <DEL>
SEQTYPE read_count max_cluster_bias sv_size threshold_gloab threshold_local
--------------------------------------------------------------------------------------------
CCS 3 200 bp (<500 bp) 30 bp 0.4 0.5
CLR 5/10 200 bp (<500 bp) 50 bp 0.3 0.7
--------------------------------------------------------------------------------------------
Input file format
--------------------------------------------------------------------------------------------
column #1 #2 #3 #4 #5
DEL CHR BP LEN ID
#1 deletion type
#2 chromosome number
#3 breakpoint in each read
#4 DEL_len in each read
#5 read ID
********************************************************************************************
'''
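# A signature line therefore looks like (tab-separated, hypothetical values):
# "DEL\tchr1\t10001\t120\tread_007" -> pos=10001, indel_len=120, read_id="read_007".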
semi_del_cluster = list()
semi_del_cluster.append([0,0,''])
candidate_single_SV = list()
file = open(path, 'r')
for line in file:
seq = line.strip('\n').split('\t')
if seq[1] != chr:
continue
pos = int(seq[2])
indel_len = int(seq[3])
read_id = seq[4]
if pos - semi_del_cluster[-1][0] > max_cluster_bias:
if len(semi_del_cluster) >= read_count:
if semi_del_cluster[-1][0] == semi_del_cluster[-1][1] == 0:
pass
else:
generate_del_cluster(semi_del_cluster,
chr,
svtype,
read_count,
threshold_gloab,
# threshold_local,
minimum_support_reads,
candidate_single_SV,
bam_path,
max_cluster_bias,
action,
gt_round)
semi_del_cluster = []
semi_del_cluster.append([pos, indel_len, read_id])
else:
if semi_del_cluster[-1][0] == semi_del_cluster[-1][1] == 0:
semi_del_cluster = []
semi_del_cluster.append([pos, indel_len, read_id])
else:
semi_del_cluster.append([pos, indel_len, read_id])
if len(semi_del_cluster) >= read_count:
if semi_del_cluster[-1][0] == semi_del_cluster[-1][1] == 0:
pass
else:
generate_del_cluster(semi_del_cluster,
chr,
svtype,
read_count,
threshold_gloab,
# threshold_local,
minimum_support_reads,
candidate_single_SV,
bam_path,
max_cluster_bias,
action,
gt_round)
file.close()
return candidate_single_SV
def generate_del_cluster(semi_del_cluster, chr, svtype, read_count,
threshold_gloab, minimum_support_reads, candidate_single_SV,
bam_path, max_cluster_bias, action, gt_round):
'''
generate deletion
*************************************************************
threshold_gloab threshold_local minimum_support_reads
-------------------------------------------------------------
0.3 0.7 5 CLR
0.4 0.5 <=5 CCS
*************************************************************
'''
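# Worked example (hypothetical numbers): with threshold_gloab = 0.3 and
# sorted signature lengths [100, 110, 300], np.mean(...) = 170, so the split
# threshold is 0.3 * 170 = 51 bp; the 100 -> 110 gap (10 bp) stays in one
# allele while the 110 -> 300 gap (190 bp) starts a new one.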
# Remove duplicates
read_tag = dict()
for element in semi_del_cluster:
if element[2] not in read_tag:
read_tag[element[2]] = element
else:
if element[1] > read_tag[element[2]][1]:
read_tag[element[2]] = element
if len(read_tag) < read_count:
return
read_tag2SortedList = sorted(list(read_tag.values()), key=lambda x: x[1])
global_len = [i[1] for i in read_tag2SortedList]
DISCRETE_THRESHOLD_LEN_CLUSTER_DEL_TEMP = threshold_gloab * np.mean(global_len)
last_len = read_tag2SortedList[0][1]
allele_collect = list()
'''
*************************************************************
#1 #2 #3 #4
-------------------------------------------------------------
del-breakpoint del-len #support read-id
*************************************************************
'''
allele_collect.append([[read_tag2SortedList[0][0]],[read_tag2SortedList[0][1]],[],
[read_tag2SortedList[0][2]]])
for i in read_tag2SortedList[1:]:
if i[1] - last_len > DISCRETE_THRESHOLD_LEN_CLUSTER_DEL_TEMP:
allele_collect[-1][2].append(len(allele_collect[-1][0]))
allele_collect.append([[],[],[],[]])
allele_collect[-1][0].append(i[0])
allele_collect[-1][1].append(i[1])
allele_collect[-1][3].append(i[2])
last_len = i[1]
allele_collect[-1][2].append(len(allele_collect[-1][0]))
allele_sort = sorted(allele_collect, key=lambda x: x[2])
for allele in allele_sort:
if allele[2][0] >= minimum_support_reads:
breakpointStart = np.mean(allele[0])
search_threshold = np.min(allele[0])
CIPOS = cal_CIPOS(np.std(allele[0]), len(allele[0]))
signalLen = np.mean(allele[1])
signalLen_STD = np.std(allele[1])
CILEN = cal_CIPOS(np.std(allele[1]), len(allele[1]))
if action:
DV, DR, GT, GL, GQ, QUAL = call_gt(bam_path,
int(search_threshold),
chr,
allele[3],
max_cluster_bias,
gt_round)
else:
DR = '.'
GT = './.'
GL = '.,.,.'
GQ = "."
QUAL = "."
candidate_single_SV.append([chr,
svtype,
str(int(breakpointStart)),
str(int(-signalLen)),
str(allele[2][0]),
str(CIPOS),
str(CILEN),
str(DR),
str(GT),
str(GL),
str(GQ),
str(QUAL),
str(','.join(allele[3]))])
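# Each candidate row above holds, in order: chrom, svtype, start, SVLEN
# (negative for deletions), supporting-read count, CIPOS, CILEN, DR, GT, GL,
# GQ, QUAL and the comma-joined supporting read names.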
def resolution_INS(path, chr, svtype, read_count, threshold_gloab,
max_cluster_bias, minimum_support_reads, bam_path, action, gt_round):
'''
cluster INS
********************************************************************************************
path: INS.sigs
chr: chromosome id
svtype: <INS>
SEQTYPE read_count max_cluster_bias sv_size threshold_gloab threshold_local
--------------------------------------------------------------------------------------------
CCS 3 200 bp (<500 bp) 30 bp 0.65 0.7
CLR 5/10 100 bp (<500 bp) 50 bp 0.2 0.6
--------------------------------------------------------------------------------------------
Input file format
--------------------------------------------------------------------------------------------
column #1 #2 #3 #4 #5
INS CHR BP LEN ID
#1 insertion type
#2 chromosome number
#3 breakpoint in each read
#4 INS_len in each read
#5 read ID
********************************************************************************************
'''
semi_ins_cluster = list()
semi_ins_cluster.append([0,0,'',''])
candidate_single_SV = list()
file = open(path, 'r')
for line in file:
seq = line.strip('\n').split('\t')
if seq[1] != chr:
continue
pos = int(seq[2])
indel_len = int(seq[3])
read_id = seq[4]
try:
ins_seq = seq[5]
except IndexError:
# older signature files may lack the inserted-sequence column
ins_seq = ''
if pos - semi_ins_cluster[-1][0] > max_cluster_bias:
if len(semi_ins_cluster) >= read_count:
if semi_ins_cluster[-1][0] == semi_ins_cluster[-1][1] == 0:
pass
else:
generate_ins_cluster(semi_ins_cluster,
chr,
svtype,
read_count,
threshold_gloab,
# threshold_local,
minimum_support_reads,
candidate_single_SV,
bam_path,
max_cluster_bias,
action,
gt_round)
semi_ins_cluster = []
semi_ins_cluster.append([pos, indel_len, read_id, ins_seq])
else:
if semi_ins_cluster[-1][0] == semi_ins_cluster[-1][1] == 0:
semi_ins_cluster = []
semi_ins_cluster.append([pos, indel_len, read_id, ins_seq])
else:
semi_ins_cluster.append([pos, indel_len, read_id, ins_seq])
if len(semi_ins_cluster) >= read_count:
if semi_ins_cluster[-1][0] == semi_ins_cluster[-1][1] == 0:
pass
else:
generate_ins_cluster(semi_ins_cluster,
chr,
svtype,
read_count,
threshold_gloab,
# threshold_local,
minimum_support_reads,
candidate_single_SV,
bam_path,
max_cluster_bias,
action,
gt_round)
file.close()
return candidate_single_SV
def generate_ins_cluster(semi_ins_cluster, chr, svtype, read_count,
threshold_gloab, minimum_support_reads, candidate_single_SV,
bam_path, max_cluster_bias, action, gt_round):
'''
generate insertion
*************************************************************
threshold_gloab threshold_local minimum_support_reads
-------------------------------------------------------------
0.2 0.6 5 CLR
0.65 0.7 <=5 CCS
*************************************************************
'''
# Remove duplicates
read_tag = dict()
for element in semi_ins_cluster:
if element[2] not in read_tag:
read_tag[element[2]] = element
else:
if element[1] > read_tag[element[2]][1]:
read_tag[element[2]] = element
if len(read_tag) < read_count:
return
read_tag2SortedList = sorted(list(read_tag.values()), key=lambda x: x[1])
# start&end breakpoint
global_len = [i[1] for i in read_tag2SortedList]
DISCRETE_THRESHOLD_LEN_CLUSTER_INS_TEMP = threshold_gloab * np.mean(global_len)
last_len = read_tag2SortedList[0][1]
allele_collect = list()
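# Allele layout mirrors the DEL case with one extra column:
# #1 ins-breakpoint, #2 ins-len, #3 #support, #4 read-id, #5 ins-seq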
allele_collect.append([[read_tag2SortedList[0][0]],
[read_tag2SortedList[0][1]],
[],
[read_tag2SortedList[0][2]],
[read_tag2SortedList[0][3]]])
for i in read_tag2SortedList[1:]:
if i[1] - last_len > DISCRETE_THRESHOLD_LEN_CLUSTER_INS_TEMP:
allele_collect[-1][2].append(len(allele_collect[-1][0]))
allele_collect.append([[],[],[],[],[]])
allele_collect[-1][0].append(i[0])
allele_collect[-1][1].append(i[1])
allele_collect[-1][3].append(i[2])
allele_collect[-1][4].append(i[3])
last_len = i[1]
allele_collect[-1][2].append(len(allele_collect[-1][0]))
allele_sort = sorted(allele_collect, key=lambda x: x[2])
for allele in allele_sort:
if allele[2][0] >= minimum_support_reads:
breakpointStart = np.mean(allele[0])
CIPOS = cal_CIPOS(np.std(allele[0]), len(allele[0]))
signalLen = np.mean(allele[1])
signalLen_STD = np.std(allele[1])
CILEN = cal_CIPOS(np.std(allele[1]), len(allele[1]))
ideal_ins_seq = '<INS>'
for i in allele[4]:
if len(i) >= int(signalLen):
ideal_ins_seq = i[0:int(signalLen)]
break
if ideal_ins_seq == '<INS>':
continue
if action:
DV, DR, GT, GL, GQ, QUAL = call_gt(bam_path,
int(breakpointStart),
chr,
allele[3],
# max_cluster_bias,
1000,
gt_round)
else:
DR = '.'
GT = './.'
GL = '.,.,.'
GQ = "."
QUAL = "."
candidate_single_SV.append([chr,
svtype,
str(int(breakpointStart)),
str(int(signalLen)),
str(allele[2][0]),
str(CIPOS),
str(CILEN),
str(DR),
str(GT),
str(GL),
str(GQ),
str(QUAL),
str(','.join(allele[3])),
ideal_ins_seq])
def run_del(args):
return resolution_DEL(*args)
def run_ins(args):
return resolution_INS(*args)
def call_gt(bam_path, search_threshold, chr, read_id_list, max_cluster_bias, gt_round):
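# Genotype a candidate site: reads covering the window around the breakpoint
# are collected into querydata; those absent from read_id_list are counted as
# reference-supporting (DR), and cal_GL turns (DR, DV) into GT/GL/GQ/QUAL.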
import pysam
querydata = set()
bamfile = pysam.AlignmentFile(bam_path)
search_start = max(int(search_threshold) - max_cluster_bias, 0)
search_end = min(int(search_threshold) + max_cluster_bias, bamfile.get_reference_length(chr))
up_bound = threshold_ref_count(len(read_id_list))
status = count_coverage(chr,
search_start,
search_end,
bamfile,
querydata,
up_bound,
gt_round)
bamfile.close()
if status == -1:
DR = '.'
GT = "./."
GL = ".,.,."
GQ = "."
QUAL = "."
# elif status == 1:
# pass
else:
DR = 0
for query in querydata:
if query not in read_id_list:
DR += 1
GT, GL, GQ, QUAL = cal_GL(DR, len(read_id_list))
return len(read_id_list), DR, GT, GL, GQ, QUAL
| 28.830986 | 94 | 0.544537 |
664051c14160506337dbb2c05bc94647b9517e0a
| 1,074 |
py
|
Python
|
mne/datasets/spm_face/spm_data.py
|
Anevar/mne-python
|
15b19ed6b9364ae4787f0df2fd7e689b3c0a30bb
|
[
"BSD-3-Clause"
] | null | null | null |
mne/datasets/spm_face/spm_data.py
|
Anevar/mne-python
|
15b19ed6b9364ae4787f0df2fd7e689b3c0a30bb
|
[
"BSD-3-Clause"
] | null | null | null |
mne/datasets/spm_face/spm_data.py
|
Anevar/mne-python
|
15b19ed6b9364ae4787f0df2fd7e689b3c0a30bb
|
[
"BSD-3-Clause"
] | 1 |
2019-10-23T06:11:28.000Z
|
2019-10-23T06:11:28.000Z
|
# Authors: Denis Engemann <[email protected]>
#
# License: BSD Style.
import numpy as np
from ...utils import get_config, verbose
from ...fixes import partial
from ..utils import has_dataset, _data_path, _doc
has_spm_data = partial(has_dataset, name='spm')
@verbose
def data_path(path=None, force_update=False, update_path=True,
download=True, verbose=None):
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='spm',
download=download,
verbose=verbose)
data_path.__doc__ = _doc.format(name='spm',
conf='MNE_DATASETS_SPM_DATA_PATH')
# Allow forcing of sample dataset skip (for tests) using:
# `make test-no-sample`
def _skip_spm_sample_data():
skip_spm = get_config('MNE_SKIP_SPM_DATASET_TESTS', 'false') == 'true'
skip = skip_spm or not has_spm_data()
return skip
requires_spm_data = np.testing.dec.skipif(_skip_spm_sample_data,
'Requires spm dataset')
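# Usage sketch (hypothetical test function) for the decorator above:
#
# @requires_spm_data
# def test_spm_something():
#     path = data_path()  # only runs when the dataset is available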
| 30.685714 | 74 | 0.654562 |
223254a99bb002650fb736223ce11cae966b956c
| 3,040 |
py
|
Python
|
coregistration/eolearn/tests/test_coregistration.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
coregistration/eolearn/tests/test_coregistration.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
coregistration/eolearn/tests/test_coregistration.py
|
mohammadrezabk/eo-learn
|
8de3cfd64e74c1e4832e585954cdbf0ee9676eb3
|
[
"MIT"
] | null | null | null |
"""
Credits:
Copyright (c) 2017-2019 Matej Aleksandrov, Matej Batič, Andrej Burja, Eva Erzin (Sinergise)
Copyright (c) 2017-2019 Grega Milčinski, Matic Lubej, Devis Peresutti, Jernej Puc, Tomislav Slijepčević (Sinergise)
Copyright (c) 2017-2019 Blaž Sovdat, Nejc Vesel, Jovan Višnjić, Anže Zupanc, Lojze Žust (Sinergise)
This source code is licensed under the MIT license found in the LICENSE
file in the root directory of this source tree.
"""
import logging
import unittest
import numpy as np
from eolearn.core import EOPatch, FeatureType
from eolearn.coregistration import ECCRegistration, InterpolationType
logging.basicConfig(level=logging.DEBUG)
class TestEOPatch(unittest.TestCase):
def test_registration(self):
# Set up a dummy EOPatch to test execution of registration
bands = np.zeros((2, 20, 20, 1))
bands[1] = np.arange(400).reshape(1, 20, 20, 1) / 400
bands[0] = bands[1]
bands[1, 5:15, 5:15, :] = .5
bands[0, 7:17, 5:15, :] = .5
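# The square in frame 0 is shifted two rows relative to frame 1, giving the
# registration a known vertical translation to recover.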
mask = np.ones((2, 20, 20, 1), dtype=np.int16)
ndvi = np.ones((2, 20, 20, 1))
dem = np.ones((20, 20, 1))
eop = EOPatch()
eop.add_feature(FeatureType.DATA, 'bands', value=bands)
eop.add_feature(FeatureType.DATA, 'ndvi', value=ndvi)
eop.add_feature(FeatureType.MASK, 'cm', value=mask)
eop.add_feature(FeatureType.DATA_TIMELESS, 'dem', value=dem)
reg = ECCRegistration((FeatureType.DATA, 'bands'), valid_mask_feature='cm',
interpolation_type=InterpolationType.NEAREST,
apply_to_features={
FeatureType.DATA: {'bands', 'ndvi'},
FeatureType.MASK: {'cm'}
})
reop = reg.execute(eop)
self.assertEqual(eop.data['bands'].shape, reop.data['bands'].shape,
msg="Shapes of .data['bands'] do not match")
self.assertEqual(eop.data['ndvi'].shape, reop.data['ndvi'].shape,
msg="Shapes of .data['ndvi'] do not match")
self.assertEqual(eop.mask['cm'].shape, reop.mask['cm'].shape,
msg="Shapes of .mask['cm'] do not match")
self.assertEqual(eop.data_timeless['dem'].shape, reop.data_timeless['dem'].shape,
msg="Shapes of .data_timeless['dem'] do not match")
self.assertFalse(np.allclose(eop.data['bands'], reop.data['bands']),
msg="Registration did not warp .data['bands']")
self.assertFalse(np.allclose(eop.data['ndvi'], reop.data['ndvi']),
msg="Registration did not warp .data['ndvi']")
self.assertFalse(np.allclose(eop.mask['cm'], reop.mask['cm']),
msg="Registration did not warp .mask['cm']")
self.assertTrue(np.allclose(eop.data_timeless['dem'], reop.data_timeless['dem']),
msg="Registration warped .data_timeless['dem'] but should not have")
if __name__ == '__main__':
unittest.main()
| 44.705882 | 115 | 0.600329 |
0021df6e7b79e8340120e3a857170ffa947bc0af
| 47,596 |
py
|
Python
|
pdb2pqr-1.9.0/scons/scons-local-2.3.0/SCons/Action.py
|
Acpharis/protein_prep
|
8cc2f0caedefd5a3fdaa764ed013c2660a4df1b8
|
[
"BSD-3-Clause"
] | 9 |
2016-08-17T06:52:10.000Z
|
2020-04-28T04:20:07.000Z
|
pdb2pqr-1.9.0/scons/scons-local-2.3.0/SCons/Action.py
|
Acpharis/protein_prep
|
8cc2f0caedefd5a3fdaa764ed013c2660a4df1b8
|
[
"BSD-3-Clause"
] | null | null | null |
pdb2pqr-1.9.0/scons/scons-local-2.3.0/SCons/Action.py
|
Acpharis/protein_prep
|
8cc2f0caedefd5a3fdaa764ed013c2660a4df1b8
|
[
"BSD-3-Clause"
] | 1 |
2021-03-03T23:20:25.000Z
|
2021-03-03T23:20:25.000Z
|
"""SCons.Action
This encapsulates information about executing any sort of action that
can build one or more target Nodes (typically files) from one or more
source Nodes (also typically files) given a specific Environment.
The base class here is ActionBase. The base class supplies just a few
OO utility methods and some generic methods for displaying information
about an Action in response to the various commands that control printing.
A second-level base class is _ActionAction. This extends ActionBase
by providing the methods that can be used to show and perform an
action. True Action objects will subclass _ActionAction; Action
factory class objects will subclass ActionBase.
The heavy lifting is handled by subclasses for the different types of
actions we might execute:
CommandAction
CommandGeneratorAction
FunctionAction
ListAction
The subclasses supply the following public interface methods used by
other modules:
__call__()
THE public interface, "calling" an Action object executes the
command or Python function. This also takes care of printing
a pre-substitution command for debugging purposes.
get_contents()
Fetches the "contents" of an Action for signature calculation
plus the varlist. This is what gets MD5 checksummed to decide
if a target needs to be rebuilt because its action changed.
genstring()
Returns a string representation of the Action *without*
command substitution, but allows a CommandGeneratorAction to
generate the right action based on the specified target,
source and env. This is used by the Signature subsystem
(through the Executor) to obtain an (imprecise) representation
of the Action operation for informative purposes.
Subclasses also supply the following methods for internal use within
this module:
__str__()
Returns a string approximation of the Action; no variable
substitution is performed.
execute()
The internal method that really, truly, actually handles the
execution of a command or Python function. This is used so
that the __call__() methods can take care of displaying any
pre-substitution representations, and *then* execute an action
without worrying about the specific Actions involved.
get_presig()
Fetches the "contents" of a subclass for signature calculation.
The varlist is added to this to produce the Action's contents.
strfunction()
Returns a substituted string representation of the Action.
This is used by the _ActionAction.show() command to display the
command/function that will be executed to generate the target(s).
There is a related independent ActionCaller class that looks like a
regular Action, and which serves as a wrapper for arbitrary functions
that we want to let the user specify the arguments to now, but actually
execute later (when an out-of-date check determines that it's needed to
be executed, for example). Objects of this class are returned by an
ActionFactory class that provides a __call__() method as a convenient
way for wrapping up the functions.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Action.py 2013/03/03 09:48:35 garyo"
import SCons.compat
import dis
import os
# compat layer imports "cPickle" for us if it's available.
import pickle
import re
import sys
import subprocess
from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Executor
import SCons.Util
import SCons.Subst
# we use these a lot, so try to optimize them
is_String = SCons.Util.is_String
is_List = SCons.Util.is_List
class _null(object):
pass
print_actions = 1
execute_actions = 1
print_actions_presub = 0
def rfile(n):
try:
return n.rfile()
except AttributeError:
return n
def default_exitstatfunc(s):
return s
try:
SET_LINENO = dis.SET_LINENO
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
except AttributeError:
remove_set_lineno_codes = lambda x: x
else:
def remove_set_lineno_codes(code):
result = []
n = len(code)
i = 0
while i < n:
c = code[i]
op = ord(c)
if op >= HAVE_ARGUMENT:
if op != SET_LINENO:
result.append(code[i:i+3])
i = i+3
else:
result.append(c)
i = i+1
return ''.join(result)
strip_quotes = re.compile('^[\'"](.*)[\'"]$')
def _callable_contents(obj):
"""Return the signature contents of a callable Python object.
"""
try:
# Test if obj is a method.
return _function_contents(obj.im_func)
except AttributeError:
try:
# Test if obj is a callable object.
return _function_contents(obj.__call__.im_func)
except AttributeError:
try:
# Test if obj is a code object.
return _code_contents(obj)
except AttributeError:
# Test if obj is a function object.
return _function_contents(obj)
def _object_contents(obj):
"""Return the signature contents of any Python object.
We have to handle the case where object contains a code object
since it can be pickled directly.
"""
try:
# Test if obj is a method.
return _function_contents(obj.im_func)
except AttributeError:
try:
# Test if obj is a callable object.
return _function_contents(obj.__call__.im_func)
except AttributeError:
try:
# Test if obj is a code object.
return _code_contents(obj)
except AttributeError:
try:
# Test if obj is a function object.
return _function_contents(obj)
except AttributeError:
# Should be a picklable Python object.
try:
return pickle.dumps(obj)
except (pickle.PicklingError, TypeError):
# This is weird, but it seems that nested classes
# are unpicklable. The Python docs say it should
# always be a PicklingError, but some Python
# versions seem to return TypeError. Just do
# the best we can.
return str(obj)
def _code_contents(code):
"""Return the signature contents of a code object.
By providing direct access to the code object of the
function, Python makes this extremely easy. Hooray!
Unfortunately, older versions of Python include line
number indications in the compiled byte code. Boo!
So we remove the line number byte codes to prevent
recompilations from moving a Python function.
"""
contents = []
# The code contents depends on the number of local variables
# but not their actual names.
contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames)))
try:
contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars)))
except AttributeError:
# Older versions of Python do not support closures.
contents.append(",0,0")
# The code contents depends on any constants accessed by the
# function. Note that we have to call _object_contents on each
# constants because the code object of nested functions can
# show-up among the constants.
#
# Note that we also always ignore the first entry of co_consts
# which contains the function doc string. We assume that the
# function does not access its doc string.
contents.append(',(' + ','.join(map(_object_contents,code.co_consts[1:])) + ')')
# The code contents depends on the variable names used to
# accessed global variable, as changing the variable name changes
# the variable actually accessed and therefore changes the
# function result.
contents.append(',(' + ','.join(map(_object_contents,code.co_names)) + ')')
# The code contents depends on its actual code!!!
contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')')
return ''.join(contents)
def _function_contents(func):
"""Return the signature contents of a function."""
contents = [_code_contents(func.func_code)]
# The function contents depends on the value of defaults arguments
if func.func_defaults:
contents.append(',(' + ','.join(map(_object_contents,func.func_defaults)) + ')')
else:
contents.append(',()')
# The function contents depends on the closure captured cell values.
try:
closure = func.func_closure or []
except AttributeError:
# Older versions of Python do not support closures.
closure = []
#xxx = [_object_contents(x.cell_contents) for x in closure]
try:
xxx = [_object_contents(x.cell_contents) for x in closure]
except AttributeError:
xxx = []
contents.append(',(' + ','.join(xxx) + ')')
return ''.join(contents)
def _actionAppend(act1, act2):
# This function knows how to slap two actions together.
# Mainly, it handles ListActions by concatenating into
# a single ListAction.
a1 = Action(act1)
a2 = Action(act2)
if a1 is None:
return a2
if a2 is None:
return a1
if isinstance(a1, ListAction):
if isinstance(a2, ListAction):
return ListAction(a1.list + a2.list)
else:
return ListAction(a1.list + [ a2 ])
else:
if isinstance(a2, ListAction):
return ListAction([ a1 ] + a2.list)
else:
return ListAction([ a1, a2 ])
def _do_create_keywords(args, kw):
"""This converts any arguments after the action argument into
their equivalent keywords and adds them to the kw argument.
"""
v = kw.get('varlist', ())
# prevent varlist="FOO" from being interpreted as ['F', 'O', 'O']
if is_String(v): v = (v,)
kw['varlist'] = tuple(v)
if args:
# turn positional args into equivalent keywords
cmdstrfunc = args[0]
if cmdstrfunc is None or is_String(cmdstrfunc):
kw['cmdstr'] = cmdstrfunc
elif callable(cmdstrfunc):
kw['strfunction'] = cmdstrfunc
else:
raise SCons.Errors.UserError(
'Invalid command display variable type. '
'You must either pass a string or a callback which '
'accepts (target, source, env) as parameters.')
if len(args) > 1:
kw['varlist'] = args[1:] + kw['varlist']
if kw.get('strfunction', _null) is not _null \
and kw.get('cmdstr', _null) is not _null:
raise SCons.Errors.UserError(
'Cannot have both strfunction and cmdstr args to Action()')
def _do_create_action(act, kw):
"""This is the actual "implementation" for the
Action factory method, below. This handles the
fact that passing lists to Action() itself has
different semantics than passing lists as elements
of lists.
The former will create a ListAction, the latter
will create a CommandAction by converting the inner
list elements to strings."""
if isinstance(act, ActionBase):
return act
if is_List(act):
return CommandAction(act, **kw)
if callable(act):
try:
gen = kw['generator']
del kw['generator']
except KeyError:
gen = 0
if gen:
action_type = CommandGeneratorAction
else:
action_type = FunctionAction
return action_type(act, kw)
if is_String(act):
var=SCons.Util.get_environment_var(act)
if var:
# This looks like a string that is purely an Environment
# variable reference, like "$FOO" or "${FOO}". We do
# something special here...we lazily evaluate the contents
# of that Environment variable, so a user could put something
# like a function or a CommandGenerator in that variable
# instead of a string.
return LazyAction(var, kw)
commands = str(act).split('\n')
if len(commands) == 1:
return CommandAction(commands[0], **kw)
# The list of string commands may include a LazyAction, so we
# reprocess them via _do_create_list_action.
return _do_create_list_action(commands, kw)
# Catch a common error case with a nice message:
if isinstance(act, int) or isinstance(act, float):
raise TypeError("Don't know how to create an Action from a number (%s)"%act)
# Else fail silently (???)
return None
def _do_create_list_action(act, kw):
"""A factory for list actions. Convert the input list into Actions
and then wrap them in a ListAction."""
acts = []
for a in act:
aa = _do_create_action(a, kw)
if aa is not None: acts.append(aa)
if not acts:
return ListAction([])
elif len(acts) == 1:
return acts[0]
else:
return ListAction(acts)
def Action(act, *args, **kw):
"""A factory for action objects."""
# Really simple: the _do_create_* routines do the heavy lifting.
_do_create_keywords(args, kw)
if is_List(act):
return _do_create_list_action(act, kw)
return _do_create_action(act, kw)
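# Illustrative factory calls (hypothetical commands/functions):
#
#   Action('$CC -c -o $TARGET $SOURCE')   # -> CommandAction
#   Action(build_it, varlist=['FOO'])     # -> FunctionAction, signature tracks $FOO
#   Action(['$CC ...', build_it])         # -> ListAction wrapping both kinds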
class ActionBase(object):
"""Base class for all types of action objects that can be held by
other objects (Builders, Executors, etc.) This provides the
common methods for manipulating and combining those actions."""
def __cmp__(self, other):
return cmp(self.__dict__, other)
def no_batch_key(self, env, target, source):
return None
batch_key = no_batch_key
def genstring(self, target, source, env):
return str(self)
def get_contents(self, target, source, env):
result = [ self.get_presig(target, source, env) ]
# This should never happen, as the Action() factory should wrap
# the varlist, but just in case an action is created directly,
# we duplicate this check here.
vl = self.get_varlist(target, source, env)
if is_String(vl): vl = (vl,)
for v in vl:
result.append(env.subst('${'+v+'}'))
return ''.join(result)
def __add__(self, other):
return _actionAppend(self, other)
def __radd__(self, other):
return _actionAppend(other, self)
def presub_lines(self, env):
# CommandGeneratorAction needs a real environment
# in order to return the proper string here, since
# it may call LazyAction, which looks up a key
# in that env. So we temporarily remember the env here,
# and CommandGeneratorAction will use this env
# when it calls its _generate method.
self.presub_env = env
lines = str(self).split('\n')
self.presub_env = None # don't need this any more
return lines
def get_varlist(self, target, source, env, executor=None):
return self.varlist
def get_targets(self, env, executor):
"""
Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used
by this action.
"""
return self.targets
class _ActionAction(ActionBase):
"""Base class for actions that create output objects."""
def __init__(self, cmdstr=_null, strfunction=_null, varlist=(),
presub=_null, chdir=None, exitstatfunc=None,
batch_key=None, targets='$TARGETS',
**kw):
self.cmdstr = cmdstr
if strfunction is not _null:
if strfunction is None:
self.cmdstr = None
else:
self.strfunction = strfunction
self.varlist = varlist
self.presub = presub
self.chdir = chdir
if not exitstatfunc:
exitstatfunc = default_exitstatfunc
self.exitstatfunc = exitstatfunc
self.targets = targets
if batch_key:
if not callable(batch_key):
# They have set batch_key, but not to their own
# callable. The default behavior here will batch
# *all* targets+sources using this action, separated
# for each construction environment.
def default_batch_key(self, env, target, source):
return (id(self), id(env))
batch_key = default_batch_key
SCons.Util.AddMethod(self, batch_key, 'batch_key')
def print_cmd_line(self, s, target, source, env):
# In python 3, and in some of our tests, sys.stdout is
# a String io object, and it takes unicode strings only
# In other cases it's a regular Python 2.x file object
# which takes strings (bytes), and if you pass those a
# unicode object they try to decode with 'ascii' codec
# which fails if the cmd line has any hi-bit-set chars.
# This code assumes s is a regular string, but should
# work if it's unicode too.
try:
sys.stdout.write(unicode(s + "\n"))
except UnicodeDecodeError:
sys.stdout.write(s + "\n")
def __call__(self, target, source, env,
exitstatfunc=_null,
presub=_null,
show=_null,
execute=_null,
chdir=_null,
executor=None):
if not is_List(target):
target = [target]
if not is_List(source):
source = [source]
if presub is _null:
presub = self.presub
if presub is _null:
presub = print_actions_presub
if exitstatfunc is _null: exitstatfunc = self.exitstatfunc
if show is _null: show = print_actions
if execute is _null: execute = execute_actions
if chdir is _null: chdir = self.chdir
save_cwd = None
if chdir:
save_cwd = os.getcwd()
try:
chdir = str(chdir.abspath)
except AttributeError:
if not is_String(chdir):
if executor:
chdir = str(executor.batches[0].targets[0].dir)
else:
chdir = str(target[0].dir)
if presub:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
t = ' and '.join(map(str, target))
l = '\n '.join(self.presub_lines(env))
out = u"Building %s with action:\n %s\n" % (t, l)
sys.stdout.write(out)
cmd = None
if show and self.strfunction:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
try:
cmd = self.strfunction(target, source, env, executor)
except TypeError:
cmd = self.strfunction(target, source, env)
if cmd:
if chdir:
cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd
try:
get = env.get
except AttributeError:
print_func = self.print_cmd_line
else:
print_func = get('PRINT_CMD_LINE_FUNC')
if not print_func:
print_func = self.print_cmd_line
print_func(cmd, target, source, env)
stat = 0
if execute:
if chdir:
os.chdir(chdir)
try:
stat = self.execute(target, source, env, executor=executor)
if isinstance(stat, SCons.Errors.BuildError):
s = exitstatfunc(stat.status)
if s:
stat.status = s
else:
stat = s
else:
stat = exitstatfunc(stat)
finally:
if save_cwd:
os.chdir(save_cwd)
if cmd and save_cwd:
print_func('os.chdir(%s)' % repr(save_cwd), target, source, env)
return stat
def _string_from_cmd_list(cmd_list):
"""Takes a list of command line arguments and returns a pretty
representation for printing."""
cl = []
for arg in map(str, cmd_list):
if ' ' in arg or '\t' in arg:
arg = '"' + arg + '"'
cl.append(arg)
return ' '.join(cl)
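# e.g. _string_from_cmd_list(['gcc', '-o', 'my prog', 'a.c'])
# -> 'gcc -o "my prog" a.c' (arguments containing whitespace are quoted)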
# A fiddlin' little function that has an 'import SCons.Environment' which
# can't be moved to the top level without creating an import loop. Since
# this import creates a local variable named 'SCons', it blocks access to
# the global variable, so we move it here to prevent complaints about local
# variables being used uninitialized.
default_ENV = None
def get_default_ENV(env):
global default_ENV
try:
return env['ENV']
except KeyError:
if not default_ENV:
import SCons.Environment
# This is a hideously expensive way to get a default shell
# environment. What it really should do is run the platform
# setup to get the default ENV. Fortunately, it's incredibly
# rare for an Environment not to have a shell environment, so
# we're not going to worry about it overmuch.
default_ENV = SCons.Environment.Environment()['ENV']
return default_ENV
# This function is still in draft mode. We're going to need something like
# it in the long run as more and more places use subprocess, but I'm sure
# it'll have to be tweaked to get the full desired functionality.
# one special arg (so far?), 'error', to tell what to do with exceptions.
def _subproc(scons_env, cmd, error = 'ignore', **kw):
"""Do common setup for a subprocess.Popen() call"""
# allow std{in,out,err} to be "'devnull'"
io = kw.get('stdin')
if is_String(io) and io == 'devnull':
kw['stdin'] = open(os.devnull)
io = kw.get('stdout')
if is_String(io) and io == 'devnull':
kw['stdout'] = open(os.devnull, 'w')
io = kw.get('stderr')
if is_String(io) and io == 'devnull':
kw['stderr'] = open(os.devnull, 'w')
# Figure out what shell environment to use
ENV = kw.get('env', None)
if ENV is None: ENV = get_default_ENV(scons_env)
# Ensure that the ENV values are all strings:
new_env = {}
for key, value in ENV.items():
if is_List(value):
# If the value is a list, then we assume it is a path list,
# because that's a pretty common list-like value to stick
# in an environment variable:
value = SCons.Util.flatten_sequence(value)
new_env[key] = os.pathsep.join(map(str, value))
else:
# It's either a string or something else. If it's a string,
# we still want to call str() because it might be a *Unicode*
# string, which makes subprocess.Popen() gag. If it isn't a
# string or a list, then we just coerce it to a string, which
# is the proper way to handle Dir and File instances and will
# produce something reasonable for just about everything else:
new_env[key] = str(value)
kw['env'] = new_env
try:
return subprocess.Popen(cmd, **kw)
except EnvironmentError, e:
if error == 'raise': raise
# return a dummy Popen instance that only returns error
class dummyPopen(object):
def __init__(self, e): self.exception = e
def communicate(self): return ('','')
def wait(self): return -self.exception.errno
stdin = None
class f(object):
def read(self): return ''
def readline(self): return ''
stdout = stderr = f()
return dummyPopen(e)
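# Illustrative call (hypothetical command), relying on the 'devnull'
# handling above:
#
#   p = _subproc(env, ['cc', '--version'], stdin='devnull',
#                stdout=subprocess.PIPE, stderr='devnull')
#   out, err = p.communicate()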
class CommandAction(_ActionAction):
"""Class for command-execution actions."""
def __init__(self, cmd, **kw):
# Cmd can actually be a list or a single item; if it's a
# single item it should be the command string to execute; if a
# list then it should be the words of the command string to
# execute. Only a single command should be executed by this
# object; lists of commands should be handled by embedding
# these objects in a ListAction object (which the Action()
# factory above does). cmd will be passed to
# Environment.subst_list() for substituting environment
# variables.
if __debug__: logInstanceCreation(self, 'Action.CommandAction')
_ActionAction.__init__(self, **kw)
if is_List(cmd):
if list(filter(is_List, cmd)):
raise TypeError("CommandAction should be given only " \
"a single command")
self.cmd_list = cmd
def __str__(self):
if is_List(self.cmd_list):
return ' '.join(map(str, self.cmd_list))
return str(self.cmd_list)
def process(self, target, source, env, executor=None):
if executor:
result = env.subst_list(self.cmd_list, 0, executor=executor)
else:
result = env.subst_list(self.cmd_list, 0, target, source)
silent = None
ignore = None
while True:
try: c = result[0][0][0]
except IndexError: c = None
if c == '@': silent = 1
elif c == '-': ignore = 1
else: break
result[0][0] = result[0][0][1:]
try:
if not result[0][0]:
result[0] = result[0][1:]
except IndexError:
pass
return result, ignore, silent
def strfunction(self, target, source, env, executor=None):
if self.cmdstr is None:
return None
if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
else:
c = env.subst(self.cmdstr, SUBST_RAW, target, source)
if c:
return c
cmd_list, ignore, silent = self.process(target, source, env, executor)
if silent:
return ''
return _string_from_cmd_list(cmd_list[0])
def execute(self, target, source, env, executor=None):
"""Execute a command action.
This will handle lists of commands as well as individual commands,
because construction variable substitution may turn a single
"command" into a list. This means that this class can actually
handle lists of commands, even though that's not how we use it
externally.
"""
escape_list = SCons.Subst.escape_list
flatten_sequence = SCons.Util.flatten_sequence
try:
shell = env['SHELL']
except KeyError:
raise SCons.Errors.UserError('Missing SHELL construction variable.')
try:
spawn = env['SPAWN']
except KeyError:
raise SCons.Errors.UserError('Missing SPAWN construction variable.')
else:
if is_String(spawn):
spawn = env.subst(spawn, raw=1, conv=lambda x: x)
escape = env.get('ESCAPE', lambda x: x)
ENV = get_default_ENV(env)
# Ensure that the ENV values are all strings:
for key, value in ENV.items():
if not is_String(value):
if is_List(value):
# If the value is a list, then we assume it is a
# path list, because that's a pretty common list-like
# value to stick in an environment variable:
value = flatten_sequence(value)
ENV[key] = os.pathsep.join(map(str, value))
else:
# If it isn't a string or a list, then we just coerce
# it to a string, which is the proper way to handle
# Dir and File instances and will produce something
# reasonable for just about everything else:
ENV[key] = str(value)
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
cmd_list, ignore, silent = self.process(target, list(map(rfile, source)), env, executor)
# Use len() to filter out any "command" that's zero-length.
for cmd_line in filter(len, cmd_list):
# Escape the command line for the interpreter we are using.
cmd_line = escape_list(cmd_line, escape)
result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
if not ignore and result:
msg = "Error %s" % result
return SCons.Errors.BuildError(errstr=msg,
status=result,
action=self,
command=cmd_line)
return 0
def get_presig(self, target, source, env, executor=None):
"""Return the signature contents of this action's command line.
This strips everything between $( and $) in the string,
since those parts don't affect signatures.
"""
from SCons.Subst import SUBST_SIG
cmd = self.cmd_list
if is_List(cmd):
cmd = ' '.join(map(str, cmd))
else:
cmd = str(cmd)
if executor:
return env.subst_target_source(cmd, SUBST_SIG, executor=executor)
else:
return env.subst_target_source(cmd, SUBST_SIG, target, source)
def get_implicit_deps(self, target, source, env, executor=None):
icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True)
if is_String(icd) and icd[:1] == '$':
icd = env.subst(icd)
if not icd or icd in ('0', 'None'):
return []
from SCons.Subst import SUBST_SIG
if executor:
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor)
else:
cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source)
res = []
for cmd_line in cmd_list:
if cmd_line:
d = str(cmd_line[0])
m = strip_quotes.match(d)
if m:
d = m.group(1)
d = env.WhereIs(d)
if d:
res.append(env.fs.File(d))
return res
class CommandGeneratorAction(ActionBase):
"""Class for command-generator actions."""
def __init__(self, generator, kw):
if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction')
self.generator = generator
self.gen_kw = kw
self.varlist = kw.get('varlist', ())
self.targets = kw.get('targets', '$TARGETS')
def _generate(self, target, source, env, for_signature, executor=None):
# ensure that target is a list, to make it easier to write
# generator functions:
if not is_List(target):
target = [target]
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
ret = self.generator(target=target,
source=source,
env=env,
for_signature=for_signature)
gen_cmd = Action(ret, **self.gen_kw)
if not gen_cmd:
raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret))
return gen_cmd
def __str__(self):
try:
env = self.presub_env
except AttributeError:
env = None
if env is None:
env = SCons.Defaults.DefaultEnvironment()
act = self._generate([], [], env, 1)
return str(act)
def batch_key(self, env, target, source):
return self._generate(target, source, env, 1).batch_key(env, target, source)
def genstring(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).genstring(target, source, env)
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None):
act = self._generate(target, source, env, 0, executor)
if act is None:
raise SCons.Errors.UserError("While building `%s': "
"Cannot deduce file extension from source files: %s"
% (repr(list(map(str, target))), repr(list(map(str, source)))))
return act(target, source, env, exitstatfunc, presub,
show, execute, chdir, executor)
def get_presig(self, target, source, env, executor=None):
"""Return the signature contents of this action's command line.
This strips everything between $( and $) in the string,
since those parts don't affect signatures.
"""
return self._generate(target, source, env, 1, executor).get_presig(target, source, env)
def get_implicit_deps(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env)
def get_varlist(self, target, source, env, executor=None):
return self._generate(target, source, env, 1, executor).get_varlist(target, source, env, executor)
def get_targets(self, env, executor):
return self._generate(None, None, env, 1, executor).get_targets(env, executor)
# A LazyAction is a kind of hybrid generator and command action for
# strings of the form "$VAR". These strings normally expand to other
# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also
# want to be able to replace them with functions in the construction
# environment. Consequently, we want lazy evaluation and creation of
# an Action in the case of the function, but that's overkill in the more
# normal case of expansion to other strings.
#
# So we do this with a subclass that's both a generator *and*
# a command action. The overridden methods all do a quick check
# of the construction variable, and if it's a string we just call
# the corresponding CommandAction method to do the heavy lifting.
# If not, then we call the same-named CommandGeneratorAction method.
# The CommandGeneratorAction methods work by using the overridden
# _generate() method, that is, our own way of handling "generation" of
# an action based on what's in the construction variable.
class LazyAction(CommandGeneratorAction, CommandAction):
def __init__(self, var, kw):
if __debug__: logInstanceCreation(self, 'Action.LazyAction')
#FUTURE CommandAction.__init__(self, '${'+var+'}', **kw)
CommandAction.__init__(self, '${'+var+'}', **kw)
self.var = SCons.Util.to_String(var)
self.gen_kw = kw
def get_parent_class(self, env):
c = env.get(self.var)
if is_String(c) and not '\n' in c:
return CommandAction
return CommandGeneratorAction
def _generate_cache(self, env):
if env:
c = env.get(self.var, '')
else:
c = ''
gen_cmd = Action(c, **self.gen_kw)
if not gen_cmd:
raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c)))
return gen_cmd
def _generate(self, target, source, env, for_signature, executor=None):
return self._generate_cache(env)
def __call__(self, target, source, env, *args, **kw):
c = self.get_parent_class(env)
return c.__call__(self, target, source, env, *args, **kw)
def get_presig(self, target, source, env):
c = self.get_parent_class(env)
return c.get_presig(self, target, source, env)
def get_varlist(self, target, source, env, executor=None):
c = self.get_parent_class(env)
return c.get_varlist(self, target, source, env, executor)
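# Illustrative sketch (hypothetical construction variable) of the two paths
# a LazyAction can take when executed:
#
#   env['MYCOM'] = '$CC -c -o $TARGET $SOURCE'  # string -> CommandAction path
#   env['MYCOM'] = my_python_function           # callable -> generator path
#   act = Action('$MYCOM')  # a pure-variable string yields a LazyAction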
class FunctionAction(_ActionAction):
"""Class for Python function actions."""
def __init__(self, execfunction, kw):
if __debug__: logInstanceCreation(self, 'Action.FunctionAction')
self.execfunction = execfunction
try:
self.funccontents = _callable_contents(execfunction)
except AttributeError:
try:
# See if execfunction will do the heavy lifting for us.
self.gc = execfunction.get_contents
except AttributeError:
# This is weird, just do the best we can.
self.funccontents = _object_contents(execfunction)
_ActionAction.__init__(self, **kw)
def function_name(self):
try:
return self.execfunction.__name__
except AttributeError:
try:
return self.execfunction.__class__.__name__
except AttributeError:
return "unknown_python_function"
def strfunction(self, target, source, env, executor=None):
if self.cmdstr is None:
return None
if self.cmdstr is not _null:
from SCons.Subst import SUBST_RAW
if executor:
c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
else:
c = env.subst(self.cmdstr, SUBST_RAW, target, source)
if c:
return c
def array(a):
def quote(s):
try:
str_for_display = s.str_for_display
except AttributeError:
s = repr(s)
else:
s = str_for_display()
return s
return '[' + ", ".join(map(quote, a)) + ']'
try:
strfunc = self.execfunction.strfunction
except AttributeError:
pass
else:
if strfunc is None:
return None
if callable(strfunc):
return strfunc(target, source, env)
name = self.function_name()
tstr = array(target)
sstr = array(source)
return "%s(%s, %s)" % (name, tstr, sstr)
def __str__(self):
name = self.function_name()
if name == 'ActionCaller':
return str(self.execfunction)
return "%s(target, source, env)" % name
def execute(self, target, source, env, executor=None):
exc_info = (None,None,None)
try:
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
rsources = list(map(rfile, source))
try:
result = self.execfunction(target=target, source=rsources, env=env)
except KeyboardInterrupt, e:
raise
except SystemExit, e:
raise
except Exception, e:
result = e
exc_info = sys.exc_info()
if result:
result = SCons.Errors.convert_to_BuildError(result, exc_info)
result.node=target
result.action=self
try:
result.command=self.strfunction(target, source, env, executor)
except TypeError:
result.command=self.strfunction(target, source, env)
# FIXME: This maintains backward compatibility with respect to
# which type of exceptions were returned by raising an
# exception and which ones were returned by value. It would
# probably be best to always return them by value here, but
# some codes do not check the return value of Actions and I do
# not have the time to modify them at this point.
if (exc_info[1] and
not isinstance(exc_info[1],EnvironmentError)):
raise result
return result
finally:
# Break the cycle between the traceback object and this
# function stack frame. See the sys.exc_info() doc info for
# more information about this issue.
del exc_info
def get_presig(self, target, source, env):
"""Return the signature contents of this callable action."""
try:
return self.gc(target, source, env)
except AttributeError:
return self.funccontents
def get_implicit_deps(self, target, source, env):
return []
class ListAction(ActionBase):
"""Class for lists of other actions."""
def __init__(self, actionlist):
if __debug__: logInstanceCreation(self, 'Action.ListAction')
def list_of_actions(x):
if isinstance(x, ActionBase):
return x
return Action(x)
self.list = list(map(list_of_actions, actionlist))
# our children will have had any varlist
# applied; we don't need to do it again
self.varlist = ()
self.targets = '$TARGETS'
def genstring(self, target, source, env):
return '\n'.join([a.genstring(target, source, env) for a in self.list])
def __str__(self):
return '\n'.join(map(str, self.list))
def presub_lines(self, env):
return SCons.Util.flatten_sequence(
[a.presub_lines(env) for a in self.list])
def get_presig(self, target, source, env):
"""Return the signature contents of this action list.
Simple concatenation of the signatures of the elements.
"""
return "".join([x.get_contents(target, source, env) for x in self.list])
def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
show=_null, execute=_null, chdir=_null, executor=None):
if executor:
target = executor.get_all_targets()
source = executor.get_all_sources()
for act in self.list:
stat = act(target, source, env, exitstatfunc, presub,
show, execute, chdir, executor)
if stat:
return stat
return 0
def get_implicit_deps(self, target, source, env):
result = []
for act in self.list:
result.extend(act.get_implicit_deps(target, source, env))
return result
def get_varlist(self, target, source, env, executor=None):
result = SCons.Util.OrderedDict()
for act in self.list:
for var in act.get_varlist(target, source, env, executor):
result[var] = True
return list(result.keys())
class ActionCaller(object):
"""A class for delaying calling an Action function with specific
(positional and keyword) arguments until the Action is actually
executed.
This class looks to the rest of the world like a normal Action object,
but what it's really doing is hanging on to the arguments until we
have a target, source and env to use for the expansion.
"""
def __init__(self, parent, args, kw):
self.parent = parent
self.args = args
self.kw = kw
def get_contents(self, target, source, env):
actfunc = self.parent.actfunc
try:
# "self.actfunc" is a function.
contents = str(actfunc.func_code.co_code)
except AttributeError:
# "self.actfunc" is a callable object.
try:
contents = str(actfunc.__call__.im_func.func_code.co_code)
except AttributeError:
# No __call__() method, so it might be a builtin
# or something like that. Do the best we can.
contents = str(actfunc)
contents = remove_set_lineno_codes(contents)
return contents
def subst(self, s, target, source, env):
# If s is a list, recursively apply subst()
# to every element in the list
if is_List(s):
result = []
for elem in s:
result.append(self.subst(elem, target, source, env))
return self.parent.convert(result)
# Special-case hack: Let a custom function wrapped in an
# ActionCaller get at the environment through which the action
# was called by using this hard-coded value as a special return.
if s == '$__env__':
return env
elif is_String(s):
return env.subst(s, 1, target, source)
return self.parent.convert(s)
def subst_args(self, target, source, env):
return [self.subst(x, target, source, env) for x in self.args]
def subst_kw(self, target, source, env):
kw = {}
for key in self.kw.keys():
kw[key] = self.subst(self.kw[key], target, source, env)
return kw
def __call__(self, target, source, env, executor=None):
args = self.subst_args(target, source, env)
kw = self.subst_kw(target, source, env)
return self.parent.actfunc(*args, **kw)
def strfunction(self, target, source, env):
args = self.subst_args(target, source, env)
kw = self.subst_kw(target, source, env)
return self.parent.strfunc(*args, **kw)
def __str__(self):
return self.parent.strfunc(*self.args, **self.kw)
class ActionFactory(object):
"""A factory class that will wrap up an arbitrary function
as an SCons-executable Action object.
The real heavy lifting here is done by the ActionCaller class.
We just collect the (positional and keyword) arguments that we're
called with and give them to the ActionCaller object we create,
so it can hang onto them until it needs them.
"""
def __init__(self, actfunc, strfunc, convert=lambda x: x):
self.actfunc = actfunc
self.strfunc = strfunc
self.convert = convert
def __call__(self, *args, **kw):
ac = ActionCaller(self, args, kw)
action = Action(ac, strfunction=ac.strfunction)
return action
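# Illustrative use (os.chmod is just a convenient stand-in callable; the
# display format string is hypothetical):
#
#   Chmod = ActionFactory(os.chmod,
#                         lambda dest, mode: 'Chmod("%s", 0%o)' % (dest, mode))
#
# Chmod('$TARGET', 0755) then returns an ActionCaller that substitutes
# '$TARGET' at execution time before invoking os.chmod.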
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 37.834658 | 134 | 0.605555 |
14321e804e2bc8a41a2004cfb9b0191664a4f0d4
| 5,419 |
py
|
Python
|
MP_WN_WE/util.py
|
albpurpura/PE4IR
|
54c5d471181cdb64225ecd738577b9f1f94c8d24
|
[
"Apache-2.0"
] | null | null | null |
MP_WN_WE/util.py
|
albpurpura/PE4IR
|
54c5d471181cdb64225ecd738577b9f1f94c8d24
|
[
"Apache-2.0"
] | null | null | null |
MP_WN_WE/util.py
|
albpurpura/PE4IR
|
54c5d471181cdb64225ecd738577b9f1f94c8d24
|
[
"Apache-2.0"
] | null | null | null |
"""
Author: Alberto Purpura
Copyright: (C) 2019-2020 Department of Information Engineering (DEI) <http://www.dei.unipd.it/>, University of Padua <http://www.unipd.it/>, Italy
License: <http://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0>
"""
import csv
import io
import json
import os
import pickle
import platform
import subprocess
import numpy as np
import krovetz
import string
from tqdm import tqdm
from whoosh.analysis import StemmingAnalyzer, StandardAnalyzer
kstemmer = krovetz.PyKrovetzStemmer()
# adjust these paths to match the machine the code runs on
TREC_EVAL_PATH = '../../trec_eval.8.1/trec_eval'
with open('../data/indri_stoplist_eng.txt', 'r') as slf:
sw = slf.readlines()
sw = [word.strip() for word in sw]
def save_json(model, output_path):
with open(output_path, 'w') as outfile:
json.dump(model, outfile)
def load_json(path):
with open(path, 'r') as json_file:
return json.load(json_file)
def save_model(model, output_path):
with open(output_path, 'wb') as handle:
pickle.dump(model, handle)
handle.close()
def load_model(path):
model = pickle.load(open(path, 'rb'))
return model
def run_trec_eval(trec_eval_path=TREC_EVAL_PATH,
qrels_file='data/cran/processed_corpus/cranfield.qrel',
run_to_eval='/Users/albertopurpura/PycharmProjects/ml4ir_git/results/re_ranking_output_lmnn.txt'):
print('using the qrels file: %s' % qrels_file)
if True or platform.node() == 'Albertos-MacBook-Pro.local' or platform.node() == 'hopper' or platform.node() == 'alberto-Alienware-15-R4':
command = os.path.join(os.getcwd(), trec_eval_path) + ' ' \
+ os.path.join(os.getcwd(), qrels_file) + ' ' \
+ os.path.join(os.getcwd(), run_to_eval) + ' | grep "^map" '
else:
command = os.path.join(os.getcwd(), trec_eval_path) + ' -m map ' \
+ os.path.join(os.getcwd(), qrels_file) + ' ' \
+ os.path.join(os.getcwd(), run_to_eval)
print(command)
(map_line, err) = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True).communicate()
map_line = map_line.decode("utf-8")
if len(map_line) > 0:
map_value = map_line.split('\t')[2]
else:
print('Error computing the map value!')
map_value = -1
return float(map_value)
def contains_digits(token):
for c in token:
if c.isdigit():
return True
return False
def tokenize(text, stemming=True, stoplist=None):
# kstemmer = Stemmer()
translator = str.maketrans(string.punctuation, ' ' * len(string.punctuation)) # map punctuation to space
text = text.translate(translator)
text = text.lower()
text = text.strip()
table = str.maketrans({key: None for key in string.punctuation})
text = text.translate(table)
if stemming:
analyzer = StemmingAnalyzer(stoplist=stoplist, minsize=2, stemfn=kstemmer.stem)
else:
analyzer = StandardAnalyzer(stoplist=stoplist, minsize=2)
tokens = [token.text for token in analyzer(text)]
tokens = [word for word in tokens if not contains_digits(word)]
return tokens
def stem(word):
return kstemmer.stem(word)
def create_evaluate_ranking(step, rel_docs_by_qry, sim_scores_by_qry, gt_file, prog_name,
output_folder=os.path.dirname(os.path.realpath(__file__))):
output_file = os.path.join(output_folder, prog_name + '_' + str(step) + '.txt')
out = open(output_file, 'w')
for q, rd in rel_docs_by_qry.items():
for i in range(len(rd)):
dname = rd[i]
sim_score = sim_scores_by_qry[q][i]
line = str(q) + ' Q0 ' + str(dname) + ' ' + str(i) + ' ' + str(sim_score) + ' ' + prog_name + '\n'
out.write(line)
out.close()
map_v = run_trec_eval(run_to_eval=output_file, qrels_file=gt_file)
# os.remove(output_file)
return map_v
def build_inv_index(docs):
"""
Builds the inverted index of the document collection, i.e. a dictionary with the words in the collection as keys and as values
the doc ids where the term appears with the corresponding frequency.
:param docs: a dictionary of tokenized documents accessible by doc id as key
:return: inverted index. i.e. a dictionary with the words in the collection as keys and as values the doc ids where the
term appears with the corresponding frequency
"""
inverted_idx = {}
for doc_id in tqdm(range(len(docs))):
d = docs[doc_id]
set_w_in_doc = set(d)
for w in set_w_in_doc:
if w in inverted_idx.keys():
inverted_idx[w].append((doc_id, d.count(w)))
else:
inverted_idx[w] = [(doc_id, d.count(w))]
return inverted_idx
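# Illustrative example (not part of the original module):
#   build_inv_index([['a', 'b', 'a'], ['b']])
#   -> {'a': [(0, 2)], 'b': [(0, 1), (1, 1)]}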
def invert_wi(wi):
iwi = {}
for k, v in wi.items():
iwi[v] = k
return iwi
def _compute_idf(inv_idx, doc_n):
words = list(inv_idx.keys())
d_freqs = {}
for k in tqdm(words):
v = inv_idx[k]
df = len(v)
d_freqs[k] = df
idf_scores = {}
for w in words:
idf_scores[w] = np.log((1 + doc_n) / (d_freqs[w] + 1))
return idf_scores
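# Illustrative check (not part of the original module): with doc_n = 3 and a
# term appearing in one document, idf = log((1 + 3) / (1 + 1)) = log(2) ~ 0.693.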
def load_indri_stopwords():
fpath = './data/indri_stoplist_eng.txt'
sws = []
for line in open(fpath, 'r'):
sws.append(line.strip())
return sws
| 31.876471 | 142 | 0.639417 |
1522de44aaa5c4d600c314ae6aa76af83a9b1967
| 10,493 |
py
|
Python
|
jaxnerf/nerf/utils.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | null | null | null |
jaxnerf/nerf/utils.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | null | null | null |
jaxnerf/nerf/utils.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Non-differentiable utility functions."""
import os
from os import path
from absl import flags
import flax
from flax import nn
import jax
import jax.numpy as jnp
import numpy as np
from PIL import Image
import yaml
from jaxnerf.nerf import datasets
from jaxnerf.nerf import models
BASE_DIR = "jaxnerf"
INTERNAL = False
@flax.struct.dataclass
class Stats:
loss: float
psnr: float
def define_flags():
"""Define flags for both training and evaluation modes."""
flags.DEFINE_string("train_dir", None, "where to store ckpts and logs")
flags.DEFINE_string("data_dir", None, "input data directory.")
flags.DEFINE_string("config", None,
"using config files to set hyperparameters.")
# Dataset Flags
flags.DEFINE_enum("dataset", "blender",
list(k for k in datasets.dataset_dict.keys()),
"The type of dataset feed to nerf.")
flags.DEFINE_bool("image_batching", False,
"sample rays in a batch from different images.")
flags.DEFINE_bool(
"white_bkgd", True, "using white color as default background."
"(used in the blender dataset only)")
flags.DEFINE_integer("batch_size", 1024,
"the number of rays in a mini-batch (for training).")
flags.DEFINE_integer("factor", 4,
"the downsample factor of images, 0 for no downsample.")
flags.DEFINE_bool("spherify", False, "set for spherical 360 scenes.")
flags.DEFINE_bool(
"render_path", False, "render generated path if set true."
"(used in the llff dataset only)")
flags.DEFINE_integer(
"llffhold", 8, "will take every 1/N images as LLFF test set."
"(used in the llff dataset only)")
# Model Flags
flags.DEFINE_enum("model", "nerf", list(k for k in models.model_dict.keys()),
"name of model to use.")
flags.DEFINE_float("near", 2., "near clip of volumetric rendering.")
flags.DEFINE_float("far", 6., "far clip of volumentric rendering.")
flags.DEFINE_integer("net_depth", 8, "depth of the first part of MLP.")
flags.DEFINE_integer("net_width", 256, "width of the first part of MLP.")
flags.DEFINE_integer("net_depth_condition", 1,
"depth of the second part of MLP.")
flags.DEFINE_integer("net_width_condition", 128,
"width of the second part of MLP.")
flags.DEFINE_integer(
"skip_layer", 4, "add a skip connection to the output vector of every"
"skip_layer layers.")
flags.DEFINE_integer("num_rgb_channels", 3, "the number of RGB channels.")
flags.DEFINE_integer("num_sigma_channels", 1,
"the number of density channels.")
flags.DEFINE_bool("randomized", True, "use randomized stratified sampling.")
flags.DEFINE_integer("deg_point", 10,
"Degree of positional encoding for points.")
flags.DEFINE_integer("deg_view", 4,
"degree of positional encoding for viewdirs.")
flags.DEFINE_integer(
"num_coarse_samples", 64,
"the number of samples on each ray for the coarse model.")
flags.DEFINE_integer("num_fine_samples", 128,
"the number of samples on each ray for the fine model.")
flags.DEFINE_bool("use_viewdirs", True, "use view directions as a condition.")
flags.DEFINE_float(
"noise_std", None, "std dev of noise added to regularize sigma output."
"(used in the llff dataset only)")
flags.DEFINE_bool("lindisp", False,
"sampling linearly in disparity rather than depth.")
flags.DEFINE_string("net_activation", "relu",
"activation function used within the MLP.")
flags.DEFINE_string("rgb_activation", "sigmoid",
"activation function used to produce RGB.")
flags.DEFINE_string("sigma_activation", "relu",
"activation function used to produce density.")
# Train Flags
flags.DEFINE_float("lr", 5e-4, "Learning rate for training.")
flags.DEFINE_integer(
"lr_decay", 500, "the number of steps (in 1000s) for exponential"
"learning rate decay.")
flags.DEFINE_integer("max_steps", 1000000,
"the number of optimization steps.")
flags.DEFINE_integer("save_every", 10000,
"the number of steps to save a checkpoint.")
flags.DEFINE_integer("print_every", 100,
"the number of steps between reports to tensorboard.")
flags.DEFINE_integer(
"render_every", 5000, "the number of steps to render a test image,"
"better to be x00 for accurate step time record.")
flags.DEFINE_integer("gc_every", 10000,
"the number of steps to run python garbage collection.")
# Eval Flags
flags.DEFINE_bool(
"eval_once", True,
"evaluate the model only once if true, otherwise keeping evaluating new"
"checkpoints if there's any.")
flags.DEFINE_bool("save_output", True,
"save predicted images to disk if True.")
flags.DEFINE_integer(
"chunk", 8192,
"the size of chunks for evaluation inferences, set to the value that"
"fits your GPU/TPU memory.")
def update_flags(args):
pth = path.join(BASE_DIR, args.config + ".yaml")
with open_file(pth, "r") as fin:
configs = yaml.load(fin, Loader=yaml.FullLoader)
args.__dict__.update(configs)
def open_file(pth, mode="r"):
if not INTERNAL:
return open(pth, mode=mode)
def file_exists(pth):
if not INTERNAL:
return path.exists(pth)
def listdir(pth):
if not INTERNAL:
return os.listdir(pth)
def isdir(pth):
if not INTERNAL:
return path.isdir(pth)
def makedirs(pth):
if not INTERNAL:
os.makedirs(pth)
def render_image(state, rays, render_fn, rng, normalize_disp, chunk=8192):
"""Render all the pixels of an image (in test mode).
Args:
state: model_utils.TrainState.
rays: a `Rays` namedtuple, the rays to be rendered.
render_fn: function, jit-ed render function.
rng: jnp.ndarray, random number generator (used in training mode only).
normalize_disp: bool, if true then normalize `disp` to [0, 1].
chunk: int, the size of chunks to render sequentially.
Returns:
rgb: jnp.ndarray, rendered color image.
disp: jnp.ndarray, rendered disparity image.
acc: jnp.ndarray, rendered accumulated weights per pixel.
"""
height, width = rays[0].shape[:2]
num_rays = height * width
rays = datasets.ray_fn(lambda r: r.reshape((num_rays, -1)), rays)
unused_rng, key_0, key_1 = jax.random.split(rng, 3)
model = state.optimizer.target
model_state = state.model_state
host_id = jax.host_id()
results = []
with nn.stateful(model_state, mutable=False):
for i in range(0, num_rays, chunk):
# pylint: disable=cell-var-from-loop
print(" " + "X" * int((i / num_rays) * 78), end="\r")
chunk_rays = datasets.ray_fn(lambda r: r[i:i + chunk], rays)
chunk_size = chunk_rays[0].shape[0]
rays_remaining = chunk_size % jax.device_count()
rays_per_host = chunk_size // jax.host_count()
if rays_remaining != 0:
padding = jax.device_count() - rays_remaining
chunk_rays = datasets.ray_fn(
lambda r: jnp.pad(r, ((0, padding), (0, 0)), mode="edge"),
chunk_rays)
else:
padding = 0
# After padding the number of chunk_rays is always divisible by
# host_count.
start, stop = host_id * rays_per_host, (host_id + 1) * rays_per_host
chunk_rays = datasets.ray_fn(lambda r: shard(r[start:stop]), chunk_rays)
chunk_results = render_fn(key_0, key_1, model, chunk_rays)[-1]
results.append([unshard(x[0], padding) for x in chunk_results])
# pylint: enable=cell-var-from-loop
print("")
rgb, disp, acc = [jnp.concatenate(r, axis=0) for r in zip(*results)]
# Normalize disp for visualization for ndc_rays in llff front-facing scenes.
if normalize_disp:
disp = (disp - disp.min()) / (disp.max() - disp.min())
return (rgb.reshape((height, width, -1)), disp.reshape(
(height, width, -1)), acc.reshape((height, width, -1)))
def compute_psnr(mse):
"""Compute psnr value given mse (we assume the maximum pixel value is 1).
Args:
mse: float, mean square error of pixels.
Returns:
psnr: float, the psnr value.
"""
return -10. * jnp.log(mse) / jnp.log(10.)
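# Illustrative check (not part of the original module): mse = 0.01 gives
# psnr = -10 * log10(0.01) = 20 dB.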
def save_img(img, pth):
"""Save an image to disk.
Args:
    img: jnp.ndarray, [height, width, channels], img will be clipped to [0, 1]
before saved to pth.
pth: string, path to save the image to.
"""
with open_file(pth, "wb") as imgout:
Image.fromarray(np.array(
(np.clip(img, 0., 1.) * 255.).astype(jnp.uint8))).save(imgout, "PNG")
def learning_rate_decay(step, init_lr=5e-4, decay_steps=100000, decay_rate=0.1):
"""Continuous learning rate decay function.
The computation for learning rate is lr = (init_lr * decay_rate**(step /
decay_steps))
Args:
step: int, the global optimization step.
init_lr: float, the initial learning rate.
decay_steps: int, the decay steps, please see the learning rate computation
above.
decay_rate: float, the decay rate, please see the learning rate computation
above.
Returns:
    lr: the learning rate for global step 'step'.
"""
power = step / decay_steps
return init_lr * (decay_rate**power)
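# Illustrative check (not part of the original module): with the defaults,
# learning_rate_decay(50000) = 5e-4 * 0.1**0.5 ~ 1.58e-4.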
def shard(xs):
"""Split data into shards for multiple devices along the first dimension."""
return jax.tree_map(
lambda x: x.reshape((jax.local_device_count(), -1) + x.shape[1:]), xs)
def to_device(xs):
"""Transfer data to devices (GPU/TPU)."""
return jax.tree_map(jnp.array, xs)
def unshard(x, padding=0):
"""Collect the sharded tensor to the shape before sharding."""
y = x.reshape([x.shape[0] * x.shape[1]] + list(x.shape[2:]))
if padding > 0:
y = y[:-padding]
return y
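# Illustrative shape check (not part of the original module): with 8 local
# devices, shard() reshapes an array of shape (1024, 3) to (8, 128, 3), and
# unshard() restores it to (1024, 3).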
| 36.182759 | 80 | 0.665682 |
fd79feec1c57882c07af8e5d006d140dc0123589
| 2,632 |
py
|
Python
|
esphome/cpp_helpers.py
|
TheEggi/esphomeyaml
|
98e8cc1edc7b29891e8100eb484922e5c2d4fc33
|
[
"MIT"
] | null | null | null |
esphome/cpp_helpers.py
|
TheEggi/esphomeyaml
|
98e8cc1edc7b29891e8100eb484922e5c2d4fc33
|
[
"MIT"
] | null | null | null |
esphome/cpp_helpers.py
|
TheEggi/esphomeyaml
|
98e8cc1edc7b29891e8100eb484922e5c2d4fc33
|
[
"MIT"
] | null | null | null |
from esphome.const import CONF_INVERTED, CONF_MODE, CONF_NUMBER, CONF_SETUP_PRIORITY, \
CONF_UPDATE_INTERVAL, CONF_TYPE_ID
from esphome.core import coroutine, ID, CORE
from esphome.cpp_generator import RawExpression, add, get_variable
from esphome.cpp_types import App, GPIOPin
from esphome.py_compat import text_type
@coroutine
def gpio_pin_expression(conf):
"""Generate an expression for the given pin option.
This is a coroutine, you must await it with a 'yield' expression!
"""
if conf is None:
return
from esphome import pins
for key, (func, _) in pins.PIN_SCHEMA_REGISTRY.items():
if key in conf:
yield coroutine(func)(conf)
return
number = conf[CONF_NUMBER]
mode = conf[CONF_MODE]
inverted = conf.get(CONF_INVERTED)
yield GPIOPin.new(number, RawExpression(mode), inverted)
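# Illustrative usage (hypothetical CONF_PIN key, not defined in this module):
# inside another coroutine, await the helper with a 'yield' expression, e.g.
#   pin = yield gpio_pin_expression(config[CONF_PIN])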
@coroutine
def register_component(var, config):
"""Register the given obj as a component.
This is a coroutine, you must await it with a 'yield' expression!
:param var: The variable representing the component.
:param config: The configuration for the component.
"""
id_ = text_type(var.base)
if id_ not in CORE.component_ids:
raise ValueError(u"Component ID {} was not declared to inherit from Component, "
u"or was registered twice. Please create a bug report with your "
u"configuration.".format(id_))
CORE.component_ids.remove(id_)
if CONF_SETUP_PRIORITY in config:
add(var.set_setup_priority(config[CONF_SETUP_PRIORITY]))
if CONF_UPDATE_INTERVAL in config:
add(var.set_update_interval(config[CONF_UPDATE_INTERVAL]))
add(App.register_component(var))
yield var
@coroutine
def register_parented(var, value):
if isinstance(value, ID):
paren = yield get_variable(value)
else:
paren = value
add(var.set_parent(paren))
def extract_registry_entry_config(registry, full_config):
# type: (Registry, ConfigType) -> RegistryEntry
key, config = next((k, v) for k, v in full_config.items() if k in registry)
return registry[key], config
@coroutine
def build_registry_entry(registry, full_config):
registry_entry, config = extract_registry_entry_config(registry, full_config)
type_id = full_config[CONF_TYPE_ID]
builder = registry_entry.coroutine_fun
yield builder(config, type_id)
@coroutine
def build_registry_list(registry, config):
actions = []
for conf in config:
action = yield build_registry_entry(registry, conf)
actions.append(action)
yield actions
| 32.097561 | 90 | 0.707447 |
399fc2d7baced1a75069e3414f24ee4738871b6f
| 2,467 |
py
|
Python
|
crossmap/sparsevector.py
|
tkonopka/crossmap
|
237e4319a77281490c4e037918977230fea43d7e
|
[
"MIT"
] | 1 |
2021-08-12T11:40:10.000Z
|
2021-08-12T11:40:10.000Z
|
crossmap/sparsevector.py
|
tkonopka/crossmap
|
237e4319a77281490c4e037918977230fea43d7e
|
[
"MIT"
] | null | null | null |
crossmap/sparsevector.py
|
tkonopka/crossmap
|
237e4319a77281490c4e037918977230fea43d7e
|
[
"MIT"
] | null | null | null |
"""
Handling sparse data using dicts
The class encodes sparse vectors as a dictionary. The objective
is decent space efficiency and quicker addition than csr_matrix.
"""
from numpy import array, int32, float64
from .csr import threshold_csr_arrays, csr_vector
class Sparsevector:
def __init__(self, v=None):
"""initialize as an empty dictionary, or copy from a vector
:param v: csr_vector as baseline
"""
data = dict()
if v is not None:
for i, d in zip(v.indices, v.data):
data[i] = d
self.data = data
def add_csr(self, v, multiplier=1.0):
"""add a csr vector
:param v: csr vector
:param multiplier: numeric, multiplier for values in v
"""
self.add(v.indices, v.data*multiplier)
def add_dense(self, v, multiplier=1.0):
"""add a dense vector
:param v: array or list
:param multiplier: numeric, multiplier for values in v
"""
data = self.data
for i in range(len(v)):
d = v[i]
if d == 0.0:
continue
if i not in data:
data[i] = 0.0
data[i] += d * multiplier
def add(self, indices, values):
"""add a small set of sparse data to this object
:param indices: list of indices (integers)
:param values: list of numeric values to match indices
"""
data = self.data
for i, d in zip(indices, values):
try:
data[i] += d
except KeyError:
data[i] = d
def to_csr(self, n, threshold=None):
"""create a csr vector representation of the dictionary
:param n: dimension of the output object
        :param threshold: real number; only entries greater (in absolute value)
            than threshold * max(data) are preserved in the output
:return: csr_matrix object
"""
_data = self.data
indices = array(list(_data.keys()), dtype=int32, copy=False)
data = array([_data[_] for _ in indices], dtype=float64, copy=False)
if len(data) and threshold is not None and threshold != 0.0:
threshold *= max(data)
data, indices = threshold_csr_arrays(data, indices, threshold)
return csr_vector(data, indices, n)
def __str__(self):
return str(self.data)
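# Illustrative usage sketch (not part of the original module):
#   sv = Sparsevector()
#   sv.add_dense([0.0, 1.0, 0.0, 2.0])
#   sv.add([1, 3], [0.5, 0.25])       # accumulates into existing entries
#   v = sv.to_csr(4, threshold=0.1)   # drops entries below 0.1 * max(data)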
| 28.686047 | 78 | 0.580867 |
820b06e3200a786d55f69dcd7ee00931d61cef2b
| 2,318 |
py
|
Python
|
Color Selection/Color Selection and Region masking.py
|
wgcv/SelfDrivingCars-Demo
|
3b418cd444a9eea80a5403a553caf955965577e3
|
[
"MIT"
] | null | null | null |
Color Selection/Color Selection and Region masking.py
|
wgcv/SelfDrivingCars-Demo
|
3b418cd444a9eea80a5403a553caf955965577e3
|
[
"MIT"
] | null | null | null |
Color Selection/Color Selection and Region masking.py
|
wgcv/SelfDrivingCars-Demo
|
3b418cd444a9eea80a5403a553caf955965577e3
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from PIL import Image
# Read in the image
im = np.asarray(Image.open('test.jpg'))
image = im
#image = mpimg.imread('test.png')
# Grab the x and y size and make a copy of the image
ysize = image.shape[0]
xsize = image.shape[1]
# Note: always make a copy rather than simply using "="
color_select = np.copy(image)
# Define our color selection criteria
# Note: if you run this code, you'll find these are not sensible values!!
# But you'll get a chance to play with them soon in a quiz
#red_threshold = 0.8
#green_threshold = 0.8
#blue_threshold = 0.8
red_threshold = 200
green_threshold = 200
blue_threshold = 200
rgb_threshold = [red_threshold, green_threshold, blue_threshold]
# Identify pixels below the threshold
thresholds = (image[:,:,0] < rgb_threshold[0]) \
| (image[:,:,1] < rgb_threshold[1]) \
| (image[:,:,2] < rgb_threshold[2])
color_select[thresholds] = [0,0,0]
# Display the image
plt.imshow(color_select)
plt.show()
# Mask to keep only what is inside the triangle
region_select = np.copy(image)
# Define a triangle region of interest
# Keep in mind the origin (x=0, y=0) is in the upper left in image processing
# Note: if you run this code, you'll find these are not sensible values!!
# But you'll get a chance to play with them soon in a quiz
left_bottom = [0, 539]
right_bottom = [900, 539]
apex = [400, 0]
# Fit lines (y=Ax+B) to identify the 3 sided region of interest
# np.polyfit() returns the coefficients [A, B] of the fit
fit_left = np.polyfit((left_bottom[0], apex[0]), (left_bottom[1], apex[1]), 1)
fit_right = np.polyfit((right_bottom[0], apex[0]), (right_bottom[1], apex[1]), 1)
fit_bottom = np.polyfit((left_bottom[0], right_bottom[0]), (left_bottom[1], right_bottom[1]), 1)
# Find the region inside the lines
XX, YY = np.meshgrid(np.arange(0, xsize), np.arange(0, ysize))
region_thresholds = (YY > (XX*fit_left[0] + fit_left[1])) & \
(YY > (XX*fit_right[0] + fit_right[1])) & \
(YY < (XX*fit_bottom[0] + fit_bottom[1]))
# Color pixels red which are inside the region of interest
region_select[region_thresholds] = [255, 0, 0]
# Display the image
plt.imshow(region_select)
plt.show()
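# Illustrative check (not part of the original script): np.polyfit with degree
# 1 returns [A, B] for y = A*x + B; fitting (0, 539) and (400, 0) gives
# A = -539/400 = -1.3475 and B = 539.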
| 38 | 96 | 0.692839 |
074a5fb7865c2fac90f131586a9b8fdb4e6c5335
| 853 |
py
|
Python
|
doc/source/contributor/create/examples/resource/fake.py
|
nicolasochem/openstacksdk
|
34ea72ce5b0b7f16a038ca57b2a9f1ec2f90ce00
|
[
"Apache-2.0"
] | null | null | null |
doc/source/contributor/create/examples/resource/fake.py
|
nicolasochem/openstacksdk
|
34ea72ce5b0b7f16a038ca57b2a9f1ec2f90ce00
|
[
"Apache-2.0"
] | null | null | null |
doc/source/contributor/create/examples/resource/fake.py
|
nicolasochem/openstacksdk
|
34ea72ce5b0b7f16a038ca57b2a9f1ec2f90ce00
|
[
"Apache-2.0"
] | null | null | null |
# Apache 2 header omitted for brevity
from openstack.fake import fake_service
from openstack import resource
class Fake(resource.Resource):
resource_key = "resource"
resources_key = "resources"
base_path = "/fake"
service = fake_service.FakeService()
allow_create = True
allow_get = True
allow_update = True
allow_delete = True
allow_list = True
allow_head = True
#: The transaction date and time.
timestamp = resource.Header("x-timestamp")
#: The name of this resource.
name = resource.Body("name", alternate_id=True)
#: The value of the resource. Also available in headers.
value = resource.Body("value", alias="x-resource-value")
#: Is this resource cool? If so, set it to True.
#: This is a multi-line comment about cool stuff.
cool = resource.Body("cool", type=bool)
| 29.413793 | 60 | 0.689332 |
32131c595967b875f3d22a593dbcb536538ad23a
| 3,897 |
py
|
Python
|
graphzoom/graphsage_utils.py
|
NIRVANALAN/GraphZoom
|
32afa35324bf699d754508dbfcc727855a4e9878
|
[
"BSD-3-Clause"
] | null | null | null |
graphzoom/graphsage_utils.py
|
NIRVANALAN/GraphZoom
|
32afa35324bf699d754508dbfcc727855a4e9878
|
[
"BSD-3-Clause"
] | null | null | null |
graphzoom/graphsage_utils.py
|
NIRVANALAN/GraphZoom
|
32afa35324bf699d754508dbfcc727855a4e9878
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
from pathlib import Path
import numpy as np
import random
import json
import sys
import os
import networkx as nx
from networkx.readwrite import json_graph
# version_info = list(map(int, nx.__version__.split('.')))
# major = version_info[0]
# minor = version_info[1]
# assert (major <= 1) and (minor <= 11), "networkx major version > 1.11"
WALK_LEN = 5
N_WALKS = 50
def load_gsage_graph(prefix, normalize=True, load_walks=False):
G_data = json.load(open(prefix + "-G.json"))
G = json_graph.node_link_graph(G_data)
if isinstance(G.nodes()[0], int):
def conversion(n): return int(n)
else:
def conversion(n): return n
id_map = json.load(open(prefix + "-id_map.json"))
id_map = {conversion(k): int(v) for k, v in id_map.items()}
class_map = json.load(open(prefix + "-class_map.json"))
if isinstance(list(class_map.values())[0], list):
def lab_conversion(n): return n
else:
def lab_conversion(n): return int(n)
class_map = {conversion(k): lab_conversion(v)
for k, v in class_map.items()}
# Remove all nodes that do not have val/test annotations
# (necessary because of networkx weirdness with the Reddit data)
broken_nodes = []
for node in G.nodes():
if not 'val' in G.node[node] or not 'test' in G.node[node]:
broken_nodes.append(node)
for node in broken_nodes:
G.remove_node(node)
print("Removed {:d} nodes that lacked proper annotations due to networkx versioning issues".format(
len(broken_nodes)))
# Make sure the graph has edge train_removed annotations
# (some datasets might already have this..)
print("Loaded data.. now preprocessing..")
for edge in G.edges():
if (G.node[edge[0]]['val'] or G.node[edge[1]]['val'] or
G.node[edge[0]]['test'] or G.node[edge[1]]['test']):
G[edge[0]][edge[1]]['train_removed'] = True
else:
G[edge[0]][edge[1]]['train_removed'] = False
if normalize:
norm_feats_path = Path(str(prefix)+'-norm-feats.npy')
if not norm_feats_path.exists():
feats = np.load(str(prefix)+'-feats.npy')
from sklearn.preprocessing import StandardScaler
train_ids = np.array([id_map[n] for n in G.nodes(
) if not G.node[n]['val'] and not G.node[n]['test']])
train_feats = feats[train_ids]
scaler = StandardScaler()
scaler.fit(train_feats)
feats = scaler.transform(feats)
np.save(str(prefix)+'-norm-feats.npy', feats)
else:
feats = np.load(str(prefix)+'-norm-feats.npy')
else:
feats = np.load(str(prefix)+'-feats.npy')
return G, feats, class_map
def run_random_walks(G, nodes, num_walks=N_WALKS):
pairs = []
for count, node in enumerate(nodes):
if G.degree(node) == 0:
continue
for i in range(num_walks):
curr_node = node
for j in range(WALK_LEN):
next_node = random.choice(G.neighbors(curr_node))
# self co-occurrences are useless
if curr_node != node:
pairs.append((node, curr_node))
curr_node = next_node
if count % 1000 == 0:
print("Done walks for", count, "nodes")
return pairs
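# Illustrative note (not part of the original module): each returned pair is
# (start_node, co-occurring node); with WALK_LEN >= 3, a walk 1 -> 2 -> 3
# starting at node 1 contributes the pairs (1, 2) and (1, 3).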
if __name__ == "__main__":
""" Run random walks """
graph_file = sys.argv[1]
out_file = sys.argv[2]
G_data = json.load(open(graph_file))
G = json_graph.node_link_graph(G_data)
nodes = [n for n in G.nodes() if not G.node[n]["val"]
and not G.node[n]["test"]]
G = G.subgraph(nodes)
pairs = run_random_walks(G, nodes)
with open(out_file, "w") as fp:
fp.write("\n".join([str(p[0]) + "\t" + str(p[1]) for p in pairs]))
| 35.108108 | 103 | 0.604054 |
2b12567c777c0822a49e01c50777a6c82e837378
| 11,557 |
py
|
Python
|
conans/client/profile_loader.py
|
ssaavedra/conan
|
e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c
|
[
"MIT"
] | 1 |
2021-08-05T15:33:08.000Z
|
2021-08-05T15:33:08.000Z
|
conans/client/profile_loader.py
|
ssaavedra/conan
|
e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c
|
[
"MIT"
] | 9 |
2017-10-07T06:23:10.000Z
|
2021-06-29T15:22:27.000Z
|
conans/client/profile_loader.py
|
ssaavedra/conan
|
e15dc7902fbbeaf469798a3b9948ead1ecfc8e3c
|
[
"MIT"
] | 1 |
2022-03-23T18:07:31.000Z
|
2022-03-23T18:07:31.000Z
|
import os
from collections import OrderedDict, defaultdict
from conans.errors import ConanException, ConanV2Exception
from conans.model.env_info import EnvValues, unquote
from conans.model.options import OptionsValues
from conans.model.profile import Profile
from conans.model.ref import ConanFileReference
from conans.util.conan_v2_mode import conan_v2_behavior
from conans.util.config_parser import ConfigParser
from conans.util.files import load, mkdir
from conans.util.log import logger
class ProfileParser(object):
def __init__(self, text):
""" divides the text in 3 items:
- self.vars: Dictionary with variable=value declarations
- self.includes: List of other profiles to include
- self.profile_text: the remaining, containing settings, options, env, etc
"""
        self.vars = OrderedDict()  # Order matters: if the user declares F=1 and then FOO=12,
        # and a profile sets MYVAR=$FOO, the $F substitution runs first and
        # $FOO is expanded to "1OO".
self.includes = []
self.profile_text = ""
for counter, line in enumerate(text.splitlines()):
if not line.strip() or line.strip().startswith("#"):
continue
elif line.strip().startswith("["):
self.profile_text = "\n".join(text.splitlines()[counter:])
break
elif line.strip().startswith("include("):
include = line.split("include(", 1)[1]
if not include.endswith(")"):
raise ConanException("Invalid include statement")
include = include[:-1]
self.includes.append(include)
else:
try:
name, value = line.split("=", 1)
                except ValueError:
raise ConanException("Error while parsing line %i: '%s'" % (counter, line))
name = name.strip()
if " " in name:
raise ConanException("The names of the variables cannot contain spaces")
value = unquote(value)
self.vars[name] = value
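    # Illustrative example (not part of the original sources): parsing
    #     MYVAR=1
    #     include(myprofile)
    #     [settings]
    #     os=Linux
    # leaves vars == {"MYVAR": "1"}, includes == ["myprofile"], and
    # profile_text == "[settings]\nos=Linux".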
def apply_vars(self):
self._apply_in_vars()
self._apply_in_profile_text()
def get_includes(self):
        # Variable replacement over includes seems unwise and is not documented. Leaving it
        # for now for fear of breaking things, but it should be removed in Conan 2.0.
for include in self.includes:
for repl_key, repl_value in self.vars.items():
include = include.replace("$%s" % repl_key, repl_value)
yield include
def update_vars(self, included_vars):
""" update the variables dict with new ones from included profiles,
but keeping (higher priority) existing values"""
included_vars.update(self.vars)
self.vars = included_vars
def _apply_in_vars(self):
tmp_vars = OrderedDict()
for key, value in self.vars.items():
for repl_key, repl_value in self.vars.items():
key = key.replace("$%s" % repl_key, repl_value)
value = value.replace("$%s" % repl_key, repl_value)
tmp_vars[key] = value
self.vars = tmp_vars
def _apply_in_profile_text(self):
for k, v in self.vars.items():
self.profile_text = self.profile_text.replace("$%s" % k, v)
def get_profile_path(profile_name, default_folder, cwd, exists=True):
def valid_path(profile_path):
if exists and not os.path.isfile(profile_path):
raise ConanException("Profile not found: %s" % profile_path)
return profile_path
if os.path.isabs(profile_name):
return valid_path(profile_name)
if profile_name[:2] in ("./", ".\\"): # local
profile_path = os.path.abspath(os.path.join(cwd, profile_name))
return valid_path(profile_path)
if not os.path.exists(default_folder):
mkdir(default_folder)
profile_path = os.path.join(default_folder, profile_name)
if exists:
if not os.path.isfile(profile_path):
profile_path = os.path.abspath(os.path.join(cwd, profile_name))
if not os.path.isfile(profile_path):
raise ConanException("Profile not found: %s" % profile_name)
return profile_path
def read_profile(profile_name, cwd, default_folder):
""" Will look for "profile_name" in disk if profile_name is absolute path,
in current folder if path is relative or in the default folder otherwise.
return: a Profile object
"""
if not profile_name:
return None, None
profile_path = get_profile_path(profile_name, default_folder, cwd)
logger.debug("PROFILE LOAD: %s" % profile_path)
text = load(profile_path)
try:
return _load_profile(text, profile_path, default_folder)
except ConanV2Exception:
raise
except ConanException as exc:
raise ConanException("Error reading '%s' profile: %s" % (profile_name, exc))
def _load_profile(text, profile_path, default_folder):
""" Parse and return a Profile object from a text config like representation.
cwd is needed to be able to load the includes
"""
try:
inherited_profile = Profile()
cwd = os.path.dirname(os.path.abspath(profile_path)) if profile_path else None
profile_parser = ProfileParser(text)
# Iterate the includes and call recursive to get the profile and variables
# from parent profiles
for include in profile_parser.get_includes():
# Recursion !!
profile, included_vars = read_profile(include, cwd, default_folder)
inherited_profile.update(profile)
profile_parser.update_vars(included_vars)
# Apply the automatic PROFILE_DIR variable
if cwd:
profile_parser.vars["PROFILE_DIR"] = os.path.abspath(cwd).replace('\\', '/')
# Replace the variables from parents in the current profile
profile_parser.apply_vars()
# Current profile before update with parents (but parent variables already applied)
doc = ConfigParser(profile_parser.profile_text,
allowed_fields=["build_requires", "settings", "env", "scopes", "options"])
if 'scopes' in doc._sections:
conan_v2_behavior("Field 'scopes' in profile is deprecated")
        # Merge the inherited profile with the one read from the current profile
_apply_inner_profile(doc, inherited_profile)
return inherited_profile, profile_parser.vars
except ConanException:
raise
except Exception as exc:
raise ConanException("Error parsing the profile text file: %s" % str(exc))
def _load_single_build_require(profile, line):
tokens = line.split(":", 1)
if len(tokens) == 1:
pattern, req_list = "*", line
else:
pattern, req_list = tokens
refs = [ConanFileReference.loads(reference.strip()) for reference in req_list.split(",")]
profile.build_requires.setdefault(pattern, []).extend(refs)
def _apply_inner_profile(doc, base_profile):
"""
:param doc: ConfigParser object from the current profile (excluding includes and vars,
and with values already replaced)
:param base_profile: Profile inherited, it's used as a base profile to modify it.
:return: None
"""
def get_package_name_value(item):
"""Parse items like package:name=value or name=value"""
package_name = None
if ":" in item:
tmp = item.split(":", 1)
package_name, item = tmp
name, value = item.split("=", 1)
name = name.strip()
value = unquote(value)
return package_name, name, value
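    # Illustrative examples (not part of the original sources):
    #   get_package_name_value("zlib:shared=True") -> ("zlib", "shared", "True")
    #   get_package_name_value("os=Linux") -> (None, "os", "Linux")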
for setting in doc.settings.splitlines():
setting = setting.strip()
if setting and not setting.startswith("#"):
if "=" not in setting:
raise ConanException("Invalid setting line '%s'" % setting)
package_name, name, value = get_package_name_value(setting)
if package_name:
base_profile.package_settings[package_name][name] = value
else:
base_profile.settings[name] = value
if doc.build_requires:
# FIXME CHECKS OF DUPLICATED?
for req in doc.build_requires.splitlines():
_load_single_build_require(base_profile, req)
if doc.options:
base_profile.options.update(OptionsValues.loads(doc.options))
# The env vars from the current profile (read in doc)
# are updated with the included profiles (base_profile)
# the current env values has priority
current_env_values = EnvValues.loads(doc.env)
current_env_values.update(base_profile.env_values)
base_profile.env_values = current_env_values
def profile_from_args(profiles, settings, options, env, cwd, cache):
""" Return a Profile object, as the result of merging a potentially existing Profile
file and the args command-line arguments
"""
default_profile = cache.default_profile # Ensures a default profile creating
if profiles is None:
result = default_profile
else:
result = Profile()
for p in profiles:
tmp, _ = read_profile(p, cwd, cache.profiles_path)
result.update(tmp)
args_profile = _profile_parse_args(settings, options, env)
if result:
result.update(args_profile)
else:
result = args_profile
return result
def _profile_parse_args(settings, options, envs):
""" return a Profile object result of parsing raw data
"""
def _get_tuples_list_from_extender_arg(items):
if not items:
return []
# Validate the pairs
for item in items:
chunks = item.split("=", 1)
if len(chunks) != 2:
raise ConanException("Invalid input '%s', use 'name=value'" % item)
return [(item[0], item[1]) for item in [item.split("=", 1) for item in items]]
def _get_simple_and_package_tuples(items):
"""Parse items like "thing:item=value or item2=value2 and returns a tuple list for
the simple items (name, value) and a dict for the package items
{package: [(item, value)...)], ...}
"""
simple_items = []
package_items = defaultdict(list)
tuples = _get_tuples_list_from_extender_arg(items)
for name, value in tuples:
if ":" in name: # Scoped items
tmp = name.split(":", 1)
ref_name = tmp[0]
name = tmp[1]
package_items[ref_name].append((name, value))
else:
simple_items.append((name, value))
return simple_items, package_items
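    # Illustrative example (not part of the original sources):
    #   _get_simple_and_package_tuples(["os=Linux", "zlib:shared=True"])
    #   -> ([("os", "Linux")], {"zlib": [("shared", "True")]})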
def _get_env_values(env, package_env):
env_values = EnvValues()
for name, value in env:
env_values.add(name, EnvValues.load_value(value))
for package, data in package_env.items():
for name, value in data:
env_values.add(name, EnvValues.load_value(value), package)
return env_values
result = Profile()
options = _get_tuples_list_from_extender_arg(options)
result.options = OptionsValues(options)
env, package_env = _get_simple_and_package_tuples(envs)
env_values = _get_env_values(env, package_env)
result.env_values = env_values
settings, package_settings = _get_simple_and_package_tuples(settings)
result.settings = OrderedDict(settings)
for pkg, values in package_settings.items():
result.package_settings[pkg] = OrderedDict(values)
return result
| 38.652174 | 101 | 0.642554 |
02435678750a957052494d47c1eaf144f2689671
| 252 |
py
|
Python
|
manage.py
|
Seulki-You/HCI_Chatbot
|
46063f21ffebbe4ee46f3c58f0325d73eb3f69c2
|
[
"MIT"
] | null | null | null |
manage.py
|
Seulki-You/HCI_Chatbot
|
46063f21ffebbe4ee46f3c58f0325d73eb3f69c2
|
[
"MIT"
] | null | null | null |
manage.py
|
Seulki-You/HCI_Chatbot
|
46063f21ffebbe4ee46f3c58f0325d73eb3f69c2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "SSUperBot.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 22.909091 | 73 | 0.77381 |
6ddc3c6cea20985c470c6f45e39dee0558ee3186
| 2,198 |
py
|
Python
|
yield.py
|
dhqdqk/python3-learn
|
6711bcb289e319f18409bc5a3c951e9998192894
|
[
"Apache-2.0"
] | null | null | null |
yield.py
|
dhqdqk/python3-learn
|
6711bcb289e319f18409bc5a3c951e9998192894
|
[
"Apache-2.0"
] | null | null | null |
yield.py
|
dhqdqk/python3-learn
|
6711bcb289e319f18409bc5a3c951e9998192894
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#coding:utf-8
import asyncio
'''
Coroutines are also known as micro-threads or fibers ("coroutine" in English).
asyncio is a standard library introduced in Python 3.4 with built-in support
for asynchronous I/O.
The @asyncio.coroutine decorator provided by asyncio marks a generator as a
coroutine type; inside a coroutine, `yield from` calls another coroutine to
perform asynchronous operations.
To simplify asynchronous I/O and mark it more clearly, Python 3.5 introduced
the new async and await syntax, which makes coroutine code cleaner and easier
to read.
aiohttp is a module added for Python 3.5 that handles asynchronous HTTP calls.
'''
def consumer():
r = ''
while True:
n = yield r
print('>>>c.n %d' % n)
print('[CONSUMER] consuming %s...' % n)
r = '200 OK'
def produce(c):
c.send(None)
n = 0
while n < 5:
n = n + 1
print('[PRODUCER] Producing %s...' % n)
r = c.send(n)
print('[PRODUCER] Consumer return: %s' % r)
c.close()
c = consumer()
produce(c)
# Mark a generator as a coroutine
@asyncio.coroutine
def wget(host):
print('wget %s ..' % host)
connect = asyncio.open_connection(host, 80)
reader, writer = yield from connect
header = 'GET / HTTP/1.0\r\nHost:%s \r\n\r\n' % host
writer.write(header.encode('utf-8'))
yield from writer.drain()
while True:
line = yield from reader.readline()
if line == b'\r\n':
break
print('%s header > %s' % (host, line.decode('utf-8').rstrip()))
writer.close()
loop = asyncio.get_event_loop()
tasks = [wget(host) for host in ['www.sina.com.cn',
'www.sohu.com',
'www.zhihu.com']]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
'''
async def awget(host):
print('wget %s ..' % host)
connect = asyncio.open_connection(host, 80)
reader, writer = await connect
header = 'GET / HTTP/1.0\r\nHost:%s \r\n\r\n' % host
writer.write(header.encode('utf-8'))
await writer.drain()
while True:
line = await reader.readline()
if line == b'\r\n':
break
print('%s header > %s' % (host, line.decode('utf-8').rstrip()))
writer.close()
loop1 = asyncio.get_event_loop()
task1 = [awget(host) for host in ['www.sina.com.cn',
'www.sohu.com',
'www.zhihu.com']]
loop1.run_until_complete(asyncio.wait(task1))
loop1.close()
'''
| 27.135802 | 71 | 0.585987 |
b29d48e0146be1cb72983dcc77e1b6f8568f2a71
| 13,651 |
py
|
Python
|
lib/galaxy/managers/users.py
|
ramezrawas/galaxy-1
|
c03748dd49c060a68d07bce56eae33e0ba154414
|
[
"CC-BY-3.0"
] | 6 |
2018-11-03T22:43:35.000Z
|
2022-02-15T17:51:33.000Z
|
lib/galaxy/managers/users.py
|
ramezrawas/galaxy-1
|
c03748dd49c060a68d07bce56eae33e0ba154414
|
[
"CC-BY-3.0"
] | 7 |
2016-12-07T22:19:37.000Z
|
2019-01-30T15:04:26.000Z
|
lib/galaxy/managers/users.py
|
ramezrawas/galaxy-1
|
c03748dd49c060a68d07bce56eae33e0ba154414
|
[
"CC-BY-3.0"
] | 10 |
2017-04-10T21:40:22.000Z
|
2022-02-21T16:50:10.000Z
|
"""
Manager and Serializer for Users.
"""
import sqlalchemy
from galaxy import model
from galaxy import exceptions
from galaxy import util
from galaxy.managers import base
from galaxy.managers import deletable
from galaxy.managers import api_keys
from galaxy.security import validate_user_input
import logging
log = logging.getLogger( __name__ )
class UserManager( base.ModelManager, deletable.PurgableManagerMixin ):
model_class = model.User
foreign_key_name = 'user'
# TODO: there is quite a bit of functionality around the user (authentication, permissions, quotas, groups/roles)
    # most of which may be unnecessary to have here
# TODO: incorp BaseAPIController.validate_in_users_and_groups
# TODO: incorp CreatesUsersMixin
# TODO: incorp CreatesApiKeysMixin
# TODO: incorporate UsesFormDefinitionsMixin?
def create( self, webapp_name=None, **kwargs ):
"""
Create a new user.
"""
# TODO: deserialize and validate here
email = kwargs[ 'email' ]
username = kwargs[ 'username' ]
password = kwargs[ 'password' ]
self._error_on_duplicate_email( email )
user = model.User( email=email, password=password )
user.username = username
if self.app.config.user_activation_on:
user.active = False
else:
# Activation is off, every new user is active by default.
user.active = True
self.session().add( user )
try:
self.session().flush()
# TODO:?? flush needed for permissions below? If not, make optional
except sqlalchemy.exc.IntegrityError as db_err:
raise exceptions.Conflict( db_err.message )
# can throw an sqlalx.IntegrityError if username not unique
self.app.security_agent.create_private_user_role( user )
if webapp_name == 'galaxy':
# We set default user permissions, before we log in and set the default history permissions
permissions = self.app.config.new_user_dataset_access_role_default_private
self.app.security_agent.user_set_default_permissions( user, default_access_private=permissions )
return user
def delete(self, user):
user.deleted = True
self.session().add(user)
self.session().flush()
def _error_on_duplicate_email( self, email ):
"""
Check for a duplicate email and raise if found.
:raises exceptions.Conflict: if any are found
"""
# TODO: remove this check when unique=True is added to the email column
if self.by_email( email ) is not None:
raise exceptions.Conflict( 'Email must be unique', email=email )
# ---- filters
def by_email( self, email, filters=None, **kwargs ):
"""
Find a user by their email.
"""
filters = self._munge_filters( self.model_class.email == email, filters )
try:
# TODO: use one_or_none
return super( UserManager, self ).one( filters=filters, **kwargs )
except exceptions.ObjectNotFound:
return None
def by_email_like( self, email_with_wildcards, filters=None, order_by=None, **kwargs ):
"""
Find a user searching with SQL wildcards.
"""
filters = self._munge_filters( self.model_class.email.like( email_with_wildcards ), filters )
order_by = order_by or ( model.User.email, )
return super( UserManager, self ).list( filters=filters, order_by=order_by, **kwargs )
# ---- admin
def is_admin( self, user ):
"""
Return True if this user is an admin.
"""
admin_emails = self._admin_emails()
return user and admin_emails and user.email in admin_emails
def _admin_emails( self ):
"""
Return a list of admin email addresses from the config file.
"""
return [ email.strip() for email in self.app.config.get( "admin_users", "" ).split( "," ) ]
def admins( self, filters=None, **kwargs ):
"""
Return a list of admin Users.
"""
filters = self._munge_filters( self.model_class.email.in_( self._admin_emails() ), filters )
return super( UserManager, self ).list( filters=filters, **kwargs )
def error_unless_admin( self, user, msg="Administrators only", **kwargs ):
"""
Raise an error if `user` is not an admin.
:raises exceptions.AdminRequiredException: if `user` is not an admin.
"""
# useful in admin only methods
if not self.is_admin( user ):
raise exceptions.AdminRequiredException( msg, **kwargs )
return user
# ---- anonymous
def is_anonymous( self, user ):
"""
Return True if `user` is anonymous.
"""
# define here for single point of change and make more readable
return user is None
def error_if_anonymous( self, user, msg="Log-in required", **kwargs ):
"""
Raise an error if `user` is anonymous.
"""
if user is None:
# TODO: code is correct (401) but should be named AuthenticationRequired (401 and 403 are flipped)
raise exceptions.AuthenticationFailed( msg, **kwargs )
return user
# ---- current
def current_user( self, trans ):
# define here for single point of change and make more readable
# TODO: trans
return trans.user
# ---- api keys
def create_api_key( self, user ):
"""
Create and return an API key for `user`.
"""
# TODO: seems like this should return the model
return api_keys.ApiKeyManager( self.app ).create_api_key( user )
# TODO: possibly move to ApiKeyManager
def valid_api_key( self, user ):
"""
        Return the most recent APIKey for this user or None if none have been created.
"""
query = ( self.session().query( model.APIKeys )
.filter_by( user=user )
.order_by( sqlalchemy.desc( model.APIKeys.create_time ) ) )
all = query.all()
if len( all ):
return all[0]
return None
# TODO: possibly move to ApiKeyManager
def get_or_create_valid_api_key( self, user ):
"""
        Return the most recent APIKey for this user or create one if none have been
created.
"""
existing = self.valid_api_key( user )
if existing:
return existing
        return self.create_api_key( user )
# ---- preferences
def preferences( self, user ):
return dict( (key, value) for key, value in user.preferences.items() )
# ---- roles and permissions
def private_role( self, user ):
return self.app.security_agent.get_private_user_role( user )
def sharing_roles( self, user ):
return self.app.security_agent.get_sharing_roles( user )
def default_permissions( self, user ):
return self.app.security_agent.user_get_default_permissions( user )
def quota( self, user ):
# TODO: use quota manager
return self.app.quota_agent.get_percent( user=user )
def tags_used( self, user, tag_models=None ):
"""
Return a list of distinct 'user_tname:user_value' strings that the
given user has used.
"""
# TODO: simplify and unify with tag manager
if self.is_anonymous( user ):
return []
# get all the taggable model TagAssociations
if not tag_models:
tag_models = [ v.tag_assoc_class for v in self.app.tag_handler.item_tag_assoc_info.values() ]
# create a union of subqueries for each for this user - getting only the tname and user_value
all_tags_query = None
for tag_model in tag_models:
subq = ( self.session().query( tag_model.user_tname, tag_model.user_value )
.filter( tag_model.user == user ) )
all_tags_query = subq if all_tags_query is None else all_tags_query.union( subq )
# if nothing init'd the query, bail
if all_tags_query is None:
return []
# boil the tag tuples down into a sorted list of DISTINCT name:val strings
tags = all_tags_query.distinct().all()
tags = [( ( name + ':' + val ) if val else name ) for name, val in tags ]
return sorted( tags )
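    # Illustrative output (not part of the original sources): a user with a
    # name/value tag ("group", "a") and a name-only tag "fav" yields
    # [ 'fav', 'group:a' ] (distinct, sorted).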
def has_requests( self, user, trans ):
"""
"""
if self.is_anonymous( user ):
return False
request_types = self.app.security_agent.get_accessible_request_types( trans, user )
return bool( user.requests or request_types )
class UserSerializer( base.ModelSerializer, deletable.PurgableSerializerMixin ):
model_manager_class = UserManager
def __init__( self, app ):
"""
Convert a User and associated data to a dictionary representation.
"""
super( UserSerializer, self ).__init__( app )
self.user_manager = self.manager
self.default_view = 'summary'
self.add_view( 'summary', [
'id', 'email', 'username'
])
self.add_view( 'detailed', [
# 'update_time',
# 'create_time',
'is_admin',
'total_disk_usage',
'nice_total_disk_usage',
'quota_percent',
'deleted',
'purged',
# 'active',
'preferences',
# all tags
'tags_used',
# all annotations
# 'annotations'
], include_keys_from='summary' )
def add_serializers( self ):
super( UserSerializer, self ).add_serializers()
deletable.PurgableSerializerMixin.add_serializers( self )
self.serializers.update({
'id' : self.serialize_id,
'create_time' : self.serialize_date,
'update_time' : self.serialize_date,
'is_admin' : lambda i, k, **c: self.user_manager.is_admin( i ),
'preferences' : lambda i, k, **c: self.user_manager.preferences( i ),
'total_disk_usage' : lambda i, k, **c: float( i.total_disk_usage ),
'quota_percent' : lambda i, k, **c: self.user_manager.quota( i ),
'tags_used' : lambda i, k, **c: self.user_manager.tags_used( i ),
'has_requests' : lambda i, k, trans=None, **c: self.user_manager.has_requests( i, trans )
})
class UserDeserializer( base.ModelDeserializer ):
"""
Service object for validating and deserializing dictionaries that
update/alter users.
"""
model_manager_class = UserManager
def add_deserializers( self ):
super( UserDeserializer, self ).add_deserializers()
self.deserializers.update({
'username' : self.deserialize_username,
})
def deserialize_username( self, item, key, username, trans=None, **context ):
# TODO: validate_user_input requires trans and should(?) raise exceptions
# move validation to UserValidator and use self.app, exceptions instead
validation_error = validate_user_input.validate_publicname( trans, username, user=item )
if validation_error:
raise base.ModelDeserializingError( validation_error )
return self.default_deserializer( item, key, username, trans=trans, **context )
class CurrentUserSerializer( UserSerializer ):
model_manager_class = UserManager
def serialize( self, user, keys, **kwargs ):
"""
Override to return at least some usage info if user is anonymous.
"""
kwargs[ 'current_user' ] = user
if self.user_manager.is_anonymous( user ):
return self.serialize_current_anonymous_user( user, keys, **kwargs )
return super( UserSerializer, self ).serialize( user, keys, **kwargs )
def serialize_current_anonymous_user( self, user, keys, trans=None, **kwargs ):
# use the current history if any to get usage stats for trans' anonymous user
# TODO: might be better as sep. Serializer class
usage = 0
percent = None
history = trans.history
if history:
usage = self.app.quota_agent.get_usage( trans, history=trans.history )
percent = self.app.quota_agent.get_percent( trans=trans, usage=usage )
# a very small subset of keys available
values = {
'id' : None,
'total_disk_usage' : float( usage ),
'nice_total_disk_usage' : util.nice_size( usage ),
'quota_percent' : percent,
}
serialized = {}
for key in keys:
if key in values:
serialized[ key ] = values[ key ]
return serialized
class AdminUserFilterParser( base.ModelFilterParser, deletable.PurgableFiltersMixin ):
model_manager_class = UserManager
model_class = model.User
def _add_parsers( self ):
super( AdminUserFilterParser, self )._add_parsers()
deletable.PurgableFiltersMixin._add_parsers( self )
# PRECONDITION: user making the query has been verified as an admin
self.orm_filter_parsers.update({
'email' : { 'op': ( 'eq', 'contains', 'like' ) },
'username' : { 'op': ( 'eq', 'contains', 'like' ) },
'active' : { 'op': ( 'eq' ) },
'disk_usage' : { 'op': ( 'le', 'ge' ) }
})
self.fn_filter_parsers.update({
})
| 36.5 | 117 | 0.614021 |
2ef8c489181d5ecaa9d31010919d08f64313b887
| 7,842 |
py
|
Python
|
interpolation.py
|
NREL/dw-tap-api
|
db3322189937f10355ba4e634421004ff2f7b5bb
|
[
"BSD-3-Clause"
] | null | null | null |
interpolation.py
|
NREL/dw-tap-api
|
db3322189937f10355ba4e634421004ff2f7b5bb
|
[
"BSD-3-Clause"
] | null | null | null |
interpolation.py
|
NREL/dw-tap-api
|
db3322189937f10355ba4e634421004ff2f7b5bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Class that manages vertical interpolations.
A class which will associate a set of model points with a desired point to
be interpolated. The class keeps the state of interpolation in the attribute
called model points.
A set of model points will first contain the set of all TS (x,y,z)
and then be reduced to a set (x,y,z_desired).
Examples:
- Instantiate an interpolation object:
interpolation.interpolation(
XYZPoint_desired ,
[XYZPoint_model1, XYZPoint_model2, XYZPoint_model3],
            vertically_interpolate = True,
            spatially_interpolate = False,
            vertical_interpolation_techniques = ['nn','stability_adjusted_log_law']
)
Written by: Sagi Zisman ([email protected]) and Caleb Phillips ([email protected])
in collaboration with the National Renewable Energy Laboratories.
"""
import timeseries
import points
import vertical_interpolation as vif
import numpy as np
class interpolation():
    _desired_point = None # Should refer to a desired point object
_model_points = None # Should refer to a set of model points which
# will be acted on in the process of interpolation
_model_transformed = None
_to_vertically_interpolate = None
_vertical_interpolation_techniques = None
def __init__(self,
desired_point,
model_points,
vertically_interpolate,
spatially_interpolate,
vertical_interpolation_techniques):
self._desired_point = desired_point
self._model_points = \
[model_points] if type(model_points) is not list else \
model_points
self._to_vertically_interpolate = vertically_interpolate
self._to_spatially_interpolate = spatially_interpolate
self._vertical_interpolation_techniques = \
vertical_interpolation_techniques
self._model_transformed = []
def interpolate(self, save_path=None):
if self._to_vertically_interpolate:
self._vertically_interpolate(save_path)
def _vertically_interpolate(self, save_path):
for xy_point in self._model_points:
vertically_interpolated_timeseries = []
heights = [poynt.height for poynt in xy_point._xyz_points]
# Ensure sorted assumption is satisfied
assert np.array_equal(heights, np.sort(heights))
if 'polynomial' in self._vertical_interpolation_techniques:
for degree in range(1, len(xy_point._xyz_points)):
polynomial_interpolated_series = \
vif.polynomial(self._desired_point,
xy_point._xyz_points,
degree=degree)
if polynomial_interpolated_series is not None:
polynomial_interpolated_series.name = \
'vert_poly_deg{0}'.format(str(degree))
vertically_interpolated_timeseries.\
append(timeseries.
timeseries(polynomial_interpolated_series))
if 'nn' in self._vertical_interpolation_techniques:
nn_interpolated_series = \
vif.nearest_neighbor(self._desired_point,
xy_point._xyz_points)
if nn_interpolated_series is not None:
nn_interpolated_series.name = 'vert_nn'
vertically_interpolated_timeseries.\
append(timeseries.timeseries(nn_interpolated_series))
if 'stability_adjusted_log_law' in self._vertical_interpolation_techniques:
stability_corrected_log_law_series = \
vif.stability_corrected_log_law(self._desired_point,
xy_point._xyz_points,
xy_point.get_timeseries_with_attribute('stability'),
xy_point.surface_roughness,
xy_point.displacement_height)
if stability_corrected_log_law_series is not None:
stability_corrected_log_law_series.name = \
'stability_corrected_log_law_series'
vertically_interpolated_timeseries.\
append(timeseries.
timeseries(stability_corrected_log_law_series))
if 'neutral_log_law' in self._vertical_interpolation_techniques:
neutral_log_law_series =\
vif.neutral_log_law(self._desired_point,
xy_point._xyz_points,
xy_point.surface_roughness,
xy_point.displacement_height)
if neutral_log_law_series is not None:
neutral_log_law_series.name = 'neutral_log_law_series'
vertically_interpolated_timeseries.\
append(timeseries.timeseries(neutral_log_law_series))
if 'stability_adjusted_power_law' in self._vertical_interpolation_techniques:
stability_corrected_power_law_series =\
vif.stability_corrected_power_law(self._desired_point,
xy_point._xyz_points)
if stability_corrected_power_law_series is not None:
stability_corrected_power_law_series.name =\
'stability_corrected_power_law_series'
vertically_interpolated_timeseries.\
append(timeseries.timeseries(stability_corrected_power_law_series))
if 'neutral_power_law' in self._vertical_interpolation_techniques:
neutral_power_law_series =\
vif.neutral_power_law(self._desired_point,
xy_point._xyz_points)
if neutral_power_law_series is not None:
neutral_power_law_series.name = 'neutral_power_law_series'
vertically_interpolated_timeseries.\
append(timeseries.timeseries(neutral_power_law_series))
ground_truth = \
self._desired_point.get_timeseries_with_attribute('ws')
if ground_truth is not None:
ground_truth.name = "ground_truth"
vertically_interpolated_timeseries.append(ground_truth)
            # Create a new XY point that will hold the time series
            # produced by the vertical interpolation.
            model_transform_point = points.XYPoint(xy_point.lat,
                                                   xy_point.lon,
                                                   'model')
            model_transform_point.xyz_points = points.XYZPoint(
                xy_point.lat,
                xy_point.lon,
                self._desired_point.height,
                'model',
                gid=self._desired_point._gid)
            model_transform_point.xyz_points.set_timeseries(
                vertically_interpolated_timeseries)
            self._model_transformed.append(model_transform_point)

        # Signal that the vertical interpolation has been completed.
        self._to_vertically_interpolate = False
        return

    @property
    def interpolation_finished(self):
        """True once vertical interpolation has produced its transformed point."""
        return len(self._model_transformed) == 1
avg_line_length: 45.859649 | max_line_length: 106 | alphanum_fraction: 0.582632
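A self-contained sketch of the neutral log-law extrapolation that vif.neutral_log_law presumably performs, based on the standard profile u(z) = u_ref * ln((z - d) / z0) / ln((z_ref - d) / z0); the function name, sample heights, and sample speeds below are illustrative, not the original API:

import numpy as np
import pandas as pd

def neutral_log_law_sketch(desired_height, ref_height, ref_speeds,
                           surface_roughness, displacement_height=0.0):
    # Scale the reference-height wind speeds up (or down) to the
    # desired height under a neutral logarithmic wind profile.
    scale = (np.log((desired_height - displacement_height) / surface_roughness)
             / np.log((ref_height - displacement_height) / surface_roughness))
    return ref_speeds * scale

speeds_80m = pd.Series([7.9, 8.2, 8.0], name='ws_80m')
print(neutral_log_law_sketch(100.0, 80.0, speeds_80m, surface_roughness=0.03))

The stability-adjusted variant used above additionally consumes the 'stability' time series as a correction term; the neutral form is the special case with no correction.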
hexsha: 406d04f2c0f7920ffc184bd32d3094740322f618 | size: 605 | ext: py | lang: Python
repo: CMPUT404F21-TEAM-PROJECT/cmput404-group-project (head 5fc929f6bd22d41dc73734d34b1563bcfdc87f27)
path: distributed_social_network/api/migrations/0006_node.py | licenses: ["Apache-2.0"]
max_stars_count: 1 (2022-02-10T05:50:19.000Z to 2022-02-10T05:50:19.000Z)
max_issues_count: 43 (2022-02-08T00:59:49.000Z to 2022-03-14T00:10:01.000Z)
max_forks_count: 1 (2022-03-17T22:11:38.000Z to 2022-03-17T22:11:38.000Z)
# Generated by Django 3.2.7 on 2022-03-20 01:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0005_alter_followrequest_unique_together_and_more'),
    ]

    operations = [
migrations.CreateModel(
name='Node',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=200)),
('password', models.CharField(max_length=200)),
],
),
]
avg_line_length: 27.5 | max_line_length: 117 | alphanum_fraction: 0.596694
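For reference, a models.py sketch that would generate the migration above; only the two CharFields and the implicit BigAutoField primary key follow from the CreateModel operation, everything else is an assumption:

from django.db import models


class Node(models.Model):
    # The BigAutoField 'id' primary key is added implicitly by Django
    # (via DEFAULT_AUTO_FIELD), matching the migration's 'id' field.
    username = models.CharField(max_length=200)
    password = models.CharField(max_length=200)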
hexsha: 8ad7117bc9491e79bc6343b9dcf90bf6ac309029 | size: 3119 | ext: py | lang: Python
repo: kirikitty/scrapy-phone-price (head bd7fae4a3f426283b61c10e26f3cbafc5f318dcd)
path: price/settings.py | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# -*- coding: utf-8 -*-

# Scrapy settings for price project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     http://doc.scrapy.org/en/latest/topics/settings.html
#     http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#     http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'price'

SPIDER_MODULES = ['price.spiders']
NEWSPIDER_MODULE = 'price.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'price (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
#COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}

# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#    'price.middlewares.PriceSpiderMiddleware': 543,
#}

# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
#    'price.middlewares.MyCustomDownloaderMiddleware': 543,
#}

# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
#    'price.pipelines.PricePipeline': 300,
#}

# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
avg_line_length: 34.274725 | max_line_length: 109 | alphanum_fraction: 0.77589
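A sketch of how these commented defaults might be switched on for a politer crawl; the pipeline path comes from the commented ITEM_PIPELINES example above and assumes price/pipelines.py actually defines PricePipeline:

# Adapt the download delay to observed latency instead of hitting the
# target site at full concurrency.
AUTOTHROTTLE_ENABLED = True
AUTOTHROTTLE_START_DELAY = 5
AUTOTHROTTLE_MAX_DELAY = 60

# Route scraped items through the project pipeline for cleaning/storage.
ITEM_PIPELINES = {
    'price.pipelines.PricePipeline': 300,
}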
hexsha: 8fad0df05a0bf2cdbe98dee004131f1debf283dc | size: 237 | ext: py | lang: Python
repo: pprathameshmore/PythonLearning (head 593bad64950afc9d39b289068402a6fc83e36df2)
path: Constructor.py | licenses: ["MIT"]
max_stars_count: null
max_issues_count: 1 (2018-03-01T15:38:56.000Z to 2018-03-01T18:40:59.000Z)
max_forks_count: 1 (2018-03-01T15:13:44.000Z to 2018-03-01T15:13:44.000Z)
# @Author Prathamesh More


class ConstructorDemo:
    name = ""  # class attribute, shared by every instance

    def __init__(self, name):
        print("Printing from Constructor")
        # Note: this rebinds the *class* attribute, so all instances see
        # the name passed to the most recent constructor call.
        ConstructorDemo.name = name
        print(name)


c = ConstructorDemo('Prathamesh More')
avg_line_length: 14.8125 | max_line_length: 42 | alphanum_fraction: 0.64557
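Because the constructor above assigns to ConstructorDemo.name, every instance shares one class-level value; for contrast, a sketch of the usual instance-attribute form (class and variable names here are illustrative):

class InstanceAttributeDemo:
    def __init__(self, name):
        # Bound per instance, so each object keeps its own name.
        self.name = name


a = InstanceAttributeDemo('Prathamesh More')
b = InstanceAttributeDemo('Someone Else')
print(a.name, b.name)  # two independent values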