from app.business.blog.views import blog  # noqa
import sys

import pytest

from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache
from cachecontrol.filewrapper import CallbackFileWrapper
from requests import Session


class Test39(object):

    @pytest.mark.skipif(sys.version.startswith('2'),
                        reason='Only run this for python 3.x')
    def test_file_cache_recognizes_consumed_file_handle(self):
        s = CacheControl(Session(), FileCache('web_cache'))
        s.get('http://httpbin.org/cache/60')
        r = s.get('http://httpbin.org/cache/60')
        assert r.from_cache


def test_getattr_during_gc():
    s = CallbackFileWrapper(None, None)
    # normal behavior:
    with pytest.raises(AttributeError):
        s.x

    # this previously had caused an infinite recursion
    vars(s).clear()  # gc does this.
    with pytest.raises(AttributeError):
        s.x
"""NEWLINEPRACTICE Exam 3.NEWLINENEWLINEThis problem provides practice at:NEWLINE *** FOR and WHILE loops. ***NEWLINENEWLINEAuthors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,NEWLINE Mark Hays, Amanda Stouder, Aaron Wilkin, their colleagues,NEWLINE and Marcus Hughes-Oliver.NEWLINE""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.NEWLINENEWLINE###############################################################################NEWLINE# Students:NEWLINE#NEWLINE# These problems have DIFFICULTY and TIME ratings:NEWLINE# DIFFICULTY rating: 1 to 10, where:NEWLINE# 1 is very easyNEWLINE# 3 is an "easy" Test 2 question.NEWLINE# 5 is a "typical" Test 2 question.NEWLINE# 7 is a "hard" Test 2 question.NEWLINE# 10 is an EXTREMELY hard problem (too hard for a Test 2 question)NEWLINE#NEWLINE# TIME ratings: A ROUGH estimate of the number of minutes that weNEWLINE# would expect a well-prepared student to take on the problem.NEWLINE#NEWLINE# IMPORTANT: For ALL the problems in this module,NEWLINE# if you reach the time estimate and are NOT close to a solution,NEWLINE# STOP working on that problem and ASK YOUR INSTRUCTOR FOR HELPNEWLINE# on it, in class or via Piazza.NEWLINE###############################################################################NEWLINENEWLINEimport simple_testing as stNEWLINEimport mathNEWLINENEWLINENEWLINEdef main():NEWLINE """ Calls the TEST functions in this module. """NEWLINE run_test_practice_problem3()NEWLINENEWLINE# -----------------------------------------------------------------------------NEWLINE# Students: Some of the testing code below uses SimpleTestCase objects,NEWLINE# from the imported simple_testing (st) module.NEWLINE# -----------------------------------------------------------------------------NEWLINENEWLINENEWLINEdef run_test_practice_problem3():NEWLINE """ Tests the practice_problem3 function. """NEWLINE ###########################################################################NEWLINE # DONE: 2. 
Implement this TEST function.NEWLINE # It TESTS the practice_problem3 function defined below.NEWLINE # Include at least ** 2 ** ADDITIONAL tests beyond those we wrote.NEWLINE #NEWLINE # Try to choose tests that might expose errors in your code!NEWLINE #NEWLINE # As usual, include both EXPECTED and ACTUAL results in your testsNEWLINE # and compute the latter BY HAND (not by running your program).NEWLINE ###########################################################################NEWLINE # DIFFICULTY AND TIME RATINGS (see top of this file for explanation)NEWLINE # DIFFICULTY: 3NEWLINE # TIME ESTIMATE: 10 minutes.NEWLINE ###########################################################################NEWLINENEWLINE # -------------------------------------------------------------------------NEWLINE # 13 tests, plus a 14th after these.NEWLINE # They use the imported simple_testing (st) module.NEWLINE # Each test is a SimpleTestCase with 3 arguments:NEWLINE # -- the function to test,NEWLINE # -- a list containing the argument(s) to send to the function,NEWLINE # -- the correct returned value.NEWLINE # For example, the first test below will callNEWLINE # practice_problem3(-2, 2, 1.3)NEWLINE # and compare the returned value against [1, 7] (the correct answer).NEWLINE # -------------------------------------------------------------------------NEWLINE tests = [st.SimpleTestCase(practice_problem3,NEWLINE [-2, 2, 1.3],NEWLINE [1, 7]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 3, 0.25],NEWLINE [-5, 0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 4, 0.25],NEWLINE [-5, 0, 1, 2]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 5, 0.25],NEWLINE [-5, 0, 1, 2, 6]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 6, 0.25],NEWLINE [-5, 0, 1, 2, 6, 7]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 7, 0.25],NEWLINE [-5, 0, 1, 2, 6, 7, 8]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 3, -1.0],NEWLINE [-1, 0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 4, -1.0],NEWLINE [-1, 0, 1, 2]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 5, -1.0],NEWLINE [-1, 0, 1, 2, 3]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 6, -1.0],NEWLINE [-1, 0, 1, 2, 3, 5]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [30, 0, -1000],NEWLINE []),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [100, 5, 1.414],NEWLINE [139, 183, 516, 560, 849]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [0, 1, 1.414213562373],NEWLINE [286602]),NEWLINE ]NEWLINE # 14th test:NEWLINE big_list = []NEWLINE for k in range(888, 1888):NEWLINE big_list.append(k)NEWLINE tests.append(st.SimpleTestCase(practice_problem3,NEWLINE [888, 1000,NEWLINE - math.sqrt(2) - 0.00000000001],NEWLINE big_list))NEWLINENEWLINE # -------------------------------------------------------------------------NEWLINE # Run the 14 tests in the tests list constructed above.NEWLINE # -------------------------------------------------------------------------NEWLINE st.SimpleTestCase.run_tests('practice_problem3', tests)NEWLINENEWLINE ###########################################################################NEWLINE # TO DO 2 continued: More tests:NEWLINE # YOU add at least ** 2 ** additional tests here.NEWLINE #NEWLINE # You can use the SimpleTestCase class as above, or useNEWLINE # the ordinary expected/actual way, your choice.NEWLINE #NEWLINE # SUGGESTION: Ask an assistant to CHECK your tests to confirmNEWLINE # that they are adequate tests!NEWLINE 
###########################################################################NEWLINE test1 = [st.SimpleTestCase(practice_problem3,NEWLINE [0, 2, .99],NEWLINE [0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-10, 6, 0.2],NEWLINE [-6, -5, 0, 1, 2, 6])]NEWLINE st.SimpleTestCase.run_tests('practice_problem3', test1)NEWLINENEWLINENEWLINEdef practice_problem3(start, n, threshold):NEWLINE """NEWLINE What comes in:NEWLINE -- An integer: startNEWLINE -- An nonnegative integer: nNEWLINE -- A number: thresholdNEWLINE What goes out: Returns a list of the first n integers,NEWLINE starting at start, for which the sum of the integer'sNEWLINE sine and cosine is bigger than the given threshold.NEWLINE Side effects: None.NEWLINE Examples:NEWLINE practice_problem3(-2, 2, 1.3) returns [1, 7]NEWLINE as you can see if you work through this example usingNEWLINE the numbers presented below. (Do so!)NEWLINENEWLINE For these examples, the following (and more) numbersNEWLINE (each is rounded to 2 decimal places for the sake of brevity)NEWLINE are relevant:NEWLINE -5: sin = 0.96, cos = 0.28, sum = 1.24NEWLINE -4: sin = 0.76, cos = -0.65, sum = 0.10NEWLINE -3: sin = -0.14, cos = -0.99, sum = -1.13NEWLINE -2: sin = -0.91, cos = -0.42, sum = -1.33NEWLINE -1: sin = -0.84, cos = 0.54, sum = -0.30NEWLINE 0: sin = 0.00, cos = 1.00, sum = 1.00NEWLINE 1: sin = 0.84, cos = 0.54, sum = 1.38NEWLINE 2: sin = 0.91, cos = -0.42, sum = 0.49NEWLINE 3: sin = 0.14, cos = -0.99, sum = -0.85NEWLINE 4: sin = -0.76, cos = -0.65, sum = -1.41NEWLINE 5: sin = -0.96, cos = 0.28, sum = -0.68NEWLINE 6: sin = -0.28, cos = 0.96, sum = 0.68NEWLINE 7: sin = 0.66, cos = 0.75, sum = 1.41NEWLINE 8: sin = 0.99, cos = -0.15, sum = 0.84NEWLINE 9: sin = 0.41, cos = -0.91, sum = -0.50NEWLINE 10: sin = -0.54, cos = -0.84, sum = -1.38NEWLINE 11: sin = -1.00, cos = 0.00, sum = -1.00NEWLINE 12: sin = -0.54, cos = 0.84, sum = 0.31NEWLINE 13: sin = 0.42, cos = 0.91, sum = 1.33NEWLINENEWLINE So if start is -5 and threshold is 0.25 and:NEWLINE -- n is 3, then this function returns [-5, 0, 1]NEWLINE because sin(-5) + cos(-5) IS > 0.25 andNEWLINE sin(-4) + cos(-4) is NOT > 0.25 andNEWLINE sin(-3) + cos(-3) is NOT > 0.25 andNEWLINE sin(-2) + cos(-2) is NOT > 0.25 andNEWLINE sin(-1) + cos(-1) is NOT > 0.25 andNEWLINE sin(0) + cos(0) IS > 0.25 andNEWLINE sin(1) + cos(1) IS > 0.25 andNEWLINE and that makes the required 3 such numbers.NEWLINE -- n is 4, then this function returns [-5, 0, 1, 2]NEWLINE -- n is 5, then this function returns [-5, 0, 1, 2, 6]NEWLINE -- n is 6, then this function returns [-5, 0, 1, 2, 6, 7]NEWLINE -- n is 7, then this function returns [-5, 0, 1, 2, 6, 7, 8]NEWLINENEWLINE while if start is -3 and the threshold is -1.0 and:NEWLINE -- n is 3, then this function returns [-1, 0, 1]NEWLINE -- n is 4, then this function returns [-1, 0, 1, 2]NEWLINE -- n is 5, then this function returns [-1, 0, 1, 2, 3]NEWLINE -- n is 6, then this function returns [-1, 0, 1, 2, 3, 5]NEWLINENEWLINE and if n is 0 (regardless of what start is),NEWLINE this function returns []NEWLINENEWLINE and if threshold is more than the square root of 2,NEWLINE this function returns (regardless of what start and n are):NEWLINE [start, start + 1, start + 2, ... start + n - 1].NEWLINENEWLINE Type hints:NEWLINE :type start: intNEWLINE :type n: intNEWLINE :type threshold: floatNEWLINE """NEWLINE ###########################################################################NEWLINE # DONE: 3. 
Implement and test this function.NEWLINE # Some tests are already written for you (above),NEWLINE # but you are required to write ADDITIONAL tests (above).NEWLINE ###########################################################################NEWLINE # DIFFICULTY AND TIME RATINGS (see top of this file for explanation)NEWLINE # DIFFICULTY: 5NEWLINE # TIME ESTIMATE: < 15 minutes.NEWLINE ###########################################################################NEWLINE listy = []NEWLINE x = 0NEWLINE while x < n:NEWLINE if math.sin(start) + math.cos(start) > threshold:NEWLINE listy = listy + [start]NEWLINE x = x + 1NEWLINE start = start + 1NEWLINENEWLINE return listyNEWLINENEWLINENEWLINE# -----------------------------------------------------------------------------NEWLINE# Calls main to start the ball rolling.NEWLINE# -----------------------------------------------------------------------------NEWLINEmain()NEWLINE |
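# A minimal sketch of the "ordinary expected/actual" style of test that the
# TO DO above mentions, using the example already worked out in the docstring:
# the first 2 integers at or after -2 whose sin + cos exceeds 1.3 are 1 and 7.
expected = [1, 7]
actual = practice_problem3(-2, 2, 1.3)
print('expected:', expected)
print('actual:  ', actual)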
from .models import Sentences, WordOptions, Wordsinsentence
from table import Table
from table.columns import Column

# These table classes are defined for the datatables on the site.

# WordOptionsTable holds the data displayed when the collapsible in the nav bar is clicked.
class WordOptionsTable(Table):
    id = Column(field='id', header='id')
    word = Column(field='word', header='word')
    lemma = Column(field='lemma', header='Lemma')
    morph = Column(field='morph', header='Morph')
    aux_info = Column(field='aux_info', header='aux_info')
    pre_verb = Column(field='pre_verb', header='pre_verb')

# SentencesTable holds the data displayed when the collapsible in the nav bar is clicked.
class SentencesTable(Table):
    id = Column(field='id', header='id')
    line = Column(field='line', header='Sentence')

# WordsinsentenceTable holds the data displayed when the collapsible in the nav bar is clicked.
class WordsinsentenceTable(Table):
    id = Column(field='id', header='id')
    word = Column(field='word', header='word')
    parent = Column(field='parent', header='parent')
    children = Column(field='children', header='children')
    relation = Column(field='relation', header='relation')
    wordoptions = Column(field='wordoptions', header='wordoptions')
#!/usr/bin/env python3
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license

import argparse
import os
import logging
import pathlib

import path_to_kapture_localization  # noqa: F401
import kapture_localization.utils.logging
from kapture_localization.utils.pairsfile import get_ordered_pairs_from_file

import kapture_localization.utils.path_to_kapture  # noqa: F401
import kapture
import kapture.utils.logging
from kapture.io.csv import table_to_file

logger = kapture_localization.utils.logging.getLogger()


def slice_pairsfile(pairsfile_path: str,
                    output_path: str,
                    topk: int,
                    threshold: float,
                    startk: int,
                    skip_if_na: bool):
    logger.info('slice_pairsfile...')
    similarity_dict = get_ordered_pairs_from_file(pairsfile_path)

    # apply topk override + skip_if_na
    image_pairs = []
    for name_query, paired_images in sorted(similarity_dict.items()):
        paired_images_threshold = [x for x in paired_images if x[1] >= threshold]
        if startk + topk > len(paired_images_threshold):
            logger.debug(
                f'image {name_query} has {len(paired_images_threshold)} pairs, '
                f'less than topk={topk} (with startk={startk})')
            if skip_if_na:
                logger.debug(f'skipping {name_query}')
                continue
        paired_images_threshold = paired_images_threshold[startk:startk + topk]
        for name_map, score in paired_images_threshold:
            image_pairs.append((name_query, name_map, score))

    if len(image_pairs) > 0:
        os.umask(0o002)
        p = pathlib.Path(output_path)
        os.makedirs(str(p.parent.resolve()), exist_ok=True)
        with open(output_path, 'w') as fid:
            table_to_file(fid, image_pairs, header='# query_image, map_image, score')
    else:
        logger.info('no pairs written')
    logger.info('all done')


def slice_pairsfile_command_line():
    parser = argparse.ArgumentParser(description='Apply topk override / threshold on a pairsfile',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser_verbosity = parser.add_mutually_exclusive_group()
    parser_verbosity.add_argument('-v', '--verbose', nargs='?', default=logging.WARNING, const=logging.INFO,
                                  action=kapture.utils.logging.VerbosityParser,
                                  help='verbosity level (debug, info, warning, critical, ... or int value) [warning]')
    parser_verbosity.add_argument('-q', '--silent', '--quiet',
                                  action='store_const', dest='verbose', const=logging.CRITICAL)
    parser.add_argument('-i', '--input', required=True, help='path to input pairsfile')
    parser.add_argument('-o', '--output', required=True, help='path to output pairsfile')
    parser.add_argument('--topk',
                        default=float('inf'),
                        type=int,
                        help='override pairsfile topk with this one (must be less than or equal)')
    parser.add_argument('--threshold', type=float, default=0,
                        help='the minimum score threshold for pairs to be used')
    parser.add_argument('--startk',
                        default=0,
                        type=int,
                        help='start position of topk')
    parser.add_argument('--skip-if-na', action='store_true', default=False,
                        help='skip a query image if startk + topk is greater than the number of '
                             'available pairs (i.e. n/a, not available)')
    args = parser.parse_args()
    logger.setLevel(args.verbose)
    if args.verbose <= logging.DEBUG:
        # also let kapture express its logs
        kapture.utils.logging.getLogger().setLevel(args.verbose)

    logger.debug('kapture_slice_pairsfile.py \\\n' + ''.join(['\n\t{:13} = {}'.format(k, v)
                                                              for k, v in vars(args).items()]))
    slice_pairsfile(args.input, args.output, args.topk, args.threshold, args.startk, args.skip_if_na)


if __name__ == '__main__':
    slice_pairsfile_command_line()
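# A minimal usage sketch of slice_pairsfile (the file names are hypothetical):
# keep the 5 best-scoring pairs per query, starting from rank 0, and drop
# pairs scoring below 0.1. The equivalent CLI call would be:
#   kapture_slice_pairsfile.py -i pairs.txt -o pairs_top5.txt --topk 5 --threshold 0.1
slice_pairsfile('pairs.txt', 'pairs_top5.txt',
                topk=5, threshold=0.1, startk=0, skip_if_na=False)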
# Set plugin = True
plugin = True
plugin_dir = 'plugin/radar/'


# model = dict(
#     type='UNet',
#     in_channels=3,
#     out_channels=1,
#     base_channels=16,
#     num_stages=5,
#     strides=(1, 1, 1, 1, 1),
#     enc_num_convs=(2, 2, 2, 2, 2),
#     dec_num_convs=(2, 2, 2, 2),
#     downsamples=(True, True, True, True),
#     norm_cfg=dict(type='BN'),
#     act_cfg=dict(type='ReLU'),
#     upsample_cfg=dict(type='InterpConv'),
# )
model = dict(
    type='SpatialTempNet',
    depth_net_cfg={'version': '1A', },
    sf_net_cfg=None,
    scale_depth=False,
    scale_depth_for_temp=False,
    depth_supervision_ratio=-1,
    depth_smoothing=1e-3,
    motion_smoothing=0,
    motion_sparse=0,
    sf_consis=0,
    depth_consis=0,
    rgb_consis=1.0,
    stereo_rgb_consis=0.2,
    loss_decay=0.25,
)


file_client_args = dict(backend='disk')
img_norm_cfg = dict(
    mean=[0.0, 0.0, 0.0], std=[255.0, 255.0, 255.0], to_rgb=True)
# mean=[58.395, 57.12, 57.375], std=[123.675, 116.28, 103.53], to_rgb=True)

train_pipeline = [
    dict(type='LoadImageFromFiles'),  # filenames = results['img_info']['filenames']; results['img{}'.format(i)] = img
    dict(
        type='Resize',
        img_scale=(384, 224),  # w, h; note after reading is (h=900, w=1600)
        multiscale_mode='value',
        keep_ratio=False),
    dict(type='Normalize', **img_norm_cfg),  # results.get('img_fields', ['img'])
    dict(type='LoadDepthImages', img_size=(224, 384), render_type='naive'),  # results['seg_fields']
    dict(type='LoadSceneFlows', img_size=(224, 384), render_type='naive'),  # results['seg_fields']
    dict(type='ImageToTensor', keys=['img{}'.format(i) for i in range(18)]),
    dict(type='Collect', keys=['img{}'.format(i) for i in range(18)] +
                              ['depth_map{}'.format(i) for i in range(18)] +
                              ['sf_map{}'.format(i) for i in range(18)] +
                              ['cam_intrinsic', 'cam_pose']),
]
val_pipeline = [
    dict(type='LoadImageFromFiles'),  # filenames = results['img_info']['filenames']; results['img{}'.format(i)] = img
    dict(
        type='Resize',
        img_scale=(384, 224),  # w, h; note after reading is (h=900, w=1600)
        multiscale_mode='value',
        keep_ratio=False),
    dict(type='Normalize', **img_norm_cfg),  # results.get('img_fields', ['img'])
    dict(type='LoadDepthImages', img_size=(224, 384), render_type='naive'),  # results['seg_fields']
    dict(type='LoadSceneFlows', img_size=(224, 384), render_type='naive'),  # results['seg_fields']
    dict(type='ImageToTensor', keys=['img{}'.format(i) for i in range(18)]),
    dict(type='Collect', keys=['img{}'.format(i) for i in range(18)] +
                              ['depth_map{}'.format(i) for i in range(18)] +
                              ['sf_map{}'.format(i) for i in range(18)] +
                              ['cam_intrinsic', 'cam_pose']),
]


data = dict(
    samples_per_gpu=1,
    workers_per_gpu=4,
    train=dict(
        type='NuscSpatialTempV2',
        sf_path='/public/MARS/datasets/nuScenes-SF/trainval',
        img_path='data/nuscenes/',
        pose_path='/public/MARS/datasets/nuScenes-SF/meta/cam_pose_intrinsic_v2.json',
        pipeline=train_pipeline,
        training=True,
    ),
    val=dict(
        type='NuscSpatialTempV2',
        sf_path='/public/MARS/datasets/nuScenes-SF/trainval',
        img_path='data/nuscenes/',
        pose_path='/public/MARS/datasets/nuScenes-SF/meta/cam_pose_intrinsic_v2.json',
        pipeline=val_pipeline,
        training=False,
    ),
    test=dict(
        type='NuscSpatialTempV2',
        sf_path='/public/MARS/datasets/nuScenes-SF/trainval',
        img_path='data/nuscenes/',
        pose_path='/public/MARS/datasets/nuScenes-SF/meta/cam_pose_intrinsic_v2.json',
        pipeline=val_pipeline,
        training=False,
        # samples_per_gpu=16,
    ),
)

checkpoint_config = dict(interval=2)
# yapf:disable push
# By default we use textlogger hook and tensorboard
# For more loggers see
# https://mmcv.readthedocs.io/en/latest/api.html#mmcv.runner.LoggerHook
log_config = dict(
    interval=100,
    hooks=[
        dict(type='TextLoggerHook'),
        dict(type='TensorboardLoggerHook2')
    ])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = None
load_from = None
resume_from = None
workflow = [('train', 1)]
# workflow = [('train', 1), ('val', 1)]

# For nuScenes dataset, we usually evaluate the model at the end of training.
# Since the models are trained by 24 epochs by default, we set evaluation
# interval to be 20. Please change the interval accordingly if you do not
# use a default schedule.
# optimizer
# This schedule is mainly used by models on nuScenes dataset
optimizer = dict(type='AdamW', lr=1e-3, weight_decay=0.001)
# max_norm=10 is better for SECOND
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=1.0 / 1000,
    step=[24, 32],
)
momentum_config = None

# runtime settings
total_epochs = 40
# load_from = '/public/MARS/surrdet/tyz/depth-net.pth'
load_from = None
import importlib
import inspect
import os
import pathlib

import pkg_resources
from clvm_tools.clvmc import compile_clvm as compile_clvm_py
from flax.types.blockchain_format.program import Program, SerializedProgram

compile_clvm = compile_clvm_py

# Handle optional use of clvm_tools_rs if available and requested
if "CLVM_TOOLS_RS" in os.environ:
    try:

        def sha256file(f):
            import hashlib

            m = hashlib.sha256()
            m.update(open(f).read().encode("utf8"))
            return m.hexdigest()

        from clvm_tools_rs import compile_clvm as compile_clvm_rs

        def translate_path(p_):
            p = str(p_)
            if os.path.isdir(p):
                return p
            else:
                module_object = importlib.import_module(p)
                return os.path.dirname(inspect.getfile(module_object))

        def rust_compile_clvm(full_path, output, search_paths=[]):
            treated_include_paths = list(map(translate_path, search_paths))
            print("compile_clvm_rs", full_path, output, treated_include_paths)
            compile_clvm_rs(str(full_path), str(output), treated_include_paths)

            if os.environ["CLVM_TOOLS_RS"] == "check":
                orig = str(output) + ".orig"
                compile_clvm_py(full_path, orig, search_paths=search_paths)
                orig256 = sha256file(orig)
                rs256 = sha256file(output)

                if orig256 != rs256:
                    print("Compiled %s: %s vs %s\n" % (full_path, orig256, rs256))
                    print("Aborting compilation due to mismatch with rust")
                    assert orig256 == rs256

        compile_clvm = rust_compile_clvm
    finally:
        pass


def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram:
    """
    This function takes a .clvm file in the given package and compiles it to a
    .clvm.hex file if the .hex file is missing or older than the .clvm file, then
    returns the contents of the .hex file as a `SerializedProgram`.

    clvm_filename: file name
    package_or_requirement: usually `__name__` if the clvm file is in the same package
    """

    hex_filename = f"{clvm_filename}.hex"

    try:
        if pkg_resources.resource_exists(package_or_requirement, clvm_filename):
            full_path = pathlib.Path(pkg_resources.resource_filename(package_or_requirement, clvm_filename))
            output = full_path.parent / hex_filename
            compile_clvm(full_path, output, search_paths=[full_path.parent])
    except NotImplementedError:
        # pyinstaller doesn't support `pkg_resources.resource_exists`
        # so we just fall through to loading the hex clvm
        pass

    clvm_hex = pkg_resources.resource_string(package_or_requirement, hex_filename).decode("utf8")
    clvm_blob = bytes.fromhex(clvm_hex)
    return SerializedProgram.from_bytes(clvm_blob)


def load_clvm(clvm_filename, package_or_requirement=__name__) -> Program:
    return Program.from_bytes(bytes(load_serialized_clvm(clvm_filename, package_or_requirement=package_or_requirement)))
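# A minimal usage sketch, assuming a hypothetical `puzzles` package that ships
# a `my_puzzle.clvm` source file: the .clvm file is compiled to
# my_puzzle.clvm.hex on demand, and the hex blob is deserialized into a Program.
puzzle = load_clvm("my_puzzle.clvm", package_or_requirement="puzzles")
print(puzzle.get_tree_hash())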
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)

import copy
import logging
import os

from datadog_checks.base.stubs.aggregator import AggregatorStub
from datadog_checks.base.utils.common import get_docker_hostname
from datadog_checks.dev.docker import get_container_ip
from datadog_checks.snmp import SnmpCheck

log = logging.getLogger(__name__)

HOST = get_docker_hostname()
PORT = 1161
HERE = os.path.dirname(os.path.abspath(__file__))
COMPOSE_DIR = os.path.join(HERE, 'compose')

AUTH_PROTOCOLS = {'MD5': 'usmHMACMD5AuthProtocol', 'SHA': 'usmHMACSHAAuthProtocol'}
PRIV_PROTOCOLS = {'DES': 'usmDESPrivProtocol', 'AES': 'usmAesCfb128Protocol'}
AUTH_KEY = 'doggiepass'
PRIV_KEY = 'doggiePRIVkey'
SNMP_CONTAINER_NAME = 'dd-snmp'

CHECK_TAGS = ['snmp_device:{}'.format(HOST)]

SNMP_CONF = {'name': 'snmp_conf', 'ip_address': HOST, 'port': PORT, 'community_string': 'public'}

SNMP_V3_CONF = {
    'name': 'snmp_v3_conf',
    'ip_address': HOST,
    'port': PORT,
    'user': None,
    'authKey': None,
    'privKey': None,
    'authProtocol': None,
    'privProtocol': None,
    'context_name': 'public',
}

MIBS_FOLDER = {'mibs_folder': os.path.join(HERE, "mibs")}

IGNORE_NONINCREASING_OID = {'ignore_nonincreasing_oid': True}

SUPPORTED_METRIC_TYPES = [
    {'OID': "1.3.6.1.2.1.7.1.0", 'name': "IAmACounter32"},  # Counter32
    {'OID': "1.3.6.1.2.1.4.31.1.1.6.1", 'name': "IAmACounter64"},  # Counter64
    {'OID': "1.3.6.1.2.1.4.24.6.0", 'name': "IAmAGauge32"},  # Gauge32
    {'OID': "1.3.6.1.2.1.88.1.1.1.0", 'name': "IAmAnInteger"},  # Integer
]

UNSUPPORTED_METRICS = [{'OID': "1.3.6.1.2.1.25.6.3.1.5.1", 'name': "IAmString"}]  # String (not supported)

CAST_METRICS = [
    {'OID': "1.3.6.1.4.1.2021.10.1.3.1", 'name': "cpuload1"},  # OctetString
    {'OID': "1.3.6.1.4.1.2021.10.1.6.1", 'name': "cpuload2"},  # Opaque
]

CONSTRAINED_OID = [{"MIB": "RFC1213-MIB", "symbol": "tcpRtoAlgorithm"}]

DUMMY_MIB_OID = [
    ({"MIB": "DUMMY-MIB", "symbol": "scalar"}, AggregatorStub.GAUGE, 10),  # Integer
    # Additional types we support but that are not part of the original SNMP protocol.
    ({"MIB": "DUMMY-MIB", "symbol": "dummyCounterGauge"}, AggregatorStub.GAUGE, 90),  # CounterBasedGauge64
    ({"MIB": "DUMMY-MIB", "symbol": "dummyZeroCounter"}, AggregatorStub.RATE, 120),  # ZeroBasedCounter64
]

FORCED_METRICS = [
    {'OID': "1.3.6.1.2.1.4.24.6.0", 'name': "IAmAGauge32", 'forced_type': 'counter'},  # Gauge32
    {'OID': "1.3.6.1.2.1.4.31.1.1.6.1", 'name': "IAmACounter64", 'forced_type': 'gauge'},  # Counter64
]
INVALID_FORCED_METRICS = [
    {'OID': "1.3.6.1.2.1.4.24.6.0", 'name': "IAmAGauge32", 'forced_type': 'counter'},  # Gauge32
    {'OID': "1.3.6.1.2.1.4.31.1.1.6.1", 'name': "IAmACounter64", 'forced_type': 'histogram'},  # Counter64
]

SCALAR_OBJECTS = [
    {'OID': "1.3.6.1.2.1.7.1.0", 'name': "udpDatagrams"},
    {'OID': "1.3.6.1.2.1.6.10.0", 'name': "tcpInSegs"},
    {'OID': ".1.3.6.1.6.3.10.2.1.3.0", 'name': "snmpEngineTime"},  # OID with leading dot
    {'MIB': "TCP-MIB", 'symbol': "tcpCurrEstab"},
]

SCALAR_OBJECTS_WITH_TAGS = [
    {'OID': "1.3.6.1.2.1.7.1.0", 'name': "udpDatagrams", 'metric_tags': ['udpdgrams', 'UDP']},
    {'OID': "1.3.6.1.2.1.6.10.0", 'name': "tcpInSegs", 'metric_tags': ['tcpinsegs', 'TCP']},
    {'MIB': "TCP-MIB", 'symbol': "tcpCurrEstab", 'metric_tags': ['MIB', 'TCP', 'estab']},
]

TABULAR_OBJECTS = [
    {
        'MIB': "IF-MIB",
        'table': "ifTable",
        'symbols': ["ifInOctets", "ifOutOctets"],
        'metric_tags': [{'tag': "interface", 'column': "ifDescr"}, {'tag': "dumbindex", 'index': 1}],
    }
]

BULK_TABULAR_OBJECTS = [
    {
        'MIB': "IF-MIB",
        'table': "ifTable",
        'symbols': [
            "ifInOctets",
            "ifOutOctets",
            "ifInUcastPkts",
            "ifInUcastPkts",
            "ifInNUcastPkts",
            "ifInDiscards",
            "ifInErrors",
            "ifInUnknownProtos",
        ],
        'metric_tags': [{'tag': "interface", 'column': "ifDescr"}, {'tag': "dumbindex", 'index': 1}],
    },
    {
        'MIB': "IP-MIB",
        'table': "ipSystemStatsTable",
        'symbols': [
            "ipSystemStatsInReceives",
            "ipSystemStatsHCInReceives",
            "ipSystemStatsInOctets",
            "ipSystemStatsHCInOctets",
            "ipSystemStatsInHdrErrors",
            "ipSystemStatsInNoRoutes",
            "ipSystemStatsInAddrErrors",
            "ipSystemStatsInUnknownProtos",
            "ipSystemStatsInTruncatedPkts",
            "ipSystemStatsInForwDatagrams",
            "ipSystemStatsHCInForwDatagrams",
            "ipSystemStatsReasmReqds",
            "ipSystemStatsReasmOKs",
            "ipSystemStatsReasmFails",
            "ipSystemStatsInDiscards",
            "ipSystemStatsInDelivers",
            "ipSystemStatsHCInDelivers",
            "ipSystemStatsOutRequests",
            "ipSystemStatsHCOutRequests",
            "ipSystemStatsOutNoRoutes",
            "ipSystemStatsOutForwDatagrams",
            "ipSystemStatsHCOutForwDatagrams",
            "ipSystemStatsOutDiscards",
            "ipSystemStatsOutFragReqds",
            "ipSystemStatsOutFragOKs",
            "ipSystemStatsOutFragFails",
            "ipSystemStatsOutFragCreates",
            "ipSystemStatsOutTransmits",
            "ipSystemStatsHCOutTransmits",
            "ipSystemStatsOutOctets",
            "ipSystemStatsHCOutOctets",
            "ipSystemStatsInMcastPkts",
        ],
    },
]

INVALID_METRICS = [{'MIB': "IF-MIB", 'table': "noIdeaWhatIAmDoingHere", 'symbols': ["ImWrong", "MeToo"]}]

PLAY_WITH_GET_NEXT_METRICS = [
    {"OID": "1.3.6.1.2.1.4.31.3.1.3.2", "name": "needFallback"},
    {"OID": "1.3.6.1.2.1.4.31.3.1.3.2.1", "name": "noFallbackAndSameResult"},
]

RESOLVED_TABULAR_OBJECTS = [
    {
        "MIB": "IF-MIB",
        "table": "ifTable",
        "symbols": [
            {"name": "ifInOctets", "OID": "1.3.6.1.2.1.2.2.1.10"},
            {"name": "ifOutOctets", "OID": "1.3.6.1.2.1.2.2.1.16"},
        ],
        "metric_tags": [
            {"tag": "interface", "column": {"name": "ifDescr", "OID": "1.3.6.1.2.1.2.2.1.2"}},
            {"tag": "dumbindex", "index": 1, "mapping": {1: "one", 2: "two", 3: "three", 90: "other"}},
        ],
    }
]


def generate_instance_config(metrics, template=None):
    template = template if template else SNMP_CONF
    instance_config = copy.copy(template)
    instance_config['metrics'] = metrics
    instance_config['name'] = HOST
    return instance_config


def generate_container_instance_config(metrics):
    conf = copy.deepcopy(SNMP_CONF)
    conf['ip_address'] = get_container_ip(SNMP_CONTAINER_NAME)
    return generate_instance_config(metrics, template=conf)


def generate_v3_instance_config(metrics, name=None, user=None, auth=None, auth_key=None, priv=None, priv_key=None):
    instance_config = generate_instance_config(metrics, SNMP_V3_CONF)

    if name:
        instance_config['name'] = name
    if user:
        instance_config['user'] = user
    if auth:
        instance_config['authProtocol'] = auth
    if auth_key:
        instance_config['authKey'] = auth_key
    if priv:
        instance_config['privProtocol'] = priv
    if priv_key:
        instance_config['privKey'] = priv_key

    return instance_config


def create_check(instance):
    return SnmpCheck('snmp', {}, [instance])
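# A minimal sketch using the fixtures above: build an instance config for the
# scalar OIDs and wrap it in a check, the way the tests in this package would.
instance = generate_instance_config(SCALAR_OBJECTS)
check = create_check(instance)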
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved

import re
import string
from collections import Counter
from typing import Dict, List

from pytext.common.constants import RawExampleFieldName, Stage
from pytext.metric_reporters.channel import Channel, ConsoleChannel, FileChannel
from pytext.metric_reporters.metric_reporter import MetricReporter
from pytext.metrics.squad_metrics import SquadMetrics


class SquadFileChannel(FileChannel):
    def get_title(self, context_keys=()):
        return (
            "index",
            "ques",
            "doc",
            "predicted_answer",
            "true_answers",
            "predicted_start_pos",
            "predicted_end_pos",
            "true_start_pos",
            "true_end_pos",
            "start_pos_scores",
            "end_pos_scores",
            "predicted_has_answer",
            "true_has_answer",
            "has_answer_scores",
        )

    def gen_content(self, metrics, loss, preds, targets, scores, contexts, *args):
        pred_answers, pred_start_pos, pred_end_pos, pred_has_answer = preds
        true_answers, true_start_pos, true_end_pos, true_has_answer = targets
        start_pos_scores, end_pos_scores, has_answer_scores = scores
        for i in range(len(pred_answers)):
            yield [
                contexts[RawExampleFieldName.ROW_INDEX][i],
                contexts[SquadMetricReporter.QUES_COLUMN][i],
                contexts[SquadMetricReporter.DOC_COLUMN][i],
                pred_answers[i],
                true_answers[i],
                pred_start_pos[i],
                pred_end_pos[i],
                true_start_pos[i],
                true_end_pos[i],
                start_pos_scores[i],
                end_pos_scores[i],
                pred_has_answer[i],
                true_has_answer[i],
                has_answer_scores[i],
            ]


class SquadMetricReporter(MetricReporter):
    QUES_COLUMN = "question"
    ANSWERS_COLUMN = "answers"
    DOC_COLUMN = "doc"
    ROW_INDEX = "row_index"

    class Config(MetricReporter.Config):
        n_best_size: int = 5
        max_answer_length: int = 16
        ignore_impossible: bool = True
        false_label: str = "False"

    @classmethod
    def from_config(cls, config, *args, tensorizers=None, **kwargs):
        return cls(
            channels=[
                ConsoleChannel(),
                SquadFileChannel((Stage.TEST,), config.output_path),
            ],
            n_best_size=config.n_best_size,
            max_answer_length=config.max_answer_length,
            ignore_impossible=config.ignore_impossible,
            has_answer_labels=tensorizers["has_answer"].vocab._vocab,
            tensorizer=tensorizers["squad_input"],
            false_label=config.false_label,
        )

    def __init__(
        self,
        channels: List[Channel],
        n_best_size: int,
        max_answer_length: int,
        ignore_impossible: bool,
        has_answer_labels: List[str],
        tensorizer=None,
        false_label=Config.false_label,
    ) -> None:
        super().__init__(channels)
        self.channels = channels
        self.tensorizer = tensorizer
        self.ignore_impossible = ignore_impossible
        self.has_answer_labels = has_answer_labels
        self.false_label = false_label
        self.false_idx = 1 if has_answer_labels[1] == false_label else 0
        self.true_idx = 1 - self.false_idx

    def _reset(self):
        self.all_start_pos_preds: List = []
        self.all_start_pos_targets: List = []
        self.all_start_pos_scores: List = []
        self.all_end_pos_preds: List = []
        self.all_end_pos_targets: List = []
        self.all_end_pos_scores: List = []
        self.all_has_answer_targets: List = []
        self.all_has_answer_preds: List = []
        self.all_has_answer_scores: List = []

        self.all_preds = (
            self.all_start_pos_preds,
            self.all_end_pos_preds,
            self.all_has_answer_preds,
        )
        self.all_targets = (
            self.all_start_pos_targets,
            self.all_end_pos_targets,
            self.all_has_answer_targets,
        )
        self.all_scores = (
            self.all_start_pos_scores,
            self.all_end_pos_scores,
            self.all_has_answer_scores,
        )
        self.all_context: Dict = {}
        self.all_loss: List = []
        self.all_pred_answers: List = []
        # self.all_true_answers: List = []
        self.batch_size: List = []
        self.n_batches = 0

    def _add_decoded_answer_batch_stats(self, m_input, preds, **contexts):
        # For BERT, doc_tokens = concatenated tokens from question and document.
        doc_tokens = m_input[0]
        starts, ends, has_answers = preds
        pred_answers = [
            self._unnumberize(tokens[start : end + 1].tolist(), doc_str)
            for tokens, start, end, doc_str in zip(
                doc_tokens, starts, ends, contexts[self.DOC_COLUMN]
            )
        ]
        self.aggregate_data(self.all_pred_answers, pred_answers)

    def add_batch_stats(
        self, n_batches, preds, targets, scores, loss, m_input, **contexts
    ):  # contexts object is the dict returned by self.batch_context().
        super().add_batch_stats(
            n_batches, preds, targets, scores, loss, m_input, **contexts
        )
        self._add_decoded_answer_batch_stats(m_input, preds, **contexts)

    def aggregate_preds(self, new_batch, context=None):
        self.aggregate_data(self.all_start_pos_preds, new_batch[0])
        self.aggregate_data(self.all_end_pos_preds, new_batch[1])
        self.aggregate_data(self.all_has_answer_preds, new_batch[2])

    def aggregate_targets(self, new_batch, context=None):
        self.aggregate_data(self.all_start_pos_targets, new_batch[0])
        self.aggregate_data(self.all_end_pos_targets, new_batch[1])
        self.aggregate_data(self.all_has_answer_targets, new_batch[2])

    def aggregate_scores(self, new_batch):
        self.aggregate_data(self.all_start_pos_scores, new_batch[0])
        self.aggregate_data(self.all_end_pos_scores, new_batch[1])
        self.aggregate_data(self.all_has_answer_scores, new_batch[2])

    def batch_context(self, raw_batch, batch):
        context = super().batch_context(raw_batch, batch)
        context[self.ROW_INDEX] = [row[self.ROW_INDEX] for row in raw_batch]
        context[self.QUES_COLUMN] = [row[self.QUES_COLUMN] for row in raw_batch]
        context[self.ANSWERS_COLUMN] = [row[self.ANSWERS_COLUMN] for row in raw_batch]
        context[self.DOC_COLUMN] = [row[self.DOC_COLUMN] for row in raw_batch]
        return context

    def calculate_metric(self):
        exact_matches, count = self._compute_exact_matches(
            self.all_pred_answers,
            self.all_context[self.ANSWERS_COLUMN],
            self.all_has_answer_preds,
            self.all_has_answer_targets,
        )
        f1_score = self._compute_f1_score(
            self.all_pred_answers,
            self.all_context[self.ANSWERS_COLUMN],
            self.all_has_answer_preds,
            self.all_has_answer_targets,
        )
        self.all_preds = (
            self.all_pred_answers,
            self.all_start_pos_preds,
            self.all_end_pos_preds,
            self.all_has_answer_preds,
        )
        self.all_targets = (
            self.all_context[self.ANSWERS_COLUMN],
            self.all_start_pos_targets,
            self.all_end_pos_targets,
            self.all_has_answer_targets,
        )
        self.all_scores = (
            self.all_start_pos_scores,
            self.all_end_pos_scores,
            self.all_has_answer_scores,
        )
        metrics = SquadMetrics(
            exact_matches=100.0 * exact_matches / count,
            f1_score=f1_score,
            num_examples=count,
        )
        return metrics

    def get_model_select_metric(self, metric: SquadMetrics):
        return metric.f1_score

    def _compute_exact_matches(
        self,
        pred_answer_list,
        target_answers_list,
        pred_has_answer_list,
        target_has_answer_list,
    ):
        exact_matches = 0
        for pred_answer, target_answers, pred_has_answer, target_has_answer in zip(
            pred_answer_list,
            target_answers_list,
            pred_has_answer_list,
            target_has_answer_list,
        ):
            if not self.ignore_impossible:
                if pred_has_answer != target_has_answer:
                    continue
                if pred_has_answer == self.false_idx:
                    exact_matches += 1
                    continue
            pred = self._normalize_answer(pred_answer)
            for answer in target_answers:
                true = self._normalize_answer(answer)
                if pred == true:
                    exact_matches += 1
                    break
        return exact_matches, len(pred_answer_list)

    def _compute_f1_score(
        self,
        pred_answer_list,
        target_answers_list,
        pred_has_answer_list,
        target_has_answer_list,
    ):
        f1_scores_sum = 0.0
        for pred_answer, target_answers, pred_has_answer, target_has_answer in zip(
            pred_answer_list,
            target_answers_list,
            pred_has_answer_list,
            target_has_answer_list,
        ):
            if not self.ignore_impossible:
                if pred_has_answer != target_has_answer:
                    continue
                if pred_has_answer == self.false_idx:
                    f1_scores_sum += 1.0
                    continue
            f1_scores_sum += max(
                self._compute_f1_per_answer(answer, pred_answer)
                for answer in target_answers
            )
        return 100.0 * f1_scores_sum / len(pred_answer_list)

    def _unnumberize(self, ans_tokens, doc_str):
        """
        ans_tokens is the span of token ids that the model predicted. We
        re-tokenize and re-numberize the raw context (doc_str) here to get
        doc_tokens and the start_idx/end_idx mappings. At this point,
        ans_tokens is a sub-list of doc_tokens (hopefully, if the model
        predicted a span in the context). Then we find ans_tokens inside
        doc_tokens, and return the corresponding span in the raw text using
        the idx mapping.
        """
        # start_idx and end_idx are lists of char start and end positions in doc_str.
        doc_tokens, start_idx, end_idx = self.tensorizer._lookup_tokens(doc_str)
        doc_tokens = list(doc_tokens)
        num_ans_tokens = len(ans_tokens)
        answer_str = ""
        for doc_token_idx in range(len(doc_tokens) - num_ans_tokens):
            if doc_tokens[doc_token_idx : doc_token_idx + num_ans_tokens] == ans_tokens:
                start_char_idx = start_idx[doc_token_idx]
                end_char_idx = end_idx[doc_token_idx + num_ans_tokens - 1]
                answer_str = doc_str[start_char_idx:end_char_idx]
                break
        return answer_str

    # The following three functions are copied from Squad's evaluation script.
    # https://worksheets.codalab.org/rest/bundles/0x6b567e1cf2e041ec80d7098f031c5c9e/contents/blob/

    def _normalize_answer(self, s):
        """Lower text and remove punctuation, articles and extra whitespace."""

        def white_space_fix(text):
            return " ".join(text.split())

        def remove_articles(text):
            regex = re.compile(r"\b(a|an|the)\b", re.UNICODE)
            return re.sub(regex, " ", text)

        def remove_punc(text):
            exclude = set(string.punctuation)
            return "".join(ch for ch in text if ch not in exclude)

        def lower(text):
            return text.lower()

        return white_space_fix(remove_articles(remove_punc(lower(s))))

    def _get_tokens(self, s):
        if not s:
            return []
        return self._normalize_answer(s).split()

    def _compute_f1_per_answer(self, a_gold, a_pred):
        gold_toks = self._get_tokens(a_gold)
        pred_toks = self._get_tokens(a_pred)
        common = Counter(gold_toks) & Counter(pred_toks)
        num_same = sum(common.values())
        if len(gold_toks) == 0 or len(pred_toks) == 0:
            # If either is no-answer, then F1 is 1 if they agree, 0 otherwise
            return int(gold_toks == pred_toks)
        if num_same == 0:
            return 0
        precision = 1.0 * num_same / len(pred_toks)
        recall = 1.0 * num_same / len(gold_toks)
        f1 = (2 * precision * recall) / (precision + recall)
        return f1
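# A standalone sketch of the token-overlap F1 computed by
# _compute_f1_per_answer above (mirroring its logic so it can be checked
# without constructing a reporter): gold "the cat sat" vs. prediction
# "cat sat down". Normalization drops the article "the".
from collections import Counter

gold_toks = ["cat", "sat"]              # from "the cat sat"
pred_toks = ["cat", "sat", "down"]      # from "cat sat down"
num_same = sum((Counter(gold_toks) & Counter(pred_toks)).values())  # 2
precision = num_same / len(pred_toks)   # 2/3
recall = num_same / len(gold_toks)      # 1.0
f1 = 2 * precision * recall / (precision + recall)
print(f1)  # 0.8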
from django.core.management.base import BaseCommand, CommandError
from ._offences import OffenceScraper
from ....api import models
import pprint


class Command(BaseCommand):
    help = 'Scrapes offences'

    def add_arguments(self, parser):
        pass

    def handle(self, *args, **options):
        scraper = OffenceScraper()
        offences = scraper.get_offences(5)
        for offence in offences:
            model, _created = models.Offence.objects.update_or_create(
                offence_name=offence["name"],
                defaults={"effective_from": offence["effective_date"]})
            self.stdout.write(self.style.SUCCESS(str(model)))
            pprint.pprint(offence, indent=4)

        self.stdout.write(self.style.SUCCESS('Command has run successfully'))
import os, timeNEWLINEimport numpy as npNEWLINEimport pybullet as pNEWLINEimport gymNEWLINEfrom gym import error,spaces,utilsNEWLINEfrom gym.utils import seedingNEWLINENEWLINEfrom .util import UtilNEWLINEimport pybullet_dataNEWLINEimport mathNEWLINENEWLINEimport randomNEWLINENEWLINEclass Demo2Env(gym.Env):NEWLINE def __init__(self):NEWLINE self.directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'assets')NEWLINENEWLINE self.numJoints = p.getNumJoints(self.tiago)NEWLINE self.available_joints_indices = [i for i in range(numJoints) if p.getJointInfo(self.tiago, i)[2] != p.JOINT_FIXED]NEWLINE self.right_arm_indices = [46, 47, 48, 49, 50, 51, 52]NEWLINE self.lowerlimits = [-1.17, -1.17, -0.78, -0.39, -2.09, -1.41, -2.09]NEWLINE self.upperlimits = [1.57, 1.57, 3.92, 2.35, 2.09, 1.41, 2.09]NEWLINE self.torso_index = [21] # torso lift prismatic jointNEWLINE self.right_gripper_indices = [58, 59]NEWLINE self.right_tool_joint = 56NEWLINE self.iter = 0NEWLINE self.iteration = 0NEWLINE self.dist = 1e30NEWLINE self.step = 1 / 240NEWLINE self.reached0 = FalseNEWLINE self.reached1 = FalseNEWLINE self.reached2 = FalseNEWLINE self.robot_forces = 1.0NEWLINE self.robot_gains = 0.05NEWLINE self.distance_weight = 1.0NEWLINE self.action_weight = 0.01NEWLINE self.task_success_threshold = 0.03NEWLINE self.targetpos = np.array([2.2, 2.1, 1.5])NEWLINENEWLINE self.action_robot_len = len(self.available_joints_indices)NEWLINE self.action_human_len = 0NEWLINE self.action_space = spaces.Box(NEWLINE low=np.array([-1.0] * (self.action_robot_len + self.action_human_len), dtype=np.float32),NEWLINE high=np.array([1.0] * (self.action_robot_len + self.action_human_len), dtype=np.float32), dtype=np.float32)NEWLINE self.obs_robot_len = 18 + len(self.available_joints_indices)NEWLINE self.obs_human_len = 19NEWLINE self.observation_space = spaces.Box(low=np.array([-1000000000.0]*(self.obs_robot_len+self.obs_human_len), dtype=np.float32),NEWLINE high=np.array([1000000000.0]*(self.obs_robot_len+self.obs_human_len),NEWLINE dtype=np.float32), dtype=np.float32)NEWLINE self.action_space_robot = spaces.Box(low=np.array([-1.0]*self.action_robot_len, dtype=np.float32), high=np.array([1.0]*self.action_robot_len, dtype=np.float32), dtype=np.float32)NEWLINE self.action_space_human = spaces.Box(low=np.array([-1.0]*self.action_human_len, dtype=np.float32), high=np.array([1.0]*self.action_human_len, dtype=np.float32), dtype=np.float32)NEWLINE self.observation_space_robot = spaces.Box(low=np.array([-1000000000.0]*self.obs_robot_len, dtype=np.float32), high=np.array([1000000000.0]*self.obs_robot_len, dtype=np.float32), dtype=np.float32)NEWLINE self.observation_space_human = spaces.Box(low=np.array([-1000000000.0]*self.obs_human_len, dtype=np.float32), high=np.array([1000000000.0]*self.obs_human_len, dtype=np.float32), dtype=np.float32)NEWLINENEWLINE def step(self, action):NEWLINENEWLINE obs = self._get_obs()NEWLINENEWLINENEWLINE # Get human preferencesNEWLINE end_effector_velocity = np.linalg.norm(p.getLinkState(self.right_end_effector))NEWLINENEWLINE ee_top_center_pos = [0,0,0]NEWLINE reward_distance_mouth = -np.linalg.norm(self.targetpos - np.array(ee_top_center_pos)) # Penalize distances between top of cup and mouthNEWLINE reward_action = -np.linalg.norm(action) # Penalize actionsNEWLINENEWLINENEWLINENEWLINE reward = self.config('distance_weight')*reward_distance_mouth + self.config('action_weight')*reward_action + preferences_scoreNEWLINENEWLINENEWLINE info = {'task_success': int(reward_distance_mouth <= 
self.task_success_threshold), 'action_robot_len': self.action_robot_len,NEWLINE 'action_human_len': self.action_human_len,'obs_robot_len': self.obs_robot_len, 'obs_human_len': self.obs_human_len}NEWLINE done = self.iteration >= 200NEWLINENEWLINENEWLINE return obs, reward, done, infoNEWLINENEWLINE def _get_obs(self):NEWLINENEWLINE robot_joint_angles = p.getJointStates(self.tiago, self.available_joints_indices)NEWLINE # Fix joint angles to be in [-pi, pi]NEWLINE robot_joint_angles = (np.array(robot_joint_angles) + np.pi) % (2 * np.pi) - np.piNEWLINE # ee_tc_pos = np.array(p.getLinkState(self.robot, 54, computeForwardKinematics=True, physicsClientId=self.id)[0])NEWLINENEWLINENEWLINE robot_obs = np.concatenate(NEWLINE [ - self.targetpos, robot_joint_angles ]).ravel()NEWLINENEWLINE return robot_obsNEWLINENEWLINE def seed(self, seed=None):NEWLINE self.np_random, seed = seeding.np_random(seed)NEWLINE return [seed]NEWLINENEWLINE def set_seed(self, seed=1000):NEWLINE self.np_random.seed(seed)NEWLINENEWLINE def set_frictions(self, links, lateral_friction=None, spinning_friction=None, rolling_friction=None):NEWLINE if type(links) == int:NEWLINE links = [links]NEWLINE for link in links:NEWLINE if lateral_friction is not None:NEWLINE p.changeDynamics(self.body, link, lateralFriction=lateral_friction, physicsClientId=self.id)NEWLINE if spinning_friction is not None:NEWLINE p.changeDynamics(self.body, link, spinningFriction=spinning_friction, physicsClientId=self.id)NEWLINE if rolling_friction is not None:NEWLINE p.changeDynamics(self.body, link, rollingFriction=rolling_friction, physicsClientId=self.id)NEWLINENEWLINE def build_assistive_env(self):NEWLINE # Build plane, furniture, robot, human, etc. (just like world creation)NEWLINE # Load the ground planeNEWLINE plane = p.loadURDF(os.path.join(self.directory, 'plane', 'plane.urdf'), physicsClientId=self.id)NEWLINENEWLINE # Randomly set friction of the groundNEWLINE self.plane.set_frictions(self.plane, lateral_friction=self.np_random.uniform(0.025, 0.5),NEWLINE spinning_friction=0, rolling_friction=0)NEWLINE # Disable rendering during creationNEWLINE p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 0, physicsClientId=self.id)NEWLINE # Create robotNEWLINE self.tiago = p.loadURDF(os.path.join(self.directory, 'tiago_dualhand', 'tiago_dual_modified.urdf'),NEWLINE useFixedBase=True, basePosition=[-10, -10, 0])NEWLINE def reset(self):NEWLINE p.resetSimulation(physicsClientId=self.id)NEWLINENEWLINE if not self.gui:NEWLINE # Reconnect the physics engine to forcefully clear memory when running long training scriptsNEWLINE self.disconnect()NEWLINE self.id = p.connect(p.DIRECT)NEWLINE self.util = Util(self.id, self.np_random)NEWLINE if self.gpu:NEWLINE self.util.enable_gpu()NEWLINE # Configure camera positionNEWLINE p.resetDebugVisualizerCamera(cameraDistance=1.75, cameraYaw=-25, cameraPitch=-45,NEWLINE cameraTargetPosition=[-0.2, 0, 0.4],NEWLINE physicsClientId=self.id)NEWLINE p.configureDebugVisualizer(p.COV_ENABLE_MOUSE_PICKING, 0, physicsClientId=self.id)NEWLINE p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0, physicsClientId=self.id)NEWLINE p.setTimeStep(1/240, physicsClientId=self.id)NEWLINE # Disable real time simulation so that the simulation only advances when we call stepSimulationNEWLINE p.setRealTimeSimulation(0, physicsClientId=self.id)NEWLINE p.setGravity(0, 0, -9.81, physicsClientId=self.id)NEWLINENEWLINE self.last_sim_time = NoneNEWLINE self.iteration = 0NEWLINE self.forces = []NEWLINE self.task_success = 0NEWLINE 
self.build_assistive_env()NEWLINENEWLINE # Update robot motor gainsNEWLINENEWLINENEWLINE self.generate_target()NEWLINENEWLINE p.resetDebugVisualizerCamera(cameraDistance=1.10, cameraYaw=55, cameraPitch=-45,NEWLINE cameraTargetPosition=[-0.2, 0, 0.75], physicsClientId=self.id)NEWLINENEWLINENEWLINE target_ee_pos = np.array([-0.2, -0.5, 1.1]) + self.np_random.uniform(-0.05, 0.05, size=3)NEWLINE target_ee_orient = self.get_quaternion(self.robot.toc_ee_orient_rpy[self.task])NEWLINE # self.init_robot_poseNEWLINENEWLINE # Open gripper to hold the toolNEWLINE self.robot.set_gripper_open_position(self.robot.right_gripper_indices, self.robot.gripper_pos[self.task],NEWLINE set_instantly=True)NEWLINENEWLINENEWLINENEWLINE p.setPhysicsEngineParameter(numSubSteps=4, numSolverIterations=10, physicsClientId=self.id)NEWLINENEWLINENEWLINE # Enable renderingNEWLINE p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1, physicsClientId=self.id)NEWLINENEWLINENEWLINE for _ in range(50):NEWLINE p.stepSimulation(physicsClientId=self.id)NEWLINENEWLINE self.init_env_variables()NEWLINE return self._get_obs()NEWLINENEWLINE def init_env_variables(self, reset=False):NEWLINE if len(self.action_space.low) <= 1 or reset:NEWLINE obs_len = len(self._get_obs())NEWLINE self.observation_space.__init__(low=-np.ones(obs_len, dtype=np.float32) * 1000000000,NEWLINE high=np.ones(obs_len, dtype=np.float32) * 1000000000, dtype=np.float32)NEWLINENEWLINE # Define action/obs lengthsNEWLINE self.action_robot_len = len(self.available_joints_indices)NEWLINE self.action_human_len = 0NEWLINE self.obs_robot_len = len(self._get_obs('robot'))NEWLINE self.obs_human_len = 19NEWLINE self.action_space_robot = spaces.Box(low=np.array([-1.0] * self.action_robot_len, dtype=np.float32),NEWLINE high=np.array([1.0] * self.action_robot_len, dtype=np.float32),NEWLINE dtype=np.float32)NEWLINE self.action_space_human = spaces.Box(low=np.array([-1.0] * self.action_human_len, dtype=np.float32),NEWLINE high=np.array([1.0] * self.action_human_len, dtype=np.float32),NEWLINE dtype=np.float32)NEWLINE self.observation_space_robot = spaces.Box(NEWLINE low=np.array([-1000000000.0] * self.obs_robot_len, dtype=np.float32),NEWLINE high=np.array([1000000000.0] * self.obs_robot_len, dtype=np.float32), dtype=np.float32)NEWLINE self.observation_space_human = spaces.Box(NEWLINE low=np.array([-1000000000.0] * self.obs_human_len, dtype=np.float32),NEWLINE high=np.array([1000000000.0] * self.obs_human_len, dtype=np.float32), dtype=np.float32)NEWLINENEWLINE def generate_target(self):NEWLINE # Set targetNEWLINE self.sphere = self.create_sphere(radius=0.01, mass=0.0, pos=self.targetpos, collision=False, rgba=[0, 1, 0, 1])NEWLINE self.update_targets()NEWLINENEWLINE def update_targets(self):NEWLINE # update_targets() is automatically called at each time step for updating any targets in the environment.NEWLINE p.resetBasePositionAndOrientation(self.sphere, self.targetpos, [0, 0, 0, 1])NEWLINENEWLINE def create_sphere(self, radius=0.01, mass=0.0, pos=[0, 0, 0], visual=True, collision=True, rgba=[0, 1, 1, 1], maximal_coordinates=False, return_collision_visual=False):NEWLINE sphere_collision = p.createCollisionShape(shapeType=p.GEOM_SPHERE, radius=radius, physicsClientId=self.id) if collision else -1NEWLINE sphere_visual = p.createVisualShape(shapeType=p.GEOM_SPHERE, radius=radius, rgbaColor=rgba, physicsClientId=self.id) if visual else -1NEWLINE if return_collision_visual:NEWLINE return sphere_collision, sphere_visualNEWLINE sphere = p.createMultiBody(baseMass=mass, 
baseCollisionShapeIndex=sphere_collision, baseVisualShapeIndex=sphere_visual, basePosition=pos, useMaximalCoordinates=maximal_coordinates, physicsClientId=self.id)NEWLINE return sphereNEWLINE # def take_step(self, actions, gains=None, forces=None, action_multiplier=0.05, step_sim=True):NEWLINE # if gains is None:NEWLINE # gains = [a.motor_gains for a in self.agents]NEWLINE # elif type(gains) not in (list, tuple):NEWLINE # gains = [gains]*len(self.agents)NEWLINE # if forces is None:NEWLINE # forces = [a.motor_forces for a in self.agents]NEWLINE # elif type(forces) not in (list, tuple):NEWLINE # forces = [forces]*len(self.agents)NEWLINE # if self.last_sim_time is None:NEWLINE # self.last_sim_time = time.time()NEWLINE # self.iteration += 1NEWLINE # self.forces = []NEWLINE # actions = np.clip(actions, a_min=self.action_space.low, a_max=self.action_space.high)NEWLINE # actions *= action_multiplierNEWLINE # action_index = 0NEWLINE # for i, agent in enumerate(self.agents):NEWLINE # needs_action = not isinstance(agent, Human) or agent.controllableNEWLINE # if needs_action:NEWLINE # agent_action_len = len(agent.controllable_joint_indices)NEWLINE # action = np.copy(actions[action_index:action_index+agent_action_len])NEWLINE # action_index += agent_action_lenNEWLINE # if isinstance(agent, Robot):NEWLINE # action *= agent.action_multiplierNEWLINE # if len(action) != agent_action_len:NEWLINE # print('Received agent actions of length %d does not match expected action length of %d' % (len(action), agent_action_len))NEWLINE # exit()NEWLINE # # Append the new action to the current measured joint anglesNEWLINE # agent_joint_angles = agent.get_joint_angles(agent.controllable_joint_indices)NEWLINE # # Update the target robot/human joint angles based on the proposed action and joint limitsNEWLINE # for _ in range(self.frame_skip):NEWLINE # if needs_action:NEWLINE # below_lower_limits = agent_joint_angles + action < agent.controllable_joint_lower_limitsNEWLINE # above_upper_limits = agent_joint_angles + action > agent.controllable_joint_upper_limitsNEWLINE # action[below_lower_limits] = 0NEWLINE # action[above_upper_limits] = 0NEWLINE # agent_joint_angles[below_lower_limits] = agent.controllable_joint_lower_limits[below_lower_limits]NEWLINE # agent_joint_angles[above_upper_limits] = agent.controllable_joint_upper_limits[above_upper_limits]NEWLINE # if isinstance(agent, Human) and agent.impairment == 'tremor':NEWLINE # if needs_action:NEWLINE # agent.target_joint_angles += actionNEWLINE # agent_joint_angles = agent.target_joint_angles + agent.tremors * (1 if self.iteration % 2 == 0 else -1)NEWLINE # else:NEWLINE # agent_joint_angles += actionNEWLINE # if isinstance(agent, Robot) and agent.action_duplication is not None:NEWLINE # agent_joint_angles = np.concatenate([[a]*d for a, d in zip(agent_joint_angles, self.robot.action_duplication)])NEWLINE # agent.control(agent.all_controllable_joints, agent_joint_angles, agent.gains, agent.forces)NEWLINE # else:NEWLINE # agent.control(agent.controllable_joint_indices, agent_joint_angles, gains[i], forces[i])NEWLINE # if step_sim:NEWLINE # # Update all agent positionsNEWLINE # for _ in range(self.frame_skip):NEWLINE # p.stepSimulation(physicsClientId=self.id)NEWLINE # for agent in self.agents:NEWLINE # if isinstance(agent, Human):NEWLINE # agent.enforce_joint_limits()NEWLINE # if agent.controllable:NEWLINE # agent.enforce_realistic_joint_limits()NEWLINE # self.update_targets()NEWLINE # if self.gui:NEWLINE # # Slow down time so that the simulation matches real timeNEWLINE # 
self.slow_time() |
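The commented-out take_step above clips each proposed action against the controllable joints' limits before issuing motor commands. A minimal standalone NumPy sketch of that clamping step (the function and variable names here are illustrative, not assistive-gym's API):

import numpy as np

def clamp_targets(joint_angles, action, lower_limits, upper_limits):
    # Zero the action components that would push a joint past a limit and
    # pin the corresponding target angle to the limit itself, mirroring the
    # below_lower_limits/above_upper_limits logic in the commented-out code.
    joint_angles = np.asarray(joint_angles, dtype=float).copy()
    action = np.asarray(action, dtype=float).copy()
    lower = np.asarray(lower_limits, dtype=float)
    upper = np.asarray(upper_limits, dtype=float)
    below = joint_angles + action < lower
    above = joint_angles + action > upper
    action[below] = 0
    action[above] = 0
    joint_angles[below] = lower[below]
    joint_angles[above] = upper[above]
    return joint_angles + action

# A 0.2 rad step on a joint at 0.9 rad with a 1.0 rad upper limit is dropped
# and the target is pinned at the limit: prints [1.  0.1]
print(clamp_targets([0.9, 0.0], [0.2, 0.1], [-1.0, -1.0], [1.0, 1.0]))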
"""Gets the next song in the playlistNEWLINENEWLINERun with --help for more info.NEWLINE"""NEWLINENEWLINEfrom nextsong.cli import nextsongNEWLINENEWLINEnextsong()NEWLINE |
'''NEWLINECreated on Apr 06, 2012NEWLINENEWLINE@author: Michael Kraus ([email protected])NEWLINE'''NEWLINENEWLINEimport argparseNEWLINEimport matplotlibNEWLINENEWLINEfrom vorticity.diagnostics import DiagnosticsNEWLINENEWLINENEWLINEclass replay(object):NEWLINE '''NEWLINE Replays a finished vorticity run from its HDF5 file and regenerates the plots.NEWLINE '''NEWLINENEWLINE def __init__(self, hdf5_file, nPlot=1, nMax=0, output=False, contours=False):NEWLINE '''NEWLINE ConstructorNEWLINE '''NEWLINE NEWLINE self.diagnostics = Diagnostics(hdf5_file)NEWLINE NEWLINE if nMax > 0 and nMax < self.diagnostics.nt:NEWLINE self.nMax = nMaxNEWLINE else:NEWLINE self.nMax = self.diagnostics.ntNEWLINE NEWLINE self.nPlot = nPlotNEWLINE self.plot = PlotVorticity2D(self.diagnostics, output=output)NEWLINE NEWLINE NEWLINE def run(self):NEWLINE# for iTime in range(1, self.nMax+1):NEWLINE for iTime in [5,10,20,30,60]:NEWLINE if iTime == 0 or iTime % self.nPlot == 0 or iTime == self.nMax:NEWLINE print(iTime)NEWLINE self.diagnostics.read_from_hdf5(iTime)NEWLINE self.diagnostics.update_invariants(iTime)NEWLINE self.plot.update(iTime, final=(iTime == self.nMax))NEWLINE NEWLINE NEWLINENEWLINEif __name__ == '__main__':NEWLINE parser = argparse.ArgumentParser(description='Vorticity Equation Solver in 2D')NEWLINE NEWLINE parser.add_argument('hdf5_file', metavar='<run.hdf5>', type=str,NEWLINE help='Run HDF5 File')NEWLINE parser.add_argument('-np', metavar='i', type=int, default=1,NEWLINE help='plot every i\'th frame')NEWLINE parser.add_argument('-nt', metavar='i', type=int, default=0,NEWLINE help='plot up to i\'th frame')NEWLINE parser.add_argument('-o', action='store_true', required=False,NEWLINE help='save plots to file')NEWLINE parser.add_argument('-c', action='store_true', required=False,NEWLINE help='plot contours of streaming function in vorticity')NEWLINE NEWLINE args = parser.parse_args()NEWLINE NEWLINE print()NEWLINE print("Replay run with " + args.hdf5_file)NEWLINE print()NEWLINE NEWLINE if args.o:NEWLINE matplotlib.use('AGG')NEWLINE from vorticity.plot.plot_contours import PlotVorticity2DNEWLINE pyvp = replay(args.hdf5_file, args.np, args.nt, output=True, contours=args.c)NEWLINE pyvp.run()NEWLINE else:NEWLINE from vorticity.plot.plot_contours import PlotVorticity2DNEWLINE pyvp = replay(args.hdf5_file, args.np, args.nt, output=False, contours=args.c)NEWLINE NEWLINE print()NEWLINE input('Hit any key to start replay.')NEWLINE print()NEWLINE NEWLINE pyvp.run()NEWLINE NEWLINE print()NEWLINE print("Replay finished.")NEWLINE print()NEWLINE NEWLINE
class NoSuchRequestMethod(Exception):NEWLINE def __init__(self, method_name: str):NEWLINE super().__init__()NEWLINE self.method_name = method_nameNEWLINENEWLINE def __str__(self):NEWLINE return 'Invalid request method "%s"; valid request methods are: "get","post","delete","put","options","patch","head".' % self.method_nameNEWLINENEWLINEclass NotATask(Exception):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE def __str__(self):NEWLINE return 'The added task must be an instance of class Task'NEWLINENEWLINEclass NotAList(Exception):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE def __str__(self):NEWLINE return 'The results attribute of a MultiTask instance must be a list'NEWLINENEWLINEclass NotATaskQueue(Exception):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE def __str__(self):NEWLINE return 'Not a valid task queue'NEWLINENEWLINEclass NoTaskQueue(Exception):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE def __str__(self):NEWLINE return 'The task operator has no task queue yet; try TaskOperator().add_queue()'NEWLINENEWLINEclass BinanceAPINotSetted(Exception):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE def __str__(self):NEWLINE return 'The Binance API to use has not been set; try: Task.Set_Binance_API'NEWLINE
MILLION = 1000000.0NEWLINEEXP18 = 10 ** 18NEWLINEEXCHANGE_OSMOSIS = "osmosis_blockchain"NEWLINECUR_OSMO = "OSMO"NEWLINECUR_CRO = "CRO"NEWLINENEWLINEMSG_TYPE_ACKNOWLEDGMENT = "MsgAcknowledgement"NEWLINEMSG_TYPE_BEGIN_UNLOCKING = "MsgBeginUnlocking"NEWLINEMSG_TYPE_DELEGATE = "MsgDelegate"NEWLINEMSG_TYPE_DEPOSIT = "MsgDeposit"NEWLINEMSG_TYPE_EXIT_POOL = "MsgExitPool"NEWLINEMSG_TYPE_IBC_TRANSFER = "MsgTransfer"NEWLINEMSG_TYPE_JOIN_POOL = "MsgJoinPool"NEWLINEMSG_TYPE_JOIN_SWAP_EXTERN_AMOUNT_IN = "MsgJoinSwapExternAmountIn"NEWLINEMSG_TYPE_LOCK_TOKENS = "MsgLockTokens"NEWLINEMSG_TYPE_MSGRECVPACKET = "MsgRecvPacket"NEWLINEMSG_TYPE_REDELEGATE = "MsgBeginRedelegate"NEWLINEMSG_TYPE_SEND = "MsgSend"NEWLINEMSG_TYPE_SET_WITHDRAW_ADDRESS = "MsgSetWithdrawAddress"NEWLINEMSG_TYPE_SUBMIT_PROPOSAL = "MsgSubmitProposal"NEWLINEMSG_TYPE_SWAP_IN = "MsgSwapExactAmountIn"NEWLINEMSG_TYPE_UNDELEGATE = "MsgUndelegate"NEWLINEMSG_TYPE_UPDATE_CLIENT = "MsgUpdateClient"NEWLINEMSG_TYPE_VOTE = "MsgVote"NEWLINEMSG_TYPE_WITHDRAW_COMMISSION = "MsgWithdrawValidatorCommission"NEWLINEMSG_TYPE_WITHDRAW_REWARD = "MsgWithdrawDelegatorReward"NEWLINEMSG_TYPE_TIMEOUT = "MsgTimeout"NEWLINE |
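A hedged sketch of how the denomination constants above are typically applied; the conversion below relies on OSMO's 6-decimal base unit (uosmo), which is what MILLION encodes, with EXP18 presumably playing the same role for 18-decimal denominations:

def uosmo_to_osmo(raw_amount):
    # Chain amounts arrive as integer strings in the base unit (uosmo).
    return int(raw_amount) / MILLION

assert uosmo_to_osmo("2500000") == 2.5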
#!/usr/bin/python3NEWLINENEWLINE"""Project setup file for the f8a jobs."""NEWLINENEWLINEimport osNEWLINEfrom setuptools import setup, find_packagesNEWLINENEWLINENEWLINEdef get_requirements():NEWLINE """Parse all packages mentioned in the 'requirements.txt' file."""NEWLINE requirements_txt = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'requirements.txt')NEWLINE with open(requirements_txt) as fd:NEWLINE return fd.read().splitlines()NEWLINENEWLINENEWLINEsetup(NEWLINE name='fabric8_analytics_jobs',NEWLINE version='0.1',NEWLINE packages=find_packages(),NEWLINE package_data={NEWLINE 'f8a_jobs': [NEWLINE 'swagger.yaml',NEWLINE os.path.join('default_jobs', '*.yaml'),NEWLINE os.path.join('default_jobs', '*.yml')NEWLINE ]NEWLINE },NEWLINE scripts=['f8a-jobs.py'],NEWLINE install_requires=get_requirements(),NEWLINE include_package_data=True,NEWLINE author='Fridolin Pokorny',NEWLINE author_email='[email protected]',NEWLINE maintainer='Fridolin Pokorny',NEWLINE maintainer_email='[email protected]',NEWLINE description='fabric8-analytics Core job service',NEWLINE license='ASL 2.0',NEWLINE keywords='fabric8 analytics jobs',NEWLINE url='https://github.com/fabric8-analytics/jobs',NEWLINE classifiers=[NEWLINE "Programming Language :: Python :: 3",NEWLINE "Programming Language :: Python :: 3.4",NEWLINE "Programming Language :: Python :: 3.5",NEWLINE "Intended Audience :: Developers",NEWLINE ]NEWLINE)NEWLINE |
import unittestNEWLINENEWLINEfrom wpilib_ws import WPILibWsServerNEWLINENEWLINENEWLINEclass TestData(unittest.TestCase):NEWLINE def setUp(self):NEWLINE self.server = WPILibWsServer()NEWLINENEWLINE def test_verify_data(self):NEWLINE """NEWLINE As per the WPILib protocol, the server should ignore messages that:NEWLINE - Are not a dictNEWLINE - Have no 'type' key, 'device' key, or 'data' keyNEWLINE - Have a 'type' or 'device' key that is not a stringNEWLINE - Have a 'data' value that is not a dictNEWLINE - Have a 'type' value that the client or server does not recognizeNEWLINE """NEWLINENEWLINE cases = [NEWLINE ({}, False),NEWLINE ({"type": "SimDevice", "device": "bar", "data": {"foo": "bar"}}, True),NEWLINE ({"type": 1, "device": "bar", "data": {"foo": "bar"}}, False),NEWLINE ({"type": "SimDevice", "data": {"foo": "bar"}}, False),NEWLINE ({"type": "SimDevice", "device": "bar", "data": 1}, False),NEWLINE ({"type": "NotASimDevice", "device": "bar", "data": {"foo": "bar"}}, False),NEWLINE ("not an object", False),NEWLINE ]NEWLINENEWLINE for case, expected in cases:NEWLINE self.assertEqual(NEWLINE self.server.verify_data(case), expected, f"Failed on case: {case}"NEWLINE )NEWLINE |
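The docstring above enumerates the rejection rules; the sketch below is one way to implement exactly those checks. It is an illustration rather than WPILibWsServer's actual verify_data, and KNOWN_TYPES is a placeholder for whatever type set the server recognizes:

KNOWN_TYPES = {"SimDevice"}  # placeholder: the real recognized-type set is server-defined

def verify_data(data):
    # Reject anything that is not a dict with string 'type'/'device' keys,
    # a dict 'data' value, and a recognized 'type'.
    if not isinstance(data, dict):
        return False
    if any(key not in data for key in ("type", "device", "data")):
        return False
    if not isinstance(data["type"], str) or not isinstance(data["device"], str):
        return False
    if not isinstance(data["data"], dict):
        return False
    return data["type"] in KNOWN_TYPES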
#!/usr/bin/env python2NEWLINE#NEWLINE# Distributed under the MIT/X11 software license, see the accompanyingNEWLINE# file COPYING or http://www.opensource.org/licenses/mit-license.php.NEWLINE#NEWLINENEWLINEfrom test_framework.mininode import *NEWLINEfrom test_framework.test_framework import BitcoinTestFrameworkNEWLINEfrom test_framework.util import *NEWLINEimport timeNEWLINENEWLINE'''NEWLINETest behavior of -maxuploadtarget.NEWLINENEWLINE* Verify that getdata requests for old blocks (>1week) are droppedNEWLINEif uploadtarget has been reached.NEWLINE* Verify that getdata requests for recent blocks are respected evenNEWLINEif uploadtarget has been reached.NEWLINE* Verify that the upload counters are reset after 24 hours.NEWLINE'''NEWLINENEWLINE# TestNode: bare-bones "peer". Used mostly as a conduit for a test to sendNEWLINE# p2p messages to a node, generating the messages in the main testing logic.NEWLINEclass TestNode(NodeConnCB):NEWLINE def __init__(self):NEWLINE NodeConnCB.__init__(self)NEWLINE self.connection = NoneNEWLINE self.ping_counter = 1NEWLINE self.last_pong = msg_pong()NEWLINE self.block_receive_map = {}NEWLINENEWLINE def add_connection(self, conn):NEWLINE self.connection = connNEWLINE self.peer_disconnected = FalseNEWLINENEWLINE def on_inv(self, conn, message):NEWLINE passNEWLINENEWLINE # Track the last getdata message we receive (used in the test)NEWLINE def on_getdata(self, conn, message):NEWLINE self.last_getdata = messageNEWLINENEWLINE def on_block(self, conn, message):NEWLINE message.block.calc_sha256()NEWLINE try:NEWLINE self.block_receive_map[message.block.sha256] += 1NEWLINE except KeyError:NEWLINE self.block_receive_map[message.block.sha256] = 1NEWLINENEWLINE # Spin until verack message is received from the node.NEWLINE # We use this to signal that our test can begin. 
ThisNEWLINE # is called from the testing thread, so it needs to acquireNEWLINE # the global lock.NEWLINE def wait_for_verack(self):NEWLINE def veracked():NEWLINE return self.verack_receivedNEWLINE return wait_until(veracked, timeout=10)NEWLINENEWLINE def wait_for_disconnect(self):NEWLINE def disconnected():NEWLINE return self.peer_disconnectedNEWLINE return wait_until(disconnected, timeout=10)NEWLINENEWLINE # Wrapper for the NodeConn's send_message functionNEWLINE def send_message(self, message):NEWLINE self.connection.send_message(message)NEWLINENEWLINE def on_pong(self, conn, message):NEWLINE self.last_pong = messageNEWLINENEWLINE def on_close(self, conn):NEWLINE self.peer_disconnected = TrueNEWLINENEWLINE # Sync up with the node after delivery of a blockNEWLINE def sync_with_ping(self, timeout=30):NEWLINE def received_pong():NEWLINE return (self.last_pong.nonce == self.ping_counter)NEWLINE self.connection.send_message(msg_ping(nonce=self.ping_counter))NEWLINE success = wait_until(received_pong, timeout)NEWLINE self.ping_counter += 1NEWLINE return successNEWLINENEWLINEclass MaxUploadTest(BitcoinTestFramework):NEWLINE def __init__(self):NEWLINE self.utxo = []NEWLINE self.txouts = gen_return_txouts()NEWLINE NEWLINE def add_options(self, parser):NEWLINE parser.add_option("--testbinary", dest="testbinary",NEWLINE default=os.getenv("PEWD", "brofistd"),NEWLINE help="brofistd binary to test")NEWLINENEWLINE def setup_chain(self):NEWLINE initialize_chain_clean(self.options.tmpdir, 2)NEWLINENEWLINE def setup_network(self):NEWLINE # Start a node with maxuploadtarget of 200 MB (/24h)NEWLINE self.nodes = []NEWLINE self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=200", "-blockmaxsize=999000"]))NEWLINENEWLINE def mine_full_block(self, node, address):NEWLINE # Want to create a full blockNEWLINE # We'll generate a 66k transaction below, and 14 of them are close to the 1MB block limitNEWLINE for j in xrange(14):NEWLINE if len(self.utxo) < 14:NEWLINE self.utxo = node.listunspent()NEWLINE inputs=[]NEWLINE outputs = {}NEWLINE t = self.utxo.pop()NEWLINE inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})NEWLINE remchange = t["amount"] - Decimal("0.001000")NEWLINE outputs[address]=remchangeNEWLINE # Create a basic transaction that will send change back to ourselves after accounting for a feeNEWLINE # And then insert the 128 generated transaction outs in the middle. rawtx[92] is where the #NEWLINE # of txouts is stored and is the only thing we overwrite from the original transactionNEWLINE rawtx = node.createrawtransaction(inputs, outputs)NEWLINE newtx = rawtx[0:92]NEWLINE newtx = newtx + self.txoutsNEWLINE newtx = newtx + rawtx[94:]NEWLINE # Appears to be ever so slightly faster to sign with SIGHASH_NONENEWLINE signresult = node.signrawtransaction(newtx,None,None,"NONE")NEWLINE txid = node.sendrawtransaction(signresult["hex"], True)NEWLINE # Mine a full-sized block containing the transactions we just createdNEWLINE node.generate(1)NEWLINENEWLINE def run_test(self):NEWLINE # Before we connect anything, we first set the time on the nodeNEWLINE # to be in the past, otherwise things break because the CNodeNEWLINE # time counters can't be reset backward after initializationNEWLINE old_time = int(time.time() - 2*60*60*24*7)NEWLINE self.nodes[0].setmocktime(old_time)NEWLINENEWLINE # Generate some old blocksNEWLINE self.nodes[0].generate(130)NEWLINENEWLINE # test_nodes[0] will only request old blocksNEWLINE # test_nodes[1] will only request new blocksNEWLINE # test_nodes[2] will 
test resetting the countersNEWLINE test_nodes = []NEWLINE connections = []NEWLINENEWLINE for i in xrange(3):NEWLINE test_nodes.append(TestNode())NEWLINE connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))NEWLINE test_nodes[i].add_connection(connections[i])NEWLINENEWLINE NetworkThread().start() # Start up network handling in another threadNEWLINE [x.wait_for_verack() for x in test_nodes]NEWLINENEWLINE # Test logic begins hereNEWLINENEWLINE # Now mine a big blockNEWLINE self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())NEWLINENEWLINE # Store the hash; we'll request this laterNEWLINE big_old_block = self.nodes[0].getbestblockhash()NEWLINE old_block_size = self.nodes[0].getblock(big_old_block, True)['size']NEWLINE big_old_block = int(big_old_block, 16)NEWLINENEWLINE # Advance to two days agoNEWLINE self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)NEWLINENEWLINE # Mine one more block, so that the prior block looks oldNEWLINE self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())NEWLINENEWLINE # We'll be requesting this new block tooNEWLINE big_new_block = self.nodes[0].getbestblockhash()NEWLINE new_block_size = self.nodes[0].getblock(big_new_block)['size']NEWLINE big_new_block = int(big_new_block, 16)NEWLINENEWLINE # test_nodes[0] will test what happens if we just keep requesting theNEWLINE # same big old block too many times (expect: disconnect)NEWLINENEWLINE getdata_request = msg_getdata()NEWLINE getdata_request.inv.append(CInv(2, big_old_block))NEWLINENEWLINE max_bytes_per_day = 200*1024*1024NEWLINE daily_buffer = 144 * MAX_BLOCK_SIZENEWLINE max_bytes_available = max_bytes_per_day - daily_bufferNEWLINE success_count = max_bytes_available // old_block_sizeNEWLINENEWLINE # 144MB will be reserved for relaying new blocks, so expect this toNEWLINE # succeed for ~70 tries.NEWLINE for i in xrange(success_count):NEWLINE test_nodes[0].send_message(getdata_request)NEWLINE test_nodes[0].sync_with_ping()NEWLINE assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)NEWLINENEWLINE assert_equal(len(self.nodes[0].getpeerinfo()), 3)NEWLINE # At most a couple more tries should succeed (depending on how long NEWLINE # the test has been running so far).NEWLINE for i in xrange(3):NEWLINE test_nodes[0].send_message(getdata_request)NEWLINE test_nodes[0].wait_for_disconnect()NEWLINE assert_equal(len(self.nodes[0].getpeerinfo()), 2)NEWLINE print "Peer 0 disconnected after downloading old block too many times"NEWLINENEWLINE # Requesting the current block on test_nodes[1] should succeed indefinitely,NEWLINE # even when over the max upload target.NEWLINE # We'll try 200 timesNEWLINE getdata_request.inv = [CInv(2, big_new_block)]NEWLINE for i in xrange(200):NEWLINE test_nodes[1].send_message(getdata_request)NEWLINE test_nodes[1].sync_with_ping()NEWLINE assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)NEWLINENEWLINE print "Peer 1 able to repeatedly download new block"NEWLINENEWLINE # But if test_nodes[1] tries for an old block, it gets disconnected too.NEWLINE getdata_request.inv = [CInv(2, big_old_block)]NEWLINE test_nodes[1].send_message(getdata_request)NEWLINE test_nodes[1].wait_for_disconnect()NEWLINE assert_equal(len(self.nodes[0].getpeerinfo()), 1)NEWLINENEWLINE print "Peer 1 disconnected after trying to download old block"NEWLINENEWLINE print "Advancing system time on node to clear counters..."NEWLINENEWLINE # If we advance the time by 24 hours, then the counters should reset,NEWLINE # and test_nodes[2] should be 
able to retrieve the old block.NEWLINE self.nodes[0].setmocktime(int(time.time()))NEWLINE test_nodes[2].sync_with_ping()NEWLINE test_nodes[2].send_message(getdata_request)NEWLINE test_nodes[2].sync_with_ping()NEWLINE assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)NEWLINENEWLINE print "Peer 2 able to download old block"NEWLINENEWLINE [c.disconnect_node() for c in connections]NEWLINENEWLINE #stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1NEWLINE print "Restarting nodes with -whitelist=127.0.0.1"NEWLINE stop_node(self.nodes[0], 0)NEWLINE self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])NEWLINENEWLINE #recreate/reconnect 3 test nodesNEWLINE test_nodes = []NEWLINE connections = []NEWLINENEWLINE for i in xrange(3):NEWLINE test_nodes.append(TestNode())NEWLINE connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))NEWLINE test_nodes[i].add_connection(connections[i])NEWLINENEWLINE NetworkThread().start() # Start up network handling in another threadNEWLINE [x.wait_for_verack() for x in test_nodes]NEWLINENEWLINE #retrieve 20 blocks which should be enough to break the 1MB limitNEWLINE getdata_request.inv = [CInv(2, big_new_block)]NEWLINE for i in xrange(20):NEWLINE test_nodes[1].send_message(getdata_request)NEWLINE test_nodes[1].sync_with_ping()NEWLINE assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)NEWLINENEWLINE getdata_request.inv = [CInv(2, big_old_block)]NEWLINE test_nodes[1].send_message(getdata_request)NEWLINE test_nodes[1].wait_for_disconnect()NEWLINE assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelistNEWLINENEWLINE print "Peer 1 still connected after trying to download old block (whitelisted)"NEWLINENEWLINE [c.disconnect_node() for c in connections]NEWLINENEWLINEif __name__ == '__main__':NEWLINE MaxUploadTest().main()NEWLINE |
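The upload-target arithmetic in run_test is worth spelling out. Assuming MAX_BLOCK_SIZE is 1,000,000 bytes and the mined full block is roughly 950 kB (both assumptions; the real values come from the test framework and the node):

max_bytes_per_day = 200 * 1024 * 1024    # 209,715,200 bytes, from -maxuploadtarget=200
daily_buffer = 144 * 1000000             # 144,000,000 bytes reserved for recent blocks
max_bytes_available = max_bytes_per_day - daily_buffer   # 65,715,200 bytes
old_block_size = 950000                  # assumed size of the mined full block
print(max_bytes_available // old_block_size)             # 69, matching the "~70 tries" comment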
from asyncio import FutureNEWLINEfrom math import ceilNEWLINEfrom unittest.mock import MagicMockNEWLINEfrom typing import Sequence, Dict, Optional, TypeNEWLINENEWLINEimport pytestNEWLINEfrom _pytest.monkeypatch import MonkeyPatchNEWLINEfrom marshmallow_jsonapi import fieldsNEWLINEfrom starlette.applications import StarletteNEWLINEfrom starlette.requests import URLNEWLINEfrom starlette.responses import ResponseNEWLINEfrom starlette.testclient import TestClientNEWLINENEWLINEfrom starlette_jsonapi import metaNEWLINEfrom starlette_jsonapi.resource import BaseResourceNEWLINEfrom starlette_jsonapi.pagination import (NEWLINE BasePagination, BasePageNumberPagination,NEWLINE BaseCursorPagination, BaseOffsetPaginationNEWLINE)NEWLINEfrom starlette_jsonapi.schema import JSONAPISchemaNEWLINENEWLINENEWLINEdef test_process_query_params_called_on_init(monkeypatch: MonkeyPatch):NEWLINE paginator = BasePagination(request=MagicMock(), data=[])NEWLINE assert paginator.process_query_params() is NoneNEWLINENEWLINE process_query_params_mock = MagicMock()NEWLINE monkeypatch.setattr(BasePagination, 'process_query_params', process_query_params_mock)NEWLINE BasePagination(request=MagicMock(), data=[])NEWLINE assert process_query_params_mock.calledNEWLINENEWLINENEWLINEdef test_unimplemented_slice_throws_error():NEWLINE class TPagination(BasePagination):NEWLINE passNEWLINENEWLINE paginator = TPagination(request=MagicMock(), data=[])NEWLINE with pytest.raises(NotImplementedError):NEWLINE paginator.get_pagination()NEWLINENEWLINENEWLINEdef test_unimplemented_generate_pagination_links():NEWLINE class TPagination(BasePagination):NEWLINE def slice_data(self, params: dict = None) -> Sequence:NEWLINE return self.dataNEWLINENEWLINE paginator = TPagination(request=MagicMock(), data=[1, 2, 3])NEWLINE data, links = paginator.get_pagination()NEWLINE assert links == {}NEWLINENEWLINENEWLINEdef test_base_page_number_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[number]': 1, 'page[size]': 1}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == 1NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == paginator.default_page_numberNEWLINE assert paginator.page_size == paginator.default_page_sizeNEWLINENEWLINE # test negative page falls back to defaultNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[number]': -1}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == paginator.default_page_numberNEWLINENEWLINENEWLINEdef test_base_page_number_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_number=2, page_size=4)NEWLINE assert link == 'http://testserver/test-resource?page%5Bnumber%5D=2&page%5Bsize%5D=4'NEWLINENEWLINENEWLINEdef test_base_offset_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[offset]': 1, 'page[size]': 1}NEWLINE paginator = 
BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == 1NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == paginator.default_page_offsetNEWLINE assert paginator.page_size == paginator.default_page_sizeNEWLINENEWLINE # test negative offset falls back to defaultNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[offset]': -1}NEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == paginator.default_page_offsetNEWLINENEWLINENEWLINEdef test_base_offset_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_offset=35, page_size=4)NEWLINE assert link == 'http://testserver/test-resource?page%5Boffset%5D=35&page%5Bsize%5D=4'NEWLINENEWLINENEWLINEdef test_base_cursor_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[after]': 2, 'page[before]': 4, 'page[size]': 1}NEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_before == 4NEWLINE assert paginator.page_after == 2NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_before == paginator.default_page_beforeNEWLINE assert paginator.page_after == paginator.default_page_afterNEWLINE assert paginator.page_size == paginator.default_page_sizeNEWLINENEWLINENEWLINEdef test_base_cursor_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_after=2, page_before=6, page_size=4)NEWLINE assert link == 'http://testserver/test-resource?page%5Bsize%5D=4&page%5Bafter%5D=2&page%5Bbefore%5D=6'NEWLINENEWLINENEWLINE@pytest.fixture()NEWLINEdef pagination_app(app: Starlette):NEWLINE class TPagination(BasePageNumberPagination):NEWLINE default_page_size = 2NEWLINENEWLINE def process_query_params(self):NEWLINE super(TPagination, self).process_query_params()NEWLINENEWLINE def slice_data(self, params: dict = None) -> Sequence:NEWLINE data = self.data[(self.page_number - 1) * self.page_size: self.page_number * self.page_size]NEWLINE return dataNEWLINENEWLINE def generate_pagination_links(self, params: dict = None) -> Dict[str, Optional[str]]:NEWLINE links = dict(first=None, next=None, prev=None, last=None) # type: Dict[str, Optional[str]]NEWLINE page_count = ceil(len(self.data) / self.page_size)NEWLINENEWLINE # firstNEWLINE links['first'] = self.create_pagination_link(page_number=1, page_size=self.page_size)NEWLINENEWLINE # lastNEWLINE links['last'] = self.create_pagination_link(page_number=page_count, page_size=self.page_size)NEWLINENEWLINE # nextNEWLINE has_next = self.page_number < page_countNEWLINE if has_next:NEWLINE links['next'] = self.create_pagination_link(page_number=self.page_number + 1, 
page_size=self.page_size)NEWLINENEWLINE # previousNEWLINE has_prev = self.page_number > 1NEWLINE if has_prev:NEWLINE links['prev'] = self.create_pagination_link(page_number=self.page_number - 1, page_size=self.page_size)NEWLINENEWLINE return linksNEWLINENEWLINE class TSchema(JSONAPISchema):NEWLINE id = fields.Str(dump_only=True)NEWLINE name = fields.Str()NEWLINENEWLINE class Meta:NEWLINE type_ = 'test-resource'NEWLINENEWLINE class TResource(BaseResource):NEWLINE type_ = 'test-resource'NEWLINE schema = TSchemaNEWLINE pagination_class = TPaginationNEWLINENEWLINE async def get_many(self, *args, **kwargs) -> Response:NEWLINE data = [NEWLINE dict(id=1, name='foo'),NEWLINE dict(id=2, name='foo'),NEWLINE dict(id=3, name='foo'),NEWLINE dict(id=4, name='foo')NEWLINE ]NEWLINE return await self.to_response(await self.serialize(data, many=True, paginate=True))NEWLINENEWLINE async def get(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def post(self, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def patch(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def delete(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response({})NEWLINENEWLINE TResource.register_routes(app, '/')NEWLINE return appNEWLINENEWLINENEWLINEdef test_get_many_calls_pagination(pagination_app: Starlette, monkeypatch: MonkeyPatch):NEWLINE test_client = TestClient(app=pagination_app)NEWLINE paginate_request_mock = MagicMock(return_value=Future())NEWLINENEWLINE object_list = [dict(id=1, name='foo')]NEWLINE links = {'first': 'first', 'next': 'next'}NEWLINE paginate_request_mock.return_value.set_result((object_list, links))NEWLINENEWLINE monkeypatch.setattr(BaseResource, 'paginate_request', paginate_request_mock)NEWLINE rv = test_client.get('/test-resource/')NEWLINE assert paginate_request_mock.called # 'called_with' is an auto-created mock attribute; check the real 'called' flagNEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'first',NEWLINE 'next': 'next'NEWLINE }NEWLINE }NEWLINENEWLINENEWLINEdef test_get_many_without_pagination_class(pagination_app: Starlette):NEWLINE resource = meta.registered_resources['TResource'] # type: Type[BaseResource]NEWLINE resource.pagination_class = NoneNEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE with pytest.raises(Exception) as exc:NEWLINE test_client.get('/test-resource/')NEWLINE assert str(exc.value) == 'Pagination class must be defined to use pagination'NEWLINENEWLINENEWLINEdef test_incorrect_request_type(pagination_app: Starlette, monkeypatch: MonkeyPatch):NEWLINE test_client = TestClient(app=pagination_app)NEWLINE paginate_request_mock = MagicMock(return_value=Future())NEWLINE paginate_request_mock.return_value.set_result(([], {}))NEWLINENEWLINE monkeypatch.setattr(BaseResource, 'paginate_request', paginate_request_mock)NEWLINE rv = test_client.get('/test-resource/1')NEWLINE assert rv.status_code == 200NEWLINE paginate_request_mock.assert_not_called() # 'mock.not_called' would be a truthy auto-created attributeNEWLINENEWLINE rv = test_client.post('/test-resource/', {})NEWLINE assert rv.status_code == 200NEWLINE paginate_request_mock.assert_not_called()NEWLINENEWLINE rv = 
test_client.patch('/test-resource/1', {})NEWLINE assert rv.status_code == 200NEWLINE paginate_request_mock.assert_not_called()NEWLINENEWLINE rv = test_client.delete('/test-resource/1', )NEWLINE assert rv.status_code == 200NEWLINE paginate_request_mock.assert_not_called()NEWLINENEWLINENEWLINEdef test_specified_params(pagination_app: Starlette):NEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE # only size paramNEWLINE rv = test_client.get('/test-resource/?page[size]=1')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=1',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=1',NEWLINE 'prev': None,NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE }NEWLINE }NEWLINENEWLINE # page and size paramNEWLINE rv = test_client.get('/test-resource/?page[number]=3&page[size]=1')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '3',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=1',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE 'prev': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=1',NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE }NEWLINE }NEWLINENEWLINENEWLINEdef test_default_value_enforcement(pagination_app: Starlette):NEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE rv = test_client.get('/test-resource/')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE {NEWLINE 'id': '2',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE }NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=2',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=2',NEWLINE 'prev': None,NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=2',NEWLINE }NEWLINE }NEWLINE
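The link arithmetic in the TPagination fixture reduces to page_count = ceil(total / page_size), with next/prev present only inside the [1, page_count] range. A standalone restatement (an illustrative helper, not part of starlette_jsonapi):

from math import ceil

def link_pages(total, page_number, page_size):
    page_count = ceil(total / page_size)
    return {
        'first': 1,
        'last': page_count,
        'next': page_number + 1 if page_number < page_count else None,
        'prev': page_number - 1 if page_number > 1 else None,
    }

# Four items with page_size=2 give two pages, as exercised by the tests above.
assert link_pages(4, 1, 2) == {'first': 1, 'last': 2, 'next': 2, 'prev': None}
assert link_pages(4, 2, 2) == {'first': 1, 'last': 2, 'next': None, 'prev': 1}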
# -*- coding: utf-8 -*-NEWLINE"""NEWLINECreated on Sat Jan 16 13:04:06 2016NEWLINENEWLINE@author: lilianNEWLINE"""NEWLINENEWLINE#from onvif import ONVIFCameraNEWLINEfrom cameraUtils import IPCameraNEWLINEimport urllibNEWLINEimport loggingNEWLINEimport timeNEWLINENEWLINEclass PTZCamera(IPCamera):NEWLINENEWLINE def __init__(self, host, port ,user, passwd):NEWLINE IPCamera.__init__(self, host, port, user, passwd)NEWLINE NEWLINE self.ptzService = self.create_ptz_service()NEWLINE self.profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE self.initializePanTiltBoundaries()NEWLINE NEWLINE def initializePanTiltBoundaries(self):NEWLINE # Get PTZ configuration options for getting continuous move rangeNEWLINE request = self.ptzService.create_type('GetConfigurationOptions')NEWLINE request.ConfigurationToken = self.profile.PTZConfiguration._tokenNEWLINE ptz_configuration_options = self.ptzService.GetConfigurationOptions(request) NEWLINE NEWLINE self.XMAX = ptz_configuration_options.Spaces.ContinuousPanTiltVelocitySpace[0].XRange.MaxNEWLINE self.XMIN = ptz_configuration_options.Spaces.ContinuousPanTiltVelocitySpace[0].XRange.MinNEWLINE self.YMAX = ptz_configuration_options.Spaces.ContinuousPanTiltVelocitySpace[0].YRange.MaxNEWLINE self.YMIN = ptz_configuration_options.Spaces.ContinuousPanTiltVelocitySpace[0].YRange.MinNEWLINE self.ZMAX = ptz_configuration_options.Spaces.ContinuousZoomVelocitySpace[0].XRange.MaxNEWLINE self.ZMIN = ptz_configuration_options.Spaces.ContinuousZoomVelocitySpace[0].XRange.MinNEWLINE NEWLINE NEWLINE def getStreamUri(self):NEWLINE# return self.mediaService.GetStreamUri()[0]NEWLINE return 'rtsp://192.168.1.49:554/Streaming/Channels/1?transportmode=unicast&profile=Profile_1'NEWLINE NEWLINE NEWLINE def getStatus(self):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('GetStatus')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE NEWLINE ptzStatus = self.ptzService.GetStatus(request)NEWLINE pan = ptzStatus.Position.PanTilt._xNEWLINE tilt = ptzStatus.Position.PanTilt._yNEWLINE zoom = ptzStatus.Position.Zoom._xNEWLINENEWLINE return (pan, tilt, zoom)NEWLINE NEWLINE def continuousToRight(self):NEWLINE panVelocityFactor = self.XMAXNEWLINE tiltVelocityFactor = 0NEWLINE zoomVelocityFactor = 0NEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def continuousToLeft(self):NEWLINE panVelocityFactor = self.XMINNEWLINE tiltVelocityFactor = 0NEWLINE zoomVelocityFactor = 0NEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def continuousToUp(self):NEWLINE panVelocityFactor = 0NEWLINE tiltVelocityFactor = self.YMAXNEWLINE zoomVelocityFactor = 0NEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def continuousToDown(self):NEWLINE panVelocityFactor = 0NEWLINE tiltVelocityFactor = self.YMINNEWLINE zoomVelocityFactor = 0NEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def continuousZoomIn(self):NEWLINE panVelocityFactor = 0NEWLINE tiltVelocityFactor = 0NEWLINE zoomVelocityFactor = self.ZMAXNEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def continuousZoomOut(self):NEWLINE panVelocityFactor = 0NEWLINE tiltVelocityFactor = 0NEWLINE zoomVelocityFactor = self.ZMINNEWLINE self.continuousMove(panVelocityFactor, tiltVelocityFactor, zoomVelocityFactor)NEWLINE NEWLINE def 
continuousMove(self, panFactor, tiltFactor, zoomFactor):NEWLINE request = self.ptzService.create_type('ContinuousMove')NEWLINE request.ProfileToken = self.profile._tokenNEWLINE request.Velocity.PanTilt._x = panFactorNEWLINE request.Velocity.PanTilt._y = tiltFactorNEWLINE request.Velocity.Zoom._x = zoomFactorNEWLINE NEWLINE self.ptzService.ContinuousMove(request)NEWLINE # Wait a certain timeNEWLINE timeout = 1NEWLINE time.sleep(timeout)NEWLINE # Stop continuous moveNEWLINE self.ptzService.Stop({'ProfileToken': request.ProfileToken})NEWLINENEWLINENEWLINE def oneStepRight(self):NEWLINE status = self.getStatus()NEWLINE logging.info("Moving right from " + str(status))NEWLINE actualPan = status[0]NEWLINE actualTilt = status[1]NEWLINE NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE pan = actualPan - float(2)/360NEWLINE if pan <= -1:NEWLINE pan = 1NEWLINENEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = actualTiltNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE def oneStepLeft(self):NEWLINE status = self.getStatus()NEWLINE print "Moving left from " + str(status)NEWLINE actualPan = status[0]NEWLINE actualTilt = status[1]NEWLINE NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE pan = round(actualPan + float(2)/360 , 6)NEWLINE if pan >= 1:NEWLINE pan = -1NEWLINE print panNEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = actualTiltNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE def oneStepUp(self):NEWLINE status = self.getStatus()NEWLINE print "Moving up from " + str(status)NEWLINE actualPan = status[0]NEWLINE actualTilt = status[1]NEWLINE NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE tilt = round(actualTilt - float(2)/90, 6)NEWLINE pan = actualPanNEWLINE if tilt <= -1:NEWLINE tilt = -1NEWLINE pan = actualPanNEWLINE elif tilt >= 1:NEWLINE tilt = 1NEWLINE pan = actualPan + 180*float(2)/360NEWLINE NEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = tiltNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE NEWLINE def oneStepDown(self):NEWLINE status = self.getStatus()NEWLINE print "Moving down from " + str(status)NEWLINE actualPan = status[0]NEWLINE actualTilt = status[1]NEWLINE NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE tilt = round(actualTilt + float(2)/90, 6)NEWLINE pan = actualPanNEWLINE if tilt <= -1:NEWLINE tilt = -1NEWLINE pan = actualPanNEWLINE elif tilt >= 1:NEWLINE tilt = 1NEWLINE pan = actualPan + 180*float(2)/360NEWLINENEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = tiltNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE def oneStepZoomIn(self):NEWLINE status = self.getStatus()NEWLINE print "Zoom in from " + str(status)NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = 
self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE NEWLINE if status[2] < 0.05:NEWLINE paso = 0.07NEWLINE else:NEWLINE paso = 0.035NEWLINE NEWLINE pZoom = status[2] + pasoNEWLINE if pZoom > 1:NEWLINE pZoom = 1NEWLINE NEWLINE request.Position.Zoom._x = pZoomNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE def oneStepZoomOut(self):NEWLINE status = self.getStatus()NEWLINE print "Zoom out from " + str(status)NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE NEWLINE pZoom = status[2] - 0.01 # This step size works wellNEWLINE if pZoom < 0:NEWLINE pZoom = 0NEWLINENEWLINE request.Position.Zoom._x = pZoomNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE def continuousRight(self):NEWLINE logging.info("Continuous move to the right")NEWLINE # Fetch the current position first, mirroring oneStepRightNEWLINE status = self.getStatus()NEWLINE actualPan = status[0]NEWLINE actualTilt = status[1]NEWLINE NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE pan = actualPan - float(2)/360NEWLINE if pan <= -1:NEWLINE pan = 1NEWLINENEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = actualTiltNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE NEWLINE def moveAbsolute(self, pan, tilt, zoom = 0):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE NEWLINE# pPan = round(1 - float(pan)/180, 6)NEWLINE# pTilt = round(1 - float(tilt)/45, 6)NEWLINE# pZoom = round(float(zoom/100), 6)NEWLINE# NEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = tiltNEWLINE request.Position.Zoom._x = zoomNEWLINE absoluteMoveResponse = self.ptzService.AbsoluteMove(request)NEWLINE NEWLINE NEWLINE def setHomePosition(self):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('SetHomePosition')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE self.ptzService.SetHomePosition(request)NEWLINE NEWLINE def gotoHomePosition(self):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('GotoHomePosition')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE self.ptzService.GotoHomePosition(request)NEWLINE NEWLINE def getSnapshotUri(self):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.mediaService.create_type('GetSnapshotUri')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE response = self.mediaService.GetSnapshotUri(request)NEWLINE NEWLINE logging.info(response.Uri)NEWLINE# urllib.urlretrieve("http://10.2.1.49/onvif-http/snapshot", "local-filename.jpeg")NEWLINE NEWLINE ''' Method for testing captures on the PTZ camera '''NEWLINE def testAbsolute(self, pan, tilt, zoom = 0):NEWLINE media_profile = self.mediaService.GetProfiles()[0]NEWLINE NEWLINE request = self.ptzService.create_type('AbsoluteMove')NEWLINE request.ProfileToken = media_profile._tokenNEWLINE NEWLINE NEWLINE request.Position.PanTilt._x = panNEWLINE request.Position.PanTilt._y = tiltNEWLINE request.Position.Zoom._x = zoomNEWLINE testAbsoluteResponse = self.ptzService.AbsoluteMove(request) NEWLINE
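continuousMove above captures the ONVIF pattern this class uses: issue a ContinuousMove with a velocity vector, sleep for a fixed burst, then Stop. A hypothetical usage sketch (host, port, and credentials are placeholders; Python 2 syntax to match this module):

if __name__ == '__main__':
    cam = PTZCamera('192.168.1.49', 80, 'admin', 'secret')
    cam.continuousToRight()   # one-second pan burst at XMAX velocity, then Stop
    cam.oneStepZoomIn()       # AbsoluteMove with a small zoom increment
    print cam.getStatus()     # (pan, tilt, zoom)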
from __future__ import unicode_literalsNEWLINENEWLINEfrom django.conf import settingsNEWLINEfrom django import templateNEWLINEfrom django.contrib.humanize.templatetags.humanize import intcommaNEWLINEfrom django.template.defaultfilters import stringfilterNEWLINENEWLINEfrom wagtail.wagtailcore import hooksNEWLINEfrom wagtail.wagtailcore.models import get_navigation_menu_items, UserPagePermissionsProxy, PageViewRestrictionNEWLINEfrom wagtail.wagtailcore.utils import camelcase_to_underscore, escape_scriptNEWLINEfrom wagtail.wagtailcore.utils import cautious_slugify as _cautious_slugifyNEWLINEfrom wagtail.wagtailadmin.menu import admin_menuNEWLINENEWLINEfrom wagtail.utils.pagination import DEFAULT_PAGE_KEYNEWLINENEWLINENEWLINEregister = template.Library()NEWLINENEWLINEregister.filter('intcomma', intcomma)NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/explorer_nav.html')NEWLINEdef explorer_nav():NEWLINE return {NEWLINE 'nodes': get_navigation_menu_items()NEWLINE }NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/explorer_nav_child.html')NEWLINEdef explorer_subnav(nodes):NEWLINE return {NEWLINE 'nodes': nodesNEWLINE }NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/main_nav.html', takes_context=True)NEWLINEdef main_nav(context):NEWLINE request = context['request']NEWLINENEWLINE return {NEWLINE 'menu_html': admin_menu.render_html(request),NEWLINE 'request': request,NEWLINE }NEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef main_nav_js():NEWLINE return admin_menu.media['js']NEWLINENEWLINENEWLINE@register.filter("ellipsistrim")NEWLINEdef ellipsistrim(value, max_length):NEWLINE if len(value) > max_length:NEWLINE truncd_val = value[:max_length]NEWLINE if not len(value) == (max_length + 1) and value[max_length + 1] != " ":NEWLINE truncd_val = truncd_val[:truncd_val.rfind(" ")]NEWLINE return truncd_val + "..."NEWLINE return valueNEWLINENEWLINENEWLINE@register.filterNEWLINEdef fieldtype(bound_field):NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.field.__class__.__name__)NEWLINE except AttributeError:NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.__class__.__name__)NEWLINE except AttributeError:NEWLINE return ""NEWLINENEWLINENEWLINE@register.filterNEWLINEdef widgettype(bound_field):NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.field.widget.__class__.__name__)NEWLINE except AttributeError:NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.widget.__class__.__name__)NEWLINE except AttributeError:NEWLINE return ""NEWLINENEWLINENEWLINE@register.assignment_tag(takes_context=True)NEWLINEdef page_permissions(context, page):NEWLINE """NEWLINE Usage: {% page_permissions page as page_perms %}NEWLINE Sets the variable 'page_perms' to a PagePermissionTester object that can be queried to find outNEWLINE what actions the current logged-in user can perform on the given page.NEWLINE """NEWLINE # Create a UserPagePermissionsProxy object to represent the user's global permissions, andNEWLINE # cache it in the context for the duration of the page request, if one does not exist alreadyNEWLINE if 'user_page_permissions' not in context:NEWLINE context['user_page_permissions'] = UserPagePermissionsProxy(context['request'].user)NEWLINENEWLINE # Now retrieve a PagePermissionTester from it, specific to the given pageNEWLINE return context['user_page_permissions'].for_page(page)NEWLINENEWLINENEWLINE@register.assignment_tag(takes_context=True)NEWLINEdef test_page_is_public(context, page):NEWLINE """NEWLINE Usage: {% test_page_is_public page as is_public %}NEWLINE Sets 'is_public' to True iff there are no page view restrictions in place onNEWLINE this page.NEWLINE Caches the list of page view restrictions in the context, to avoid 
repeatedNEWLINE DB queries on repeated calls.NEWLINE """NEWLINE if 'all_page_view_restriction_paths' not in context:NEWLINE context['all_page_view_restriction_paths'] = PageViewRestriction.objects.select_related('page').values_list('page__path', flat=True)NEWLINENEWLINE is_private = any([NEWLINE page.path.startswith(restricted_path)NEWLINE for restricted_path in context['all_page_view_restriction_paths']NEWLINE ])NEWLINENEWLINE return not is_privateNEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef hook_output(hook_name):NEWLINE """NEWLINE Example: {% hook_output 'insert_editor_css' %}NEWLINE Whenever we have a hook whose functions take no parameters and return a string, this tag can be usedNEWLINE to output the concatenation of all of those return values onto the page.NEWLINE Note that the output is not escaped - it is the hook function's responsibility to escape unsafe content.NEWLINE """NEWLINE snippets = [fn() for fn in hooks.get_hooks(hook_name)]NEWLINE return ''.join(snippets)NEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef usage_count_enabled():NEWLINE return getattr(settings, 'WAGTAIL_USAGE_COUNT_ENABLED', False)NEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef base_url_setting():NEWLINE return getattr(settings, 'BASE_URL', None)NEWLINENEWLINENEWLINEclass EscapeScriptNode(template.Node):NEWLINE TAG_NAME = 'escapescript'NEWLINENEWLINE def __init__(self, nodelist):NEWLINE super(EscapeScriptNode, self).__init__()NEWLINE self.nodelist = nodelistNEWLINENEWLINE def render(self, context):NEWLINE out = self.nodelist.render(context)NEWLINE return escape_script(out)NEWLINENEWLINE @classmethodNEWLINE def handle(cls, parser, token):NEWLINE nodelist = parser.parse(('end' + EscapeScriptNode.TAG_NAME,))NEWLINE parser.delete_first_token()NEWLINE return cls(nodelist)NEWLINENEWLINEregister.tag(EscapeScriptNode.TAG_NAME, EscapeScriptNode.handle)NEWLINENEWLINENEWLINE# Helpers for Widget.render_with_errors, our extension to the Django widget API that allows widgets toNEWLINE# take on the responsibility of rendering their own error messagesNEWLINENEWLINENEWLINE@register.filterNEWLINEdef render_with_errors(bound_field):NEWLINE """NEWLINE Usage: {{ field|render_with_errors }} as opposed to {{ field }}.NEWLINE If the field (a BoundField instance) has errors on it, and the associated widget implementsNEWLINE a render_with_errors method, call that; otherwise, call the regular widget rendering mechanism.NEWLINE """NEWLINE widget = bound_field.field.widgetNEWLINE if bound_field.errors and hasattr(widget, 'render_with_errors'):NEWLINE return widget.render_with_errors(bound_field.html_name, bound_field.value(), attrs={'id': bound_field.auto_id}, errors=bound_field.errors)NEWLINE else:NEWLINE return bound_field.as_widget()NEWLINENEWLINENEWLINE@register.filterNEWLINEdef has_unrendered_errors(bound_field):NEWLINE """NEWLINE Return true if this field has errors that were not accounted for by render_with_errors, becauseNEWLINE the widget does not support the render_with_errors methodNEWLINE """NEWLINE return bound_field.errors and not hasattr(bound_field.field.widget, 'render_with_errors')NEWLINENEWLINENEWLINE@register.filter(is_safe=True)NEWLINE@stringfilterNEWLINEdef cautious_slugify(value):NEWLINE return _cautious_slugify(value)NEWLINENEWLINENEWLINE@register.simple_tag(takes_context=True)NEWLINEdef querystring(context, **kwargs):NEWLINE """NEWLINE Print out the current querystring. 
Any keyword arguments to this templateNEWLINE tag will be added to the querystring before it is printed out.NEWLINENEWLINE <a href="/page/{% querystring key='value' %}">NEWLINENEWLINE Will result in something like:NEWLINENEWLINE <a href="/page/?foo=bar&key=value">NEWLINE """NEWLINE request = context['request']NEWLINE querydict = request.GET.copy()NEWLINE # Can't do querydict.update(kwargs), because QueryDict.update() appends toNEWLINE # the list of values, instead of replacing the values.NEWLINE for key, value in kwargs.items():NEWLINE if value is None:NEWLINE # Remove the key if the value is NoneNEWLINE querydict.pop(key, None)NEWLINE else:NEWLINE # Set the key otherwiseNEWLINE querydict[key] = valueNEWLINENEWLINE return '?' + querydict.urlencode()NEWLINENEWLINENEWLINE@register.simple_tag(takes_context=True)NEWLINEdef pagination_querystring(context, page_number, page_key=DEFAULT_PAGE_KEY):NEWLINE """NEWLINE Print out a querystring with an updated page number:NEWLINENEWLINE {% if page.has_next %}NEWLINE <a href="{% pagination_querystring page.next_page_number %}">Next page</a>NEWLINE {% endif %}NEWLINE """NEWLINE return querystring(context, **{page_key: page_number})NEWLINENEWLINENEWLINE@register.inclusion_tag("wagtailadmin/pages/listing/_pagination.html",NEWLINE takes_context=True)NEWLINEdef paginate(context, page, base_url='', page_key=DEFAULT_PAGE_KEY,NEWLINE classnames=''):NEWLINE """NEWLINE Print pagination previous/next links, and the page count. Takes theNEWLINE following arguments:NEWLINENEWLINE pageNEWLINE The current page of results. This should be a Django pagination `Page`NEWLINE instanceNEWLINENEWLINE base_urlNEWLINE The base URL of the next/previous page, with no querystring.NEWLINE This is optional, and defaults to the current page by just printing theNEWLINE querystring for the next/previous page.NEWLINENEWLINE page_keyNEWLINE The name of the page variable in the query string. Defaults to the sameNEWLINE name as used in the :func:`~wagtail.utils.pagination.paginate`NEWLINE function.NEWLINENEWLINE classnamesNEWLINE Extra classes to add to the next/previous links.NEWLINE """NEWLINE request = context['request']NEWLINE return {NEWLINE 'base_url': base_url,NEWLINE 'classnames': classnames,NEWLINE 'request': request,NEWLINE 'page': page,NEWLINE 'page_key': page_key,NEWLINE 'paginator': page.paginator,NEWLINE }NEWLINE
# -*- coding: utf-8 -*-NEWLINE# Copyright 2020 Green Valley Belgium NVNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE#NEWLINE# @@license_version:1.7@@NEWLINENEWLINEimport jsonNEWLINEimport loggingNEWLINENEWLINEimport webapp2NEWLINEfrom google.appengine.ext import webappNEWLINENEWLINEfrom mcfw.properties import azzertNEWLINEfrom rogerthat.bizz.communities.communities import get_communityNEWLINEfrom rogerthat.bizz.friend_helper import FriendHelperNEWLINEfrom rogerthat.bizz.service.i18n import excel_export, excel_importNEWLINEfrom rogerthat.dal.friend import get_friends_mapNEWLINEfrom rogerthat.dal.profile import get_service_profileNEWLINEfrom rogerthat.dal.service import get_friend_serviceidentity_connectionNEWLINEfrom rogerthat.models import ProfileHashIndexNEWLINEfrom rogerthat.rpc import usersNEWLINEfrom rogerthat.rpc.service import BusinessExceptionNEWLINEfrom rogerthat.templates import renderNEWLINEfrom rogerthat.to.friends import FriendTO, FRIEND_TYPE_SERVICENEWLINEfrom rogerthat.translations import DEFAULT_LANGUAGENEWLINEfrom rogerthat.utils import safe_file_name, filename_friendly_timeNEWLINEfrom rogerthat.utils.channel import broadcast_via_iframe_resultNEWLINEfrom rogerthat.utils.crypto import md5_hexNEWLINEfrom rogerthat.utils.service import add_slash_defaultNEWLINENEWLINEtry:NEWLINE from cStringIO import StringIONEWLINEexcept ImportError:NEWLINE from StringIO import StringIONEWLINENEWLINENEWLINEclass ServicePageHandler(webapp.RequestHandler):NEWLINENEWLINE def get(self):NEWLINE service_email = self.request.GET.get('service')NEWLINE azzert(service_email)NEWLINENEWLINE user = users.get_current_user()NEWLINE service_identity_user = add_slash_default(users.User(service_email))NEWLINE azzert(get_friend_serviceidentity_connection(user, service_identity_user),NEWLINE "%s tried to get service page of service %s, but is not connected" % (user.email(), service_identity_user.email()))NEWLINENEWLINE params = {'service_email': service_email, 'container_id': 'servicePageContainer_%s' % md5_hex(service_email)}NEWLINE self.response.out.write(render('service_page', [DEFAULT_LANGUAGE], params, 'web'))NEWLINENEWLINENEWLINEclass ServiceMenuItemBrandingHandler(webapp.RequestHandler):NEWLINENEWLINE def get(self):NEWLINE service_email = self.request.GET.get('service')NEWLINE azzert(service_email)NEWLINENEWLINE user = users.get_current_user()NEWLINE service_identity_user = add_slash_default(users.User(service_email))NEWLINE azzert(get_friend_serviceidentity_connection(user, service_identity_user),NEWLINE "%s tried to get a menu item page of service %s, but is not connected" % (user.email(), service_identity_user.email()))NEWLINENEWLINE branding = self.request.GET.get('branding')NEWLINE azzert(branding)NEWLINE params = {'container_id': 'smi_branding_container_%s' %NEWLINE branding, 'branding': branding, 'service_email': service_email}NEWLINE self.response.out.write(render('smi_branding', [DEFAULT_LANGUAGE], params, 
'web'))NEWLINENEWLINENEWLINEclass ServiceAboutPageHandler(webapp.RequestHandler):NEWLINENEWLINE def get(self):NEWLINE service_email = self.request.GET.get('service')NEWLINE azzert(service_email)NEWLINENEWLINE user = users.get_current_user()NEWLINE service_identity_user = add_slash_default(users.User(service_email))NEWLINE azzert(get_friend_serviceidentity_connection(user, service_identity_user),NEWLINE "%s tried to get About page of service %s, but is not connected" % (user.email(), service_identity_user.email()))NEWLINENEWLINE helper = FriendHelper.from_data_store(service_identity_user, FRIEND_TYPE_SERVICE)NEWLINE service = FriendTO.fromDBFriendMap(helper, get_friends_map(user), service_identity_user,NEWLINE includeServiceDetails=True, targetUser=user)NEWLINE azzert(service.type == FriendTO.TYPE_SERVICE)NEWLINENEWLINE params = {'service': service,NEWLINE 'service_name': service.name or service.email,NEWLINE 'container_id': 'serviceAboutPageContainer_%s' % md5_hex(service_email)}NEWLINE self.response.out.write(render('service_about', [DEFAULT_LANGUAGE], params, 'web'))NEWLINENEWLINENEWLINEclass EditableTranslationSetExcelDownloadHandler(webapp2.RequestHandler):NEWLINENEWLINE def get(self):NEWLINE browser_timezone_str = self.request.get('tz_offset', '0')NEWLINE try:NEWLINE browser_timezone = int(browser_timezone_str)NEWLINE except ValueError:NEWLINE logging.warning("Invalid browser timezone offset: [%s]" % browser_timezone_str)NEWLINE browser_timezone = 0NEWLINE if abs(browser_timezone) > 24 * 3600:NEWLINE logging.warning("Invalid browser timezone offset: [%s]" % browser_timezone_str)NEWLINE browser_timezone = 0NEWLINENEWLINE service_user = users.get_current_user()NEWLINE book, latest_export_timestamp = excel_export(service_user, browser_timezone)NEWLINENEWLINE # ReturnNEWLINE output = StringIO()NEWLINE book.save(output)NEWLINE output.seek(0)NEWLINENEWLINE filename = "Rogerthat_%s_%s.xls" % (filename_friendly_time(latest_export_timestamp), service_user.email())NEWLINENEWLINE self.response.headers['Content-Type'] = 'application/vnd.ms-excel'NEWLINE self.response.headers['Content-Disposition'] = 'attachment; filename=%s' % safe_file_name(filename)NEWLINE self.response.out.write(output.getvalue())NEWLINENEWLINENEWLINEclass PostEditableTranslationSetExcelHandler(webapp2.RequestHandler):NEWLINENEWLINE def post(self):NEWLINE import xlrdNEWLINE try:NEWLINE service_user = users.get_current_user()NEWLINENEWLINE file_ = self.request.POST.get('file').fileNEWLINE book = xlrd.open_workbook(file_contents=file_.read())NEWLINENEWLINE excel_import(service_user, book)NEWLINE except BusinessException as be:NEWLINE self.response.out.write(broadcast_via_iframe_result(NEWLINE u'rogerthat.service.translations.post_result', error=be.message))NEWLINE returnNEWLINE except:NEWLINE self.response.out.write(broadcast_via_iframe_result(NEWLINE u'rogerthat.service.translations.post_result', error=u"Unknown error has occurred."))NEWLINE logging.exception("Failure receiving translations!")NEWLINE returnNEWLINE self.response.out.write(broadcast_via_iframe_result(u'rogerthat.service.translations.post_result'))NEWLINENEWLINENEWLINEclass GetServiceAppHandler(webapp2.RequestHandler):NEWLINENEWLINE def get_default_app_id(self, user_hash):NEWLINE index = ProfileHashIndex.get(ProfileHashIndex.create_key(user_hash))NEWLINE if not index:NEWLINE logging.debug('No profile found with user_hash %s', user_hash)NEWLINE return NoneNEWLINE profile = get_service_profile(index.user)NEWLINE if not profile:NEWLINE logging.debug('Profile not 
found: %s', index.user)NEWLINE            return NoneNEWLINE        community = get_community(profile.community_id)NEWLINE        return community.default_appNEWLINENEWLINE    def get(self):NEWLINE        user_hash = self.request.GET['user']NEWLINE        self.response.out.write(json.dumps({'app_id': self.get_default_app_id(user_hash)}))NEWLINE |
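# A minimal sketch (not from this codebase) of how the handlers above are typically mounted in a webapp2 WSGI application; the URL paths are assumptions and the import is hypothetical, since the real route table lives elsewhere.NEWLINEimport webapp2NEWLINENEWLINE# hypothetical import path for the handlers defined above:NEWLINE# from rogerthat.pages.service_page import ServicePageHandler, ServiceMenuItemBrandingHandler, ServiceAboutPageHandler, GetServiceAppHandlerNEWLINENEWLINEapp = webapp2.WSGIApplication([NEWLINE    ('/mobi/service/page', ServicePageHandler),NEWLINE    ('/mobi/service/menu_item/branding', ServiceMenuItemBrandingHandler),NEWLINE    ('/mobi/service/about', ServiceAboutPageHandler),NEWLINE    ('/mobi/service/app', GetServiceAppHandler),NEWLINE])NEWLINE |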
NEWLINEfrom e2cnn.kernels import KernelBasis, EmptyBasisExceptionNEWLINEfrom e2cnn.gspaces import *NEWLINEfrom e2cnn.nn import FieldTypeNEWLINEfrom .. import utilsNEWLINENEWLINEfrom .basisexpansion import BasisExpansionNEWLINEfrom .basisexpansion_singleblock import block_basisexpansionNEWLINENEWLINEfrom collections import defaultdictNEWLINENEWLINEfrom typing import Callable, List, Iterable, Dict, UnionNEWLINENEWLINEimport torchNEWLINEimport numpy as npNEWLINENEWLINENEWLINE__all__ = ["BlocksBasisExpansion"]NEWLINENEWLINENEWLINEclass BlocksBasisExpansion(BasisExpansion):NEWLINE    NEWLINE    def __init__(self,NEWLINE                 in_type: FieldType,NEWLINE                 out_type: FieldType,NEWLINE                 points: np.ndarray,NEWLINE                 sigma: List[float],NEWLINE                 rings: List[float],NEWLINE                 basis_filter: Callable[[dict], bool] = None,NEWLINE                 recompute: bool = False,NEWLINE                 **kwargsNEWLINE                 ):NEWLINE        r"""NEWLINE        NEWLINE        With this algorithm, the expansion is done on the intertwiners of the fields' representations pairs in input andNEWLINE        output.NEWLINE        NEWLINE        Args:NEWLINE            in_type (FieldType): the input field typeNEWLINE            out_type (FieldType): the output field typeNEWLINE            points (~numpy.ndarray): points where the analytical basis should be sampledNEWLINE            sigma (list): width of each ring where the bases are sampledNEWLINE            rings (list): radii of the rings where to sample the basesNEWLINE            basis_filter (callable, optional): filter for the basis elements. Should take a dictionary containing anNEWLINE                    element's attributes and return whether to keep it or not.NEWLINE            recompute (bool, optional): whether to recompute new bases or reuse, if possible, already built tensors.NEWLINE            **kwargs: keyword arguments specific to the groups and basis usedNEWLINE        NEWLINE        Attributes:NEWLINE            S (int): number of points where the filters are sampledNEWLINE        NEWLINE        """NEWLINENEWLINE        assert in_type.gspace == out_type.gspaceNEWLINE        assert isinstance(in_type.gspace, GeneralOnR2)NEWLINE        NEWLINE        super(BlocksBasisExpansion, self).__init__()NEWLINE        self._in_type = in_typeNEWLINE        self._out_type = out_typeNEWLINE        self._input_size = in_type.sizeNEWLINE        self._output_size = out_type.sizeNEWLINE        self.points = pointsNEWLINE        NEWLINE        # int: number of points where the filters are sampledNEWLINE        self.S = self.points.shape[1]NEWLINENEWLINE        space = in_type.gspaceNEWLINENEWLINE        # we group the basis vectors by their input and output representationsNEWLINE        _block_expansion_modules = {}NEWLINE        NEWLINE        # iterate through all different pairs of input/output representationsNEWLINE        # and, for each of them, build a basisNEWLINE        for i_repr in in_type._unique_representations:NEWLINE            for o_repr in out_type._unique_representations:NEWLINE                reprs_names = (i_repr.name, o_repr.name)NEWLINE                try:NEWLINE                    NEWLINE                    basis = space.build_kernel_basis(i_repr, o_repr,NEWLINE                                                     sigma=sigma,NEWLINE                                                     rings=rings,NEWLINE                                                     **kwargs)NEWLINE                    NEWLINE                    block_expansion = block_basisexpansion(basis, points, basis_filter, recompute=recompute)NEWLINE                    _block_expansion_modules[reprs_names] = block_expansionNEWLINE                    NEWLINE                    # register the block expansion as a submoduleNEWLINE                    self.add_module(f"block_expansion_{reprs_names}", block_expansion)NEWLINE                    NEWLINE                except EmptyBasisException:NEWLINE                    # no basis exists for this pair of representationsNEWLINE                    passNEWLINENEWLINE        self._n_pairs = len(in_type._unique_representations) * len(out_type._unique_representations)NEWLINENEWLINE        # the list of all pairs of input/output representations which don't have an empty basisNEWLINE        self._representations_pairs = 
sorted(list(_block_expansion_modules.keys()))NEWLINE        NEWLINE        # retrieve for each representation in both input and output fields:NEWLINE        # - the number of its occurrences,NEWLINE        # - the indices where it occurs andNEWLINE        # - whether its occurrences are contiguous or notNEWLINE        self._in_count, _in_indices, _in_contiguous = _retrieve_indices(in_type)NEWLINE        self._out_count, _out_indices, _out_contiguous = _retrieve_indices(out_type)NEWLINE        NEWLINE        # compute the attributes and an id for each basis element (and, so, of each parameter)NEWLINE        basis_ids = _compute_attrs_and_ids(in_type, out_type, _block_expansion_modules)NEWLINE        NEWLINE        self._weights_ranges = {}NEWLINENEWLINE        last_weight_position = 0NEWLINENEWLINE        self._ids_to_basis = {}NEWLINE        self._basis_to_ids = []NEWLINE        NEWLINE        self._contiguous = {}NEWLINE        NEWLINE        # iterate through the different groups of blocksNEWLINE        # i.e., through all input/output pairsNEWLINE        for io_pair in self._representations_pairs:NEWLINE            NEWLINE            self._contiguous[io_pair] = _in_contiguous[io_pair[0]] and _out_contiguous[io_pair[1]]NEWLINE            NEWLINE            # build the indices tensorsNEWLINE            if self._contiguous[io_pair]:NEWLINE                in_indices = [NEWLINE                    _in_indices[io_pair[0]].min(),NEWLINE                    _in_indices[io_pair[0]].max() + 1,NEWLINE                    _in_indices[io_pair[0]].max() + 1 - _in_indices[io_pair[0]].min()NEWLINE                ]NEWLINE                out_indices = [NEWLINE                    _out_indices[io_pair[1]].min(),NEWLINE                    _out_indices[io_pair[1]].max() + 1,NEWLINE                    _out_indices[io_pair[1]].max() + 1 - _out_indices[io_pair[1]].min()NEWLINE                ]NEWLINE                NEWLINE                setattr(self, 'in_indices_{}'.format(io_pair), in_indices)NEWLINE                setattr(self, 'out_indices_{}'.format(io_pair), out_indices)NEWLINENEWLINE            else:NEWLINE                out_indices, in_indices = torch.meshgrid([_out_indices[io_pair[1]], _in_indices[io_pair[0]]])NEWLINE                in_indices = in_indices.reshape(-1)NEWLINE                out_indices = out_indices.reshape(-1)NEWLINE                NEWLINE                # register the indices tensors and the bases tensors as parameters of this moduleNEWLINE                self.register_buffer('in_indices_{}'.format(io_pair), in_indices)NEWLINE                self.register_buffer('out_indices_{}'.format(io_pair), out_indices)NEWLINE            NEWLINE            # count the actual number of parametersNEWLINE            total_weights = len(basis_ids[io_pair])NEWLINENEWLINE            for i, id in enumerate(basis_ids[io_pair]):NEWLINE                self._ids_to_basis[id] = last_weight_position + iNEWLINE            NEWLINE            self._basis_to_ids += basis_ids[io_pair]NEWLINE            NEWLINE            # evaluate the indices in the global weights tensor to use for the basis belonging to this groupNEWLINE            self._weights_ranges[io_pair] = (last_weight_position, last_weight_position + total_weights)NEWLINE            NEWLINE            # increment the position counterNEWLINE            last_weight_position += total_weightsNEWLINENEWLINE    def get_basis_names(self) -> List[str]:NEWLINE        return self._basis_to_idsNEWLINE    NEWLINE    def get_element_info(self, name: Union[str, int]) -> Dict:NEWLINE        if isinstance(name, str):NEWLINE            idx = self._ids_to_basis[name]NEWLINE        else:NEWLINE            idx = nameNEWLINE        NEWLINE        reprs_names = NoneNEWLINE        relative_idx = NoneNEWLINE        for pair, idx_range in self._weights_ranges.items():NEWLINE            if idx_range[0] <= idx < idx_range[1]:NEWLINE                reprs_names = pairNEWLINE                relative_idx = idx - idx_range[0]NEWLINE                breakNEWLINE        assert reprs_names is not None and relative_idx is not NoneNEWLINE        NEWLINE        block_expansion = getattr(self, f"block_expansion_{reprs_names}")NEWLINE        
block_idx = relative_idx // block_expansion.dimension()NEWLINE        relative_idx = relative_idx % block_expansion.dimension()NEWLINE        NEWLINE        attr = block_expansion.get_element_info(relative_idx).copy()NEWLINE        NEWLINE        block_count = 0NEWLINE        out_irreps_count = 0NEWLINE        for o, o_repr in enumerate(self._out_type.representations):NEWLINE            in_irreps_count = 0NEWLINE            for i, i_repr in enumerate(self._in_type.representations):NEWLINE                NEWLINE                if reprs_names == (i_repr.name, o_repr.name):NEWLINE                    NEWLINE                    if block_count == block_idx:NEWLINENEWLINE                        # retrieve the attributes of each basis element and build a new list ofNEWLINE                        # attributes adding information specific to the current blockNEWLINE                        attr.update({NEWLINE                            "in_irreps_position": in_irreps_count + attr["in_irrep_idx"],NEWLINE                            "out_irreps_position": out_irreps_count + attr["out_irrep_idx"],NEWLINE                            "in_repr": reprs_names[0],NEWLINE                            "out_repr": reprs_names[1],NEWLINE                            "in_field_position": i,NEWLINE                            "out_field_position": o,NEWLINE                        })NEWLINE                        NEWLINE                        # build the ids of the basis vectorsNEWLINE                        # add names and indices of the input and output fieldsNEWLINE                        id = '({}-{},{}-{})'.format(i_repr.name, i, o_repr.name, o)NEWLINE                        # add the original id in the block submoduleNEWLINE                        id += "_" + attr["id"]NEWLINE                        NEWLINE                        # update with the new idNEWLINE                        attr["id"] = idNEWLINE                        NEWLINE                        attr["idx"] = idxNEWLINE                        NEWLINE                        return attrNEWLINE                    NEWLINE                    block_count += 1NEWLINE                NEWLINE                in_irreps_count += len(i_repr.irreps)NEWLINE            NEWLINE            out_irreps_count += len(o_repr.irreps)NEWLINE        NEWLINE        raise ValueError(f"Parameter with index {idx} not found!")NEWLINENEWLINE    def get_basis_info(self) -> Iterable:NEWLINE        NEWLINE        out_irreps_counts = [0]NEWLINE        out_block_counts = defaultdict(list)NEWLINE        for o, o_repr in enumerate(self._out_type.representations):NEWLINE            out_irreps_counts.append(out_irreps_counts[-1] + len(o_repr.irreps))NEWLINE            out_block_counts[o_repr.name].append(o)NEWLINE        NEWLINE        in_irreps_counts = [0]NEWLINE        in_block_counts = defaultdict(list)NEWLINE        for i, i_repr in enumerate(self._in_type.representations):NEWLINE            in_irreps_counts.append(in_irreps_counts[-1] + len(i_repr.irreps))NEWLINE            in_block_counts[i_repr.name].append(i)NEWLINENEWLINE        # iterate through the different groups of blocksNEWLINE        # i.e., through all input/output pairsNEWLINE        idx = 0NEWLINE        for reprs_names in self._representations_pairs:NEWLINENEWLINE            block_expansion = getattr(self, f"block_expansion_{reprs_names}")NEWLINE            NEWLINE            for o in out_block_counts[reprs_names[1]]:NEWLINE                out_irreps_count = out_irreps_counts[o]NEWLINE                for i in in_block_counts[reprs_names[0]]:NEWLINE                    in_irreps_count = in_irreps_counts[i]NEWLINE                    NEWLINE                    # retrieve the attributes of each basis element and build a new list ofNEWLINE                    # attributes adding information specific to the current blockNEWLINE                    for attr in block_expansion.get_basis_info():NEWLINE                        attr = attr.copy()NEWLINE                        attr.update({NEWLINE                            "in_irreps_position": in_irreps_count + attr["in_irrep_idx"],NEWLINE                            "out_irreps_position": out_irreps_count + attr["out_irrep_idx"],NEWLINE                            "in_repr": reprs_names[0],NEWLINE                            "out_repr": reprs_names[1],NEWLINE                            "in_field_position": i,NEWLINE                            "out_field_position": o,NEWLINE                        })NEWLINE                        NEWLINE                        # build the ids of the basis vectorsNEWLINE                        # add names and indices of the input and output fieldsNEWLINE                        id = '({}-{},{}-{})'.format(reprs_names[0], i, reprs_names[1], o)NEWLINE                        # add the original id in the block submoduleNEWLINE                        id += "_" + attr["id"]NEWLINE                        NEWLINE                        # update with the new idNEWLINE                        attr["id"] = idNEWLINE                        NEWLINE                        attr["idx"] = idxNEWLINE                        idx += 1NEWLINE                        NEWLINE                        
yield attrNEWLINENEWLINE    def dimension(self) -> int:NEWLINE        return len(self._ids_to_basis)NEWLINENEWLINE    def _expand_block(self, weights, io_pair):NEWLINE        # retrieve the basisNEWLINE        block_expansion = getattr(self, f"block_expansion_{io_pair}")NEWLINENEWLINE        # retrieve the linear coefficients for the basis expansionNEWLINE        coefficients = weights[self._weights_ranges[io_pair][0]:self._weights_ranges[io_pair][1]]NEWLINE        NEWLINE        # reshape coefficients for the batch matrix multiplicationNEWLINE        coefficients = coefficients.view(-1, block_expansion.dimension())NEWLINE        NEWLINE        # expand the current subset of basis vectors and set the result in the appropriate place in the filterNEWLINE        filter = block_expansion(coefficients)NEWLINE        k, o, i, p = filter.shapeNEWLINE        NEWLINE        filter = filter.view(self._out_count[io_pair[1]],NEWLINE                             self._in_count[io_pair[0]],NEWLINE                             o,NEWLINE                             i,NEWLINE                             self.S,NEWLINE                             )NEWLINE        filter = filter.transpose(1, 2)NEWLINE        return filterNEWLINE    NEWLINE    def forward(self, weights: torch.Tensor) -> torch.Tensor:NEWLINE        """NEWLINE        Forward step of the Module which expands the basis and returns the filter builtNEWLINENEWLINE        Args:NEWLINE            weights (torch.Tensor): the learnable weights used to linearly combine the basis filtersNEWLINENEWLINE        Returns:NEWLINE            the filter builtNEWLINENEWLINE        """NEWLINE        assert weights.shape[0] == self.dimension()NEWLINE        assert len(weights.shape) == 1NEWLINE        NEWLINE        if self._n_pairs == 1:NEWLINE            # if there is only one block (i.e. one type of input field and one type of output field),NEWLINE            # we can return the expanded block immediately, instead of copying it inside a preallocated empty tensorNEWLINE            io_pair = self._representations_pairs[0]NEWLINE            in_indices = getattr(self, f"in_indices_{io_pair}")NEWLINE            out_indices = getattr(self, f"out_indices_{io_pair}")NEWLINE            filter = self._expand_block(weights, io_pair).reshape(out_indices[2], in_indices[2], self.S)NEWLINE        NEWLINE        else:NEWLINE            NEWLINE            # build the tensor which will contain the filterNEWLINE            filter = torch.zeros(self._output_size, self._input_size, self.S, device=weights.device)NEWLINENEWLINE            # iterate through all input-output field representations pairsNEWLINE            for io_pair in self._representations_pairs:NEWLINE                NEWLINE                # retrieve the indicesNEWLINE                in_indices = getattr(self, f"in_indices_{io_pair}")NEWLINE                out_indices = getattr(self, f"out_indices_{io_pair}")NEWLINE                NEWLINE                # expand the current subset of basis vectors and set the result in the appropriate place in the filterNEWLINE                expanded = self._expand_block(weights, io_pair)NEWLINE                NEWLINE                if self._contiguous[io_pair]:NEWLINE                    filter[NEWLINE                        out_indices[0]:out_indices[1],NEWLINE                        in_indices[0]:in_indices[1],NEWLINE                        :,NEWLINE                    ] = expanded.reshape(out_indices[2], in_indices[2], self.S)NEWLINE                else:NEWLINE                    filter[NEWLINE                        out_indices,NEWLINE                        in_indices,NEWLINE                        :,NEWLINE                    ] = expanded.reshape(-1, self.S)NEWLINENEWLINE        # return the new filterNEWLINE        return filterNEWLINENEWLINENEWLINEdef _retrieve_indices(type: FieldType):NEWLINE    fiber_position = 0NEWLINE    _indices = defaultdict(list)NEWLINE    _count = defaultdict(int)NEWLINE    _contiguous = {}NEWLINE    NEWLINE    for repr in type.representations:NEWLINE        _indices[repr.name] += list(range(fiber_position, fiber_position + repr.size))NEWLINE        fiber_position += repr.sizeNEWLINE        _count[repr.name] += 1NEWLINE    NEWLINE    for name, indices in _indices.items():NEWLINE        _contiguous[name] = utils.check_consecutive_numbers(indices)NEWLINE        
_indices[name] = torch.LongTensor(indices)NEWLINE    NEWLINE    return _count, _indices, _contiguousNEWLINENEWLINENEWLINEdef _compute_attrs_and_ids(in_type, out_type, block_submodules):NEWLINE    NEWLINE    basis_ids = defaultdict(list)NEWLINE    NEWLINE    # iterate over all blocksNEWLINE    # each block is associated to an input/output representations pairNEWLINE    out_fiber_position = 0NEWLINE    out_irreps_count = 0NEWLINE    for o, o_repr in enumerate(out_type.representations):NEWLINE        in_fiber_position = 0NEWLINE        in_irreps_count = 0NEWLINE        for i, i_repr in enumerate(in_type.representations):NEWLINE            NEWLINE            reprs_names = (i_repr.name, o_repr.name)NEWLINE            NEWLINE            # if a basis for the space of kernels between the current pair of representations existsNEWLINE            if reprs_names in block_submodules:NEWLINE                NEWLINE                # retrieve the attributes of each basis element and build a new list ofNEWLINE                # attributes adding information specific to the current blockNEWLINE                ids = []NEWLINE                for attr in block_submodules[reprs_names].get_basis_info():NEWLINE                    # build the ids of the basis vectorsNEWLINE                    # add names and indices of the input and output fieldsNEWLINE                    id = '({}-{},{}-{})'.format(i_repr.name, i, o_repr.name, o)NEWLINE                    # add the original id in the block submoduleNEWLINE                    id += "_" + attr["id"]NEWLINE                    NEWLINE                    ids.append(id)NEWLINENEWLINE                # append the ids of the basis vectorsNEWLINE                basis_ids[reprs_names] += idsNEWLINE            NEWLINE            in_fiber_position += i_repr.sizeNEWLINE            in_irreps_count += len(i_repr.irreps)NEWLINE        out_fiber_position += o_repr.sizeNEWLINE        out_irreps_count += len(o_repr.irreps)NEWLINE    NEWLINE    return basis_idsNEWLINE |
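# A minimal usage sketch for the BlocksBasisExpansion module above. The import path of the module and the sigma/rings values are assumptions; forward() turns a weight vector of length dimension() into a filter sampled on the S points.NEWLINEimport numpy as npNEWLINEimport torchNEWLINEfrom e2cnn.gspaces import Rot2dOnR2NEWLINEfrom e2cnn.nn import FieldTypeNEWLINE# assumed location of this module inside e2cnn:NEWLINE# from e2cnn.nn.modules.r2_conv.basisexpansion_blocks import BlocksBasisExpansionNEWLINENEWLINEgspace = Rot2dOnR2(N=8)NEWLINEin_type = FieldType(gspace, [gspace.regular_repr])NEWLINEout_type = FieldType(gspace, 2 * [gspace.regular_repr])NEWLINENEWLINE# a 5x5 grid of sampling points, shape (2, S) with S = 25NEWLINEaxis = np.linspace(-2.0, 2.0, 5)NEWLINEpoints = np.stack(np.meshgrid(axis, axis)).reshape(2, -1)NEWLINENEWLINEexpansion = BlocksBasisExpansion(in_type, out_type, points, sigma=[0.6, 0.6, 0.6], rings=[0.0, 1.0, 2.0])NEWLINEweights = torch.randn(expansion.dimension())NEWLINEfilters = expansion(weights)  # shape: (out_type.size, in_type.size, 25)NEWLINE |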
#!/usr/bin/pythonNEWLINE# -*- coding: utf8 -*-NEWLINENEWLINE# This code is based on: T.Davidson, F.Kloosterman, M.Wilson "Hippocampal replay of extended experience",NEWLINE# in Neuron, vol. 63, pp. 497-507, 2009NEWLINE# difference: \tau_i(x) (rate parameters) are known (from poisson_proc.py and generate_spike_train.py)NEWLINENEWLINEimport numpy as npNEWLINEfrom scipy.misc import factorialNEWLINEimport matplotlib.pyplot as pltNEWLINEimport osNEWLINENEWLINEfInSpikes = 'spikes.npz'NEWLINEfInPF = 'PFstarts.npz'NEWLINEfOut = 'route_0.005.npz'NEWLINENEWLINEtempRes = 0.005 # [s]NEWLINEspaRes = 2*np.pi / 360.0 # [rad] ( == 1 degree)NEWLINEN = 4000NEWLINENEWLINESWBasePath = '/home/bandi/workspace/KOKI/SharpWaves' # os.path.split(os.path.split(__file__)[0])[0]NEWLINENEWLINEspatialPoints = np.linspace(0, 2*np.pi, int(2*np.pi / spaRes))NEWLINEsamplingTimes = np.linspace(0, 10, int(10.0 / tempRes)+1)NEWLINENEWLINE# (constants from poisson_proc.py:)NEWLINElRoute = 300 # circumference [cm]NEWLINElPlaceField = 30 # [cm]NEWLINEr = lRoute / (2 * np.pi) # radius [cm]NEWLINEphiPFRad = lPlaceField / r # (angle of) place field [rad]NEWLINEavgRateInField = 20.0 # avg. in-field firing rate [Hz]NEWLINENEWLINENEWLINE# list of overlapping place fieldsNEWLINEfName = os.path.join(SWBasePath, 'files', fInPF)NEWLINEnpzFile = np.load(fName)NEWLINEpfStarts = npzFile['pfStarts'].tolist()NEWLINENEWLINEoverlappingPFs = []NEWLINEfor pfStart in pfStarts:NEWLINE    overlap = []NEWLINE    pfEnd = np.mod(pfStart + phiPFRad, 2*np.pi)NEWLINE    if pfStart < (2*np.pi - phiPFRad):NEWLINE        overlap = [i for i, val in enumerate(pfStarts) if pfStart <= val and val < pfEnd]NEWLINE    else:NEWLINE        overlap = [i for i, val in enumerate(pfStarts) if pfStart <= val or val < pfEnd]NEWLINENEWLINE    overlappingPFs.append(overlap)NEWLINENEWLINENEWLINE# calculate the firing rates \tau_i(x) exactly (they are known here, not estimated from the data)NEWLINErates = []NEWLINEfor i in range(0, N):NEWLINE    tau = np.zeros((1, int(2*np.pi / spaRes)))NEWLINENEWLINE    pfEnd = np.mod(pfStarts[i] + phiPFRad, 2*np.pi)NEWLINE    mPF = pfStarts[i] + phiPFRad / 2NEWLINENEWLINE    for ind, phi in enumerate(spatialPoints):NEWLINE        if pfStarts[i] < pfEnd:NEWLINE            if pfStarts[i] <= phi and phi < pfEnd:NEWLINE                tau[0][ind] = np.cos((2*np.pi) / (2 * phiPFRad) * (phi - mPF)) * avgRateInFieldNEWLINE        else:NEWLINE            if pfStarts[i] <= phi or phi < pfEnd:NEWLINE                tau[0][ind] = np.cos((2*np.pi) / (2 * phiPFRad) * (phi - mPF)) * avgRateInFieldNEWLINENEWLINE    rates.append(tau)NEWLINENEWLINEprint 'rates calculated'NEWLINENEWLINENEWLINE# read spike timesNEWLINEfName = os.path.join(SWBasePath, 'files', fInSpikes)NEWLINEnpzFile = np.load(fName)NEWLINEspikes = npzFile['spikes'] # only for the population firing rateNEWLINEspiketimes = npzFile['spiketimes']NEWLINENEWLINE# take into account only those cells whose place fields overlap with a cell that fired in the binNEWLINEcellROI = []NEWLINEbinSpikes = []NEWLINENEWLINEfor t1, t2 in zip(samplingTimes[:-1], samplingTimes[1:]):NEWLINE    count = 0NEWLINE    tmp = [] # will be a list of lists (cells that have to be taken into account)NEWLINE    for i in range(0, N):NEWLINE        n_i = ((t1 < spiketimes[i]) & (spiketimes[i] < t2)).sum() # #{spikes of the i-th cell in the bin}NEWLINE        if n_i != 0:NEWLINE            tmp.append(overlappingPFs[i])NEWLINE            count += n_iNEWLINE    tmp2 = list(set(sorted([item for sublist in tmp for item in sublist])))NEWLINE    cellROI.append(tmp2)NEWLINE    binSpikes.append(count)NEWLINENEWLINEprint 'average spikes/bin:', np.mean(binSpikes)NEWLINENEWLINE# calc. 
mean firing rates (to decide if there is a replay or not)NEWLINEpopre = {}NEWLINENEWLINEfor i in spikes:NEWLINE    if np.floor(i[1] * 1000) not in popre:NEWLINE        popre[np.floor(i[1] * 1000)] = 1NEWLINE    else:NEWLINE        popre[np.floor(i[1] * 1000)] += 1NEWLINENEWLINE# rate correction: fill the empty 1 ms bins with 0NEWLINEfor i in range(0, 10000):NEWLINE    if i not in popre:NEWLINE        popre[i] = 0NEWLINENEWLINEexcRate = [popre[ms] for ms in range(0, 10000)] # ordered by time bin (plain popre.values() has no guaranteed order)NEWLINEmeanExcRate = np.mean(excRate)NEWLINENEWLINE# --------------------------------------------------------------------------------------------------------------------------NEWLINE# log(likelihood): log(Pr(spikes|x)) = \sum_{i=1}^N n_ilog(\frac{\Delta t \tau_i(x)}{n_i!}) - \Delta t \sum_{i=1}^N \tau_i(x)NEWLINE# --------------------------------------------------------------------------------------------------------------------------NEWLINENEWLINEdelta_t = tempRes # in sNEWLINEroute = []NEWLINEML = []NEWLINENEWLINEbin = 0NEWLINEfor t1, t2 in zip(samplingTimes[:-1], samplingTimes[1:]):NEWLINE    likelihoods = []NEWLINE    binAvgRate = np.mean(excRate[int(t1*1000):int(t2*1000)])NEWLINE    if binAvgRate >= meanExcRate / 2: # if there is replayNEWLINE        for indPhi in range(0, len(spatialPoints)):NEWLINE            likelihood1 = 0NEWLINE            likelihood2 = 0NEWLINENEWLINE            for i in cellROI[bin]: # instead of "for i in range(0, N):"NEWLINE                tmp = 0NEWLINENEWLINE                n_i = ((t1 < spiketimes[i]) & (spiketimes[i] < t2)).sum() # #{spikes of the i-th cell in the bin}NEWLINE                tau_i_phi = rates[i][0, indPhi] # firing rate of the i-th cell in a given position (on the circle)NEWLINE                if tau_i_phi != 0 and n_i != 0: # because log() can't take 0NEWLINE                    tmp = n_i * np.log(delta_t * tau_i_phi / factorial(n_i).item())NEWLINE                    # .item() is needed because factorial gives 0-d arrayNEWLINENEWLINE                likelihood1 += tmpNEWLINE                likelihood2 += tau_i_phiNEWLINE            likelihood = likelihood1 - delta_t * likelihood2NEWLINENEWLINE            likelihoods.append(likelihood)NEWLINE        likelihoods = [np.nan if x == 0 else x for x in likelihoods] # change 0s to np.nanNEWLINE        if np.isnan(likelihoods).all(): # just to make sureNEWLINE            likelihoods[0] = 0NEWLINENEWLINE        # search for the maximum of the likelihoods in a given sampling timeNEWLINE        id = np.nanargmax(likelihoods)NEWLINE        maxLikelihood = likelihoods[id]NEWLINE        place = spatialPoints[id]NEWLINE        route.append(place)NEWLINE        ML.append(maxLikelihood)NEWLINE        print 'sampling time:', str(t2 * 1000), '[ms]:', str(place), '[rad] ML:', maxLikelihoodNEWLINE        bin += 1NEWLINE    else: # if there is no replayNEWLINE        route.append(np.nan)NEWLINE        ML.append(np.nan)NEWLINE        print 'sampling time:', str(t2 * 1000), '[ms]: no replay'NEWLINE        bin += 1NEWLINENEWLINENEWLINEfName = os.path.join(SWBasePath, 'files', fOut)NEWLINEnp.savez(fName, route=route, ML=ML)NEWLINE |
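# The per-bin log-likelihood used above can also be evaluated vectorized; a sketch in Python 3 with current scipy (gammaln(n+1) standing in for log(n!), names illustrative). Unlike the script, the sums here run over all N cells rather than only the cells in cellROI.NEWLINEimport numpy as npNEWLINEfrom scipy.special import gammalnNEWLINENEWLINENEWLINEdef bin_log_likelihood(n, taus, dt):NEWLINE    # n: (N,) spike counts in one time bin; taus: (N, X) rate map tau_i(x); dt: bin width [s]NEWLINE    ok = (n[:, None] > 0) & (taus > 0)  # skip log() where n_i or tau_i(x) is 0, as the loop above doesNEWLINE    logs = np.where(ok, np.log(np.where(ok, dt * taus, 1.0)) - gammaln(n + 1)[:, None], 0.0)NEWLINE    return (n[:, None] * logs).sum(axis=0) - dt * taus.sum(axis=0)NEWLINE |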
from discord.errors import ForbiddenNEWLINENEWLINENEWLINEclass RequestLocalStartTime:NEWLINE def __init__(NEWLINE self,NEWLINE scoped_session,NEWLINE format_date_time,NEWLINE time_zone_input,NEWLINE time_zone_embed,NEWLINE start_time_embed,NEWLINE ):NEWLINE self.scoped_session = scoped_sessionNEWLINE self.format_date_time = format_date_timeNEWLINE self.time_zone_input = time_zone_inputNEWLINE self.time_zone_embed = time_zone_embedNEWLINE self.start_time_embed = start_time_embedNEWLINENEWLINE async def call(self, apollo_user, discord_user, event):NEWLINE if not apollo_user.time_zone:NEWLINE try:NEWLINE await discord_user.send(embed=self.time_zone_embed.call())NEWLINE except Forbidden:NEWLINE passNEWLINENEWLINE apollo_user.time_zone = await self.time_zone_input.call(NEWLINE discord_user, discord_user.dm_channelNEWLINE )NEWLINE with self.scoped_session.call() as session:NEWLINE session.add(apollo_user)NEWLINENEWLINE local_start_time = event.utc_start_time.to(apollo_user.time_zone)NEWLINE formatted_local_start_time = self.format_date_time.call(local_start_time)NEWLINENEWLINE embed = self.start_time_embed.call(event.title, formatted_local_start_time)NEWLINE await discord_user.send(embed=embed)NEWLINE |
'''NEWLINECreated on May 1, 2015NEWLINENEWLINE@author: zwickerNEWLINE'''NEWLINENEWLINEfrom __future__ import division, absolute_importNEWLINENEWLINEimport numpy as npNEWLINEfrom six.moves import rangeNEWLINENEWLINEfrom .lib_exp_base import LibraryExponentialBaseNEWLINEfrom ..library_numeric_base import LibraryNumericMixin, get_sensitivity_matrixNEWLINENEWLINENEWLINENEWLINEclass LibraryExponentialNumeric(LibraryNumericMixin, LibraryExponentialBase):NEWLINE """ represents a single receptor library that handles continuous mixtures,NEWLINE which are defined by their concentration mean and variance """NEWLINENEWLINE # default parameters that are used to initialize a class if not overwrittenNEWLINE parameters_default = {NEWLINE 'max_num_receptors': 28, #< prevents memory overflowsNEWLINE 'sensitivity_matrix': None, #< will be calculated if not givenNEWLINE 'sensitivity_matrix_params': None, #< parameters determining I_aiNEWLINE 'monte_carlo_steps': 'auto', #< default steps for monte carloNEWLINE 'monte_carlo_steps_min': 1e4, #< minimal steps for monte carloNEWLINE 'monte_carlo_steps_max': 1e5, #< maximal steps for monte carloNEWLINE }NEWLINE NEWLINE NEWLINE @classmethodNEWLINE def create_test_instance(cls, **kwargs):NEWLINE """ creates a test instance used for consistency tests """NEWLINE obj = super(LibraryExponentialNumeric, cls).create_test_instance(**kwargs)NEWLINENEWLINE # determine optimal parameters for the interaction matrixNEWLINE from .lib_exp_theory import LibraryExponentialLogNormalNEWLINE theory = LibraryExponentialLogNormal.from_other(obj)NEWLINE obj.choose_sensitivity_matrix(**theory.get_optimal_library())NEWLINE return objNEWLINE NEWLINENEWLINE @propertyNEWLINE def _sample_steps(self):NEWLINE """ returns the number of steps that are sampled """NEWLINE if self.parameters['monte_carlo_steps'] == 'auto':NEWLINE steps_min = self.parameters['monte_carlo_steps_min']NEWLINE steps_max = self.parameters['monte_carlo_steps_max']NEWLINE steps = np.clip(10 * 2**self.Nr, steps_min, steps_max) NEWLINE # Here, the factor 10 is an arbitrary scaling factorNEWLINE else:NEWLINE steps = self.parameters['monte_carlo_steps']NEWLINE NEWLINE return int(steps)NEWLINE NEWLINE NEWLINE def _sample_mixtures(self, steps=None):NEWLINE """ sample mixtures with uniform probability yielding single mixtures """NEWLINE NEWLINE if steps is None:NEWLINE steps = self._sample_stepsNEWLINE NEWLINE c_means = self.concentration_meansNEWLINE NEWLINE for _ in range(steps):NEWLINE # choose a mixture vector according to substrate probabilitiesNEWLINE yield np.random.exponential(size=self.Ns) * c_meansNEWLINENEWLINENEWLINE def choose_sensitivity_matrix(self, distribution, mean_sensitivity=1,NEWLINE **kwargs):NEWLINE """ chooses the sensitivity matrix """NEWLINE self.sens_mat, sens_mat_params = get_sensitivity_matrix(NEWLINE self.Nr, self.Ns, distribution, mean_sensitivity, **kwargs)NEWLINENEWLINE # save the parameters determining this matrixNEWLINE self.parameters['sensitivity_matrix_params'] = sens_mat_paramsNEWLINENEWLINE choose_sensitivity_matrix.__doc__ = get_sensitivity_matrix.__doc__ NEWLINENEWLINENEWLINE def receptor_activity(self, ret_correlations=False):NEWLINE """ calculates the average activity of each receptor """ NEWLINE return self.receptor_activity_monte_carlo(ret_correlations)NEWLINENEWLINE NEWLINE def mutual_information(self, ret_prob_activity=False):NEWLINE """ calculate the mutual information using a monte carlo strategy. 
TheNEWLINE number of steps is given by the model parameter 'monte_carlo_steps' """NEWLINE return self.mutual_information_monte_carlo(ret_prob_activity)NEWLINE NEWLINE |
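# A short usage sketch for LibraryExponentialNumeric above; the import is assumed and results vary between runs because both the sensitivity matrix and the sampled mixtures are random.NEWLINE# from .lib_exp_numeric import LibraryExponentialNumericNEWLINENEWLINElib = LibraryExponentialNumeric.create_test_instance()NEWLINEactivity = lib.receptor_activity()  # average activity of each receptorNEWLINEMI = lib.mutual_information()  # Monte Carlo estimate over _sample_steps mixturesNEWLINE |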
# create a 3x3 matrix: read 9 values into a list and display them at the endNEWLINElista = list()NEWLINEdado = list()NEWLINEfor m in range(1, 10):NEWLINE    dado.append(int(input(f'Enter value #{m}: ')))NEWLINE    lista.append(dado[:])NEWLINE    dado.clear()NEWLINEprint(f'{lista[0]}{lista[1]}{lista[2]}')NEWLINEprint(f'{lista[3]}{lista[4]}{lista[5]}')NEWLINEprint(f'{lista[6]}{lista[7]}{lista[8]}')NEWLINE |
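# The same read-and-print written with a list comprehension and an index step of 3 instead of nine hard-coded subscripts; the output format is unchanged.NEWLINEvalores = [[int(input(f'Enter value #{m}: '))] for m in range(1, 10)]NEWLINEfor linha in range(0, 9, 3):NEWLINE    print(f'{valores[linha]}{valores[linha + 1]}{valores[linha + 2]}')NEWLINE |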
# pylint: disable=redefined-builtin, wildcard-importNEWLINE"""x86 specific declaration and schedules."""NEWLINEfrom __future__ import absolute_import as _absNEWLINENEWLINEfrom .conv2d import schedule_conv2d, schedule_conv2d_nhwcNEWLINEfrom .binarize_pack import schedule_binarize_packNEWLINEfrom .binary_dense import schedule_binary_denseNEWLINEfrom .nn import *NEWLINEfrom .injective import *NEWLINEfrom .pooling import schedule_pool, schedule_global_poolNEWLINE |
"""NEWLINEDjango settings for mysite project.NEWLINENEWLINEGenerated by 'django-admin startproject' using Django 3.2.4.NEWLINENEWLINEFor more information on this file, seeNEWLINEhttps://docs.djangoproject.com/en/3.2/topics/settings/NEWLINENEWLINEFor the full list of settings and their values, seeNEWLINEhttps://docs.djangoproject.com/en/3.2/ref/settings/NEWLINE"""NEWLINENEWLINEfrom pathlib import PathNEWLINENEWLINE# Build paths inside the project like this: BASE_DIR / 'subdir'.NEWLINEBASE_DIR = Path(__file__).resolve().parent.parentNEWLINENEWLINENEWLINE# Quick-start development settings - unsuitable for productionNEWLINE# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/NEWLINENEWLINE# SECURITY WARNING: keep the secret key used in production secret!NEWLINESECRET_KEY = 'django-insecure-!cd0v*xol@f6k=q5k1i=**i(15bpm3dka8ipdd45mk+p7it!th'NEWLINENEWLINE# SECURITY WARNING: don't run with debug turned on in production!NEWLINEDEBUG = TrueNEWLINENEWLINEALLOWED_HOSTS = ["8080-chungusii-ecybermission-f14dndyoxde.ws-us33.gitpod.io", '0.0.0.0']NEWLINENEWLINENEWLINE# Application definitionNEWLINENEWLINEINSTALLED_APPS = [NEWLINE 'selfie',NEWLINE 'corsheaders',NEWLINE 'django.contrib.admin',NEWLINE 'django.contrib.auth',NEWLINE 'django.contrib.contenttypes',NEWLINE 'django.contrib.sessions',NEWLINE 'django.contrib.messages',NEWLINE 'django.contrib.staticfiles',NEWLINE]NEWLINENEWLINEMIDDLEWARE = [NEWLINE 'corsheaders.middleware.CorsMiddleware',NEWLINE 'django.middleware.common.BrokenLinkEmailsMiddleware',NEWLINE 'django.middleware.security.SecurityMiddleware',NEWLINE 'django.contrib.sessions.middleware.SessionMiddleware',NEWLINE 'django.middleware.common.CommonMiddleware',NEWLINE 'django.middleware.csrf.CsrfViewMiddleware',NEWLINE 'django.contrib.auth.middleware.AuthenticationMiddleware',NEWLINE 'django.contrib.messages.middleware.MessageMiddleware',NEWLINE 'django.middleware.clickjacking.XFrameOptionsMiddleware',NEWLINE]NEWLINENEWLINEROOT_URLCONF = 'mysite.urls'NEWLINENEWLINETEMPLATES = [NEWLINE {NEWLINE 'BACKEND': 'django.template.backends.django.DjangoTemplates',NEWLINE 'DIRS': [],NEWLINE 'APP_DIRS': True,NEWLINE 'OPTIONS': {NEWLINE 'context_processors': [NEWLINE 'django.template.context_processors.debug',NEWLINE 'django.template.context_processors.request',NEWLINE 'django.contrib.auth.context_processors.auth',NEWLINE 'django.contrib.messages.context_processors.messages',NEWLINE ],NEWLINE },NEWLINE },NEWLINE]NEWLINENEWLINEWSGI_APPLICATION = 'mysite.wsgi.application'NEWLINENEWLINENEWLINE# DatabaseNEWLINE# https://docs.djangoproject.com/en/3.2/ref/settings/#databasesNEWLINENEWLINEDATABASES = {NEWLINE 'default': {NEWLINE 'ENGINE': 'django.db.backends.sqlite3',NEWLINE 'NAME': BASE_DIR / 'db.sqlite3',NEWLINE }NEWLINE}NEWLINENEWLINENEWLINE# Password validationNEWLINE# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validatorsNEWLINENEWLINEAUTH_PASSWORD_VALIDATORS = [NEWLINE {NEWLINE 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',NEWLINE },NEWLINE {NEWLINE 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',NEWLINE },NEWLINE {NEWLINE 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',NEWLINE },NEWLINE {NEWLINE 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',NEWLINE },NEWLINE]NEWLINENEWLINECORS_ALLOW_ALL_ORIGINS = TrueNEWLINECORS_ALLOW_CREDENTIALS = TrueNEWLINENEWLINECSRF_TRUSTED_ORIGINS = ['https://platform.appgyver.com']NEWLINENEWLINEDATA_UPLOAD_MAX_MEMORY_SIZE = 
10485760NEWLINENEWLINE# InternationalizationNEWLINE# https://docs.djangoproject.com/en/3.2/topics/i18n/NEWLINENEWLINELANGUAGE_CODE = 'en-us'NEWLINENEWLINETIME_ZONE = 'UTC'NEWLINENEWLINEUSE_I18N = TrueNEWLINENEWLINEUSE_L10N = TrueNEWLINENEWLINEUSE_TZ = TrueNEWLINENEWLINENEWLINE# Static files (CSS, JavaScript, Images)NEWLINE# https://docs.djangoproject.com/en/3.2/howto/static-files/NEWLINENEWLINESTATIC_URL = '/static/'NEWLINENEWLINE# Default primary key field typeNEWLINE# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-fieldNEWLINENEWLINEDEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'NEWLINE |
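# The SECRET_KEY above is committed in plain text and explicitly marked insecure; a common pattern is to read it from the environment instead (the variable name here is illustrative).NEWLINEimport osNEWLINENEWLINESECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', 'dev-only-insecure-fallback')NEWLINE |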
#!/usr/bin/env python3NEWLINENEWLINEfrom auto.build import Builder, BuildOpts, LLVMBuilderNEWLINEfrom auto.config import DIR, RV32_LINUX, SBTNEWLINEfrom auto.utils import cat, chsuf, mkdir_if_needed, path, shellNEWLINENEWLINEimport argparseNEWLINENEWLINEclass Translator:NEWLINE def __init__(self, opts):NEWLINE self.opts = optsNEWLINENEWLINENEWLINE def _translate_obj(self, dir, obj, out):NEWLINE """ .o -> .bc """NEWLINENEWLINE opts = self.optsNEWLINE arch = opts.archNEWLINE ipath = path(dir, obj)NEWLINE opath = path(dir, out)NEWLINE flags = cat(SBT.flags, opts.sbtflags)NEWLINENEWLINE if opts.xdbg:NEWLINE # strip arch prefixNEWLINE prefix = arch.add_prefix("")NEWLINE prefix = RV32_LINUX.add_prefix(prefix)NEWLINE base = out[len(prefix):]NEWLINE # strip suffixNEWLINE base = chsuf(base, "")NEWLINE # strip modeNEWLINE p = base.rfind("-")NEWLINE if p != -1:NEWLINE base = base[:p]NEWLINE # add rv32 prefix and .a2s suffixNEWLINE a2s = RV32_LINUX.add_prefix(base) + ".a2s"NEWLINE flags = cat(flags, "-commented-asm", "-a2s", path(dir, a2s))NEWLINENEWLINE logdir = DIR.top + "/junk"NEWLINE mkdir_if_needed(logdir)NEWLINE log = path(logdir, chsuf(out, ".log"))NEWLINENEWLINE cmd = "riscv-sbt {} {} -o {} -log {}".format(NEWLINE flags, ipath, opath, log)NEWLINE shell(cmd)NEWLINENEWLINENEWLINE def translate(self):NEWLINE """ .o -> bin """NEWLINENEWLINE opts = self.optsNEWLINE obj = opts.firstNEWLINE dstdir = opts.dstdirNEWLINE out = opts.outNEWLINENEWLINE bc = out + ".bc"NEWLINE s = out + ".s"NEWLINENEWLINE # translate obj to .bcNEWLINE self._translate_obj(dstdir, obj, bc)NEWLINE # gen .llNEWLINE opts.opt = opts.xoptNEWLINE llbld = LLVMBuilder(opts)NEWLINE llbld.dis(dstdir, bc)NEWLINENEWLINE # gen .sNEWLINE if not opts.xopt:NEWLINE llbld.bc2s(dstdir, bc, s)NEWLINE else:NEWLINE opt1 = out + ".opt.bc"NEWLINE llbld.opt(dstdir, bc, opt1, printf_break=False)NEWLINE llbld.dis(dstdir, opt1)NEWLINE llbld.bc2s(dstdir, opt1, s)NEWLINENEWLINE # build .sNEWLINE opts.asm = TrueNEWLINE opts.srcdir = dstdirNEWLINE opts.dstdir = dstdirNEWLINE opts.ins = [s]NEWLINE opts.out = outNEWLINE bld = Builder(opts)NEWLINE bld.build()NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE parser = argparse.ArgumentParser(description="translate obj")NEWLINE BuildOpts.add_to_parser(parser)NEWLINE parser.add_argument("--xopt", action="store_true",NEWLINE help="optimize translated code")NEWLINE parser.add_argument("--xdbg", action="store_true",NEWLINE help="insert debug info on translated code")NEWLINE args = parser.parse_args()NEWLINENEWLINE # set xlator optsNEWLINE opts = BuildOpts.parse(args)NEWLINE opts.xopt = args.xoptNEWLINE opts.xdbg = args.xdbgNEWLINE xltr = Translator(opts)NEWLINE # translateNEWLINE xltr.translate()NEWLINE |
from unittest import TestCaseNEWLINEimport numpy as npNEWLINEimport osNEWLINEimport pickleNEWLINEimport loggingNEWLINENEWLINEfrom qcodes.data.data_array import DataArrayNEWLINEfrom qcodes.data.io import DiskIONEWLINEfrom qcodes.data.data_set import load_data, new_data, DataSetNEWLINEfrom qcodes.utils.helpers import LogCaptureNEWLINENEWLINEfrom .data_mocks import (MockFormatter, MatchIO,NEWLINE DataSet2D, DataSet1D,NEWLINE DataSetCombined, RecordingMockFormatter)NEWLINENEWLINEfrom .common import strip_qcNEWLINENEWLINENEWLINEclass TestDataArray(TestCase):NEWLINENEWLINE def test_attributes(self):NEWLINE pname = 'Betty Sue'NEWLINE plabel = 'The best apple pie this side of Wenatchee'NEWLINE pfullname = 'bert'NEWLINENEWLINE class MockParam:NEWLINE name = pnameNEWLINE label = plabelNEWLINENEWLINE def __init__(self, full_name=None):NEWLINE self.full_name = full_nameNEWLINENEWLINE name = 'Oscar'NEWLINE label = 'The grouch. GRR!'NEWLINE fullname = 'ernie'NEWLINE array_id = 24601NEWLINE set_arrays = ('awesomeness', 'chocolate content')NEWLINE shape = 'Ginornous'NEWLINE action_indices = (1, 2, 3, 4, 5)NEWLINENEWLINE p_data = DataArray(parameter=MockParam(pfullname), name=name,NEWLINE label=label, full_name=fullname)NEWLINE p_data2 = DataArray(parameter=MockParam(pfullname))NEWLINENEWLINE # explicitly given name and label override parameter valsNEWLINE self.assertEqual(p_data.name, name)NEWLINE self.assertEqual(p_data.label, label)NEWLINE self.assertEqual(p_data.full_name, fullname)NEWLINE self.assertEqual(p_data2.name, pname)NEWLINE self.assertEqual(p_data2.label, plabel)NEWLINE self.assertEqual(p_data2.full_name, pfullname)NEWLINE # test default valuesNEWLINE self.assertIsNone(p_data.array_id)NEWLINE self.assertEqual(p_data.shape, ())NEWLINE self.assertEqual(p_data.action_indices, ())NEWLINE self.assertEqual(p_data.set_arrays, ())NEWLINE self.assertIsNone(p_data.ndarray)NEWLINENEWLINE np_data = DataArray(name=name, label=label, array_id=array_id,NEWLINE set_arrays=set_arrays, shape=shape,NEWLINE action_indices=action_indices)NEWLINE self.assertEqual(np_data.name, name)NEWLINE self.assertEqual(np_data.label, label)NEWLINE # no full name or parameter - use nameNEWLINE self.assertEqual(np_data.full_name, name)NEWLINE # test simple assignmentsNEWLINE self.assertEqual(np_data.array_id, array_id)NEWLINE self.assertEqual(np_data.set_arrays, set_arrays)NEWLINE self.assertEqual(np_data.shape, shape)NEWLINE self.assertEqual(np_data.action_indices, action_indices)NEWLINENEWLINE name_data = DataArray(name=name)NEWLINE self.assertEqual(name_data.label, name)NEWLINENEWLINE blank_data = DataArray()NEWLINE self.assertIsNone(blank_data.name)NEWLINENEWLINE def test_preset_data(self):NEWLINE onetwothree = [NEWLINE # lists and tuples workNEWLINE [1.0, 2.0, 3.0],NEWLINE (1.0, 2.0, 3.0),NEWLINENEWLINE # iterators get automatically cast to floatsNEWLINE (i + 1 for i in range(3)),NEWLINE map(float, range(1, 4)),NEWLINENEWLINE # and of course numpy arrays themselves workNEWLINE np.array([1.0, 2.0, 3.0]),NEWLINE ]NEWLINENEWLINE expected123 = [1.0, 2.0, 3.0]NEWLINENEWLINE for item in onetwothree:NEWLINE data = DataArray(preset_data=item)NEWLINE self.assertEqual(data.ndarray.tolist(), expected123)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # you can re-initialize a DataArray with the same shape data,NEWLINE # but not with a different shapeNEWLINE list456 = [4, 5, 6]NEWLINE data.init_data(data=list456)NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE with self.assertRaises(ValueError):NEWLINE 
data.init_data([1, 2])NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # you can call init_data again with no data, and nothing changesNEWLINE data.init_data()NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # multidimensional works tooNEWLINE list2d = [[1, 2], [3, 4]]NEWLINE data2 = DataArray(preset_data=list2d)NEWLINE self.assertEqual(data2.ndarray.tolist(), list2d)NEWLINE self.assertEqual(data2.shape, (2, 2))NEWLINENEWLINE def test_init_data_error(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.shape = (3, )NEWLINENEWLINE # not sure when this would happen... but if you call init_dataNEWLINE # and it notices an inconsistency between shape and the actualNEWLINE # data that's already there, it raises an errorNEWLINE with self.assertRaises(ValueError):NEWLINE data.init_data()NEWLINENEWLINE def test_clear(self):NEWLINE nan = float('nan')NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.clear()NEWLINE # sometimes it's annoying that nan != nanNEWLINE self.assertEqual(repr(data.ndarray.tolist()), repr([nan, nan]))NEWLINENEWLINE def test_edit_and_mark(self):NEWLINE data = DataArray(preset_data=[[1, 2], [3, 4]])NEWLINE self.assertEqual(data[0].tolist(), [1, 2])NEWLINE self.assertEqual(data[0, 1], 2)NEWLINENEWLINE data.modified_range = NoneNEWLINE self.assertIsNone(data.last_saved_index)NEWLINENEWLINE self.assertEqual(len(data), 2)NEWLINE data[0] = np.array([5, 6])NEWLINE data[1, 0] = 7NEWLINE self.assertEqual(data.ndarray.tolist(), [[5, 6], [7, 4]])NEWLINENEWLINE self.assertEqual(data.modified_range, (0, 2))NEWLINENEWLINE # as if we saved the first two points... the third should stillNEWLINE # show as modifiedNEWLINE data.mark_saved(1)NEWLINE self.assertEqual(data.last_saved_index, 1)NEWLINE self.assertEqual(data.modified_range, (2, 2))NEWLINENEWLINE # now we save the third point... 
no modifications left.NEWLINE data.mark_saved(2)NEWLINE self.assertEqual(data.last_saved_index, 2)NEWLINE self.assertEqual(data.modified_range, None)NEWLINENEWLINE data.clear_save()NEWLINE self.assertEqual(data.last_saved_index, None)NEWLINE self.assertEqual(data.modified_range, (0, 2))NEWLINENEWLINE def test_edit_and_mark_slice(self):NEWLINE data = DataArray(preset_data=[[1] * 5] * 6)NEWLINENEWLINE self.assertEqual(data.shape, (6, 5))NEWLINE data.modified_range = NoneNEWLINENEWLINE data[:4:2, 2:] = 2NEWLINE self.assertEqual(data.tolist(), [NEWLINE [1, 1, 2, 2, 2],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 2, 2, 2],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 1, 1, 1]NEWLINE ])NEWLINE self.assertEqual(data.modified_range, (2, 14))NEWLINENEWLINE def test_repr(self):NEWLINE array2d = [[1, 2], [3, 4]]NEWLINE arrayrepr = repr(np.array(array2d))NEWLINE array_id = (3, 4)NEWLINE data = DataArray(preset_data=array2d)NEWLINENEWLINE self.assertEqual(repr(data), 'DataArray[2,2]:\n' + arrayrepr)NEWLINENEWLINE data.array_id = array_idNEWLINE self.assertEqual(repr(data), 'DataArray[2,2]: ' + str(array_id) +NEWLINE '\n' + arrayrepr)NEWLINENEWLINE def test_nest_empty(self):NEWLINE data = DataArray()NEWLINENEWLINE self.assertEqual(data.shape, ())NEWLINENEWLINE mock_set_array = 'not really an array but we don\'t check'NEWLINE mock_set_array2 = 'another one'NEWLINENEWLINE data.nest(2, action_index=44, set_array=mock_set_array)NEWLINE data.nest(3, action_index=66, set_array=mock_set_array2)NEWLINENEWLINE # the array doesn't exist until you initialize itNEWLINE self.assertIsNone(data.ndarray)NEWLINENEWLINE # but other attributes are setNEWLINE self.assertEqual(data.shape, (3, 2))NEWLINE self.assertEqual(data.action_indices, (66, 44))NEWLINE self.assertEqual(data.set_arrays, (mock_set_array2, mock_set_array))NEWLINENEWLINE data.init_data()NEWLINE self.assertEqual(data.ndarray.shape, (3, 2))NEWLINENEWLINE # after initializing data, you can't nest anymore because this isn'tNEWLINE # a preset arrayNEWLINE with self.assertRaises(RuntimeError):NEWLINE data.nest(4)NEWLINENEWLINE def test_nest_preset(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.nest(3)NEWLINE self.assertEqual(data.shape, (3, 2))NEWLINE self.assertEqual(data.ndarray.tolist(), [[1, 2]] * 3)NEWLINE self.assertEqual(data.action_indices, ())NEWLINE self.assertEqual(data.set_arrays, (data,))NEWLINENEWLINE # test that the modified range gets correctly set toNEWLINE # (0, 2*3-1 = 5)NEWLINE self.assertEqual(data.modified_range, (0, 5))NEWLINENEWLINE # you need a set array for all but the inner nestingNEWLINE with self.assertRaises(TypeError):NEWLINE data.nest(4)NEWLINENEWLINE def test_data_set_property(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE self.assertIsNone(data.data_set)NEWLINENEWLINE mock_data_set = 'pretend this is a DataSet, we don\'t check type'NEWLINE mock_data_set2 = 'you can only assign to another after first clearing'NEWLINE data.data_set = mock_data_setNEWLINE self.assertEqual(data.data_set, mock_data_set)NEWLINENEWLINE with self.assertRaises(RuntimeError):NEWLINE data.data_set = mock_data_set2NEWLINENEWLINE data.data_set = NoneNEWLINE self.assertIsNone(data.data_set)NEWLINE data.data_set = mock_data_set2NEWLINE self.assertEqual(data.data_set, mock_data_set2)NEWLINENEWLINE def test_fraction_complete(self):NEWLINE data = DataArray(shape=(5, 10))NEWLINE self.assertIsNone(data.ndarray)NEWLINE self.assertEqual(data.fraction_complete(), 0.0)NEWLINENEWLINE data.init_data()NEWLINE 
self.assertEqual(data.fraction_complete(), 0.0)NEWLINENEWLINE # index = 1 * 10 + 7 - add 1 (for index 0) and you get 18NEWLINE # each index is 2% of the total, so this is 36%NEWLINE data[1, 7] = 1NEWLINE self.assertEqual(data.fraction_complete(), 18/50)NEWLINENEWLINE # add a last_saved_index but modified_range is still biggerNEWLINE data.mark_saved(13)NEWLINE self.assertEqual(data.fraction_complete(), 18/50)NEWLINENEWLINE # now last_saved_index winsNEWLINE data.mark_saved(19)NEWLINE self.assertEqual(data.fraction_complete(), 20/50)NEWLINENEWLINE # now pretend we get more info from syncingNEWLINE data.synced_index = 22NEWLINE self.assertEqual(data.fraction_complete(), 23/50)NEWLINENEWLINENEWLINEclass TestLoadData(TestCase):NEWLINENEWLINE def test_no_saved_data(self):NEWLINE with self.assertRaises(IOError):NEWLINE load_data('_no/such/file_')NEWLINENEWLINE def test_load_false(self):NEWLINE with self.assertRaises(ValueError):NEWLINE load_data(False)NEWLINENEWLINE def test_get_read(self):NEWLINE data = load_data(formatter=MockFormatter(), location='here!')NEWLINE self.assertEqual(data.has_read_data, True)NEWLINE self.assertEqual(data.has_read_metadata, True)NEWLINENEWLINENEWLINEclass TestDataSetMetaData(TestCase):NEWLINENEWLINE def test_snapshot(self):NEWLINE data = new_data(location=False)NEWLINE expected_snap = {NEWLINE '__class__': 'qcodes.data.data_set.DataSet',NEWLINE 'location': False,NEWLINE 'arrays': {},NEWLINE 'formatter': 'qcodes.data.gnuplot_format.GNUPlotFormat',NEWLINE }NEWLINE snap = strip_qc(data.snapshot())NEWLINENEWLINE # handle io separately so we don't need to figure out our pathNEWLINE self.assertIn('DiskIO', snap['io'])NEWLINE del snap['io']NEWLINE self.assertEqual(snap, expected_snap)NEWLINENEWLINE # even though we removed io from the snapshot, it's still in .metadataNEWLINE self.assertIn('io', data.metadata)NEWLINENEWLINE # then do the same transformations to metadata to check it tooNEWLINE del data.metadata['io']NEWLINE strip_qc(data.metadata)NEWLINE self.assertEqual(data.metadata, expected_snap)NEWLINENEWLINE # location is False so read_metadata should be a noopNEWLINE data.metadata = {'food': 'Fried chicken'}NEWLINE data.read_metadata()NEWLINE self.assertEqual(data.metadata, {'food': 'Fried chicken'})NEWLINENEWLINE # snapshot should never delete things from metadata, only add or updateNEWLINE data.metadata['location'] = 'Idaho'NEWLINE snap = strip_qc(data.snapshot())NEWLINE expected_snap['food'] = 'Fried chicken'NEWLINE del snap['io']NEWLINE self.assertEqual(snap, expected_snap)NEWLINENEWLINENEWLINEclass TestNewData(TestCase):NEWLINENEWLINE @classmethodNEWLINE def setUpClass(cls):NEWLINE cls.original_lp = DataSet.location_providerNEWLINENEWLINE @classmethodNEWLINE def tearDownClass(cls):NEWLINE DataSet.location_provider = cls.original_lpNEWLINENEWLINE def test_overwrite(self):NEWLINE io = MatchIO([1])NEWLINENEWLINE with self.assertRaises(FileExistsError):NEWLINE new_data(location='somewhere', io=io)NEWLINENEWLINE data = new_data(location='somewhere', io=io, overwrite=True,)NEWLINE self.assertEqual(data.location, 'somewhere')NEWLINENEWLINE def test_location_functions(self):NEWLINE def my_location(io, record):NEWLINE return 'data/{}'.format((record or {}).get('name') or 'LOOP!')NEWLINENEWLINE def my_location2(io, record):NEWLINE name = (record or {}).get('name') or 'loop?'NEWLINE return 'data/{}/folder'.format(name)NEWLINENEWLINE DataSet.location_provider = my_locationNEWLINENEWLINE self.assertEqual(new_data().location, 'data/LOOP!')NEWLINE 
self.assertEqual(new_data(name='cheese').location, 'data/cheese')NEWLINENEWLINE data = new_data(location=my_location2)NEWLINE self.assertEqual(data.location, 'data/loop?/folder')NEWLINE data = new_data(location=my_location2, name='iceCream')NEWLINE self.assertEqual(data.location, 'data/iceCream/folder')NEWLINENEWLINENEWLINEclass TestDataSet(TestCase):NEWLINENEWLINE def test_constructor_errors(self):NEWLINE # no location - only allowed with load_dataNEWLINE with self.assertRaises(ValueError):NEWLINE DataSet()NEWLINE # wrong typeNEWLINE with self.assertRaises(ValueError):NEWLINE DataSet(location=42)NEWLINENEWLINE def test_write_copy(self):NEWLINE data = DataSet1D(location=False)NEWLINE mockbase = os.path.abspath('some_folder')NEWLINE data.io = DiskIO(mockbase)NEWLINENEWLINE mr = (2, 3)NEWLINE mr_full = (0, 4)NEWLINE lsi = 1NEWLINE data.x_set.modified_range = mrNEWLINE data.y.modified_range = mrNEWLINE data.x_set.last_saved_index = lsiNEWLINE data.y.last_saved_index = lsiNEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy()NEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy(path='some/path', io_manager=DiskIO('.'))NEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy(path='some/path', location='something/else')NEWLINENEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(path='/some/abs/path')NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(None, '/some/abs/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(None, '/some/abs/path', False)])NEWLINE # check that the formatter gets called as if nothing has been savedNEWLINE self.assertEqual(data.formatter.modified_ranges,NEWLINE [{'x_set': mr_full, 'y': mr_full}])NEWLINE self.assertEqual(data.formatter.last_saved_indices,NEWLINE [{'x_set': None, 'y': None}])NEWLINE # but the dataset afterward has its original mods backNEWLINE self.assertEqual(data.x_set.modified_range, mr)NEWLINE self.assertEqual(data.y.modified_range, mr)NEWLINE self.assertEqual(data.x_set.last_saved_index, lsi)NEWLINE self.assertEqual(data.y.last_saved_index, lsi)NEWLINENEWLINE # recreate the formatter to clear the calls attributesNEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(location='some/rel/path')NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(mockbase, 'some/rel/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(mockbase, 'some/rel/path', False)])NEWLINENEWLINE mockbase2 = os.path.abspath('some/other/folder')NEWLINE io2 = DiskIO(mockbase2)NEWLINENEWLINE with self.assertRaises(ValueError):NEWLINE # if location=False we need to specify it in write_copyNEWLINE data.write_copy(io_manager=io2)NEWLINENEWLINE data.location = 'yet/another/path'NEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(io_manager=io2)NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(mockbase2, 'yet/another/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(mockbase2, 'yet/another/path', False)])NEWLINENEWLINE def test_pickle_dataset(self):NEWLINE # Test pickling of DataSet objectNEWLINE # If the data_manager is set to None, then the object should pickle.NEWLINE m = DataSet2D()NEWLINE pickle.dumps(m)NEWLINENEWLINE def test_default_parameter(self):NEWLINE # Test whether the default_array function worksNEWLINE m = DataSet2D()NEWLINENEWLINE # test we can run with default argumentsNEWLINE name = m.default_parameter_name()NEWLINENEWLINE # test with paramnameNEWLINE 
name = m.default_parameter_name(paramname='z')NEWLINE        self.assertEqual(name, 'z')NEWLINE        # test we can get the array instead of the nameNEWLINE        array = m.default_parameter_array(paramname='z')NEWLINE        self.assertEqual(array, m.z)NEWLINENEWLINE        # first non-setpoint arrayNEWLINE        array = m.default_parameter_array()NEWLINE        self.assertEqual(array, m.z)NEWLINENEWLINE        # test with metadataNEWLINE        m.metadata = {'default_parameter_name': 'x_set'}NEWLINE        name = m.default_parameter_name()NEWLINE        self.assertEqual(name, 'x_set')NEWLINENEWLINE        # test the fallback: no name matches, no non-setpoint arrayNEWLINE        x = DataArray(name='x', label='X', preset_data=(1., 2., 3., 4., 5.), is_setpoint=True)NEWLINE        m = new_data(arrays=(x,), name='onlysetpoint')NEWLINE        name = m.default_parameter_name(paramname='dummy')NEWLINE        self.assertEqual(name, 'x_set')NEWLINENEWLINE    def test_fraction_complete(self):NEWLINE        empty_data = new_data(arrays=(), location=False)NEWLINE        self.assertEqual(empty_data.fraction_complete(), 0.0)NEWLINENEWLINE        data = DataSetCombined(location=False)NEWLINE        self.assertEqual(data.fraction_complete(), 1.0)NEWLINENEWLINE        # alter only the measured arrays, check that only these are usedNEWLINE        # to calculate fraction_completeNEWLINE        data.y1.modified_range = (0, 0) # 1 of 2NEWLINE        data.y2.modified_range = (0, 0) # 1 of 2NEWLINE        data.z1.modified_range = (0, 2) # 3 of 6NEWLINE        data.z2.modified_range = (0, 2) # 3 of 6NEWLINE        self.assertEqual(data.fraction_complete(), 0.5)NEWLINENEWLINE        # mark more things complete using last_saved_index and synced_indexNEWLINE        data.y1.last_saved_index = 1 # 2 of 2NEWLINE        data.z1.synced_index = 5 # 6 of 6NEWLINE        self.assertEqual(data.fraction_complete(), 0.75)NEWLINENEWLINE    def mock_sync(self):NEWLINE        i = self.sync_indexNEWLINE        self.syncing_array[i] = iNEWLINE        self.sync_index = i + 1NEWLINE        return self.sync_index < self.syncing_array.sizeNEWLINENEWLINE    def failing_func(self):NEWLINE        raise RuntimeError('it is called failing_func for a reason!')NEWLINENEWLINE    def logging_func(self):NEWLINE        logging.info('background at index {}'.format(self.sync_index))NEWLINENEWLINE    def test_complete(self):NEWLINE        array = DataArray(name='y', shape=(5,))NEWLINE        array.init_data()NEWLINE        data = new_data(arrays=(array,), location=False)NEWLINE        self.syncing_array = arrayNEWLINE        self.sync_index = 0NEWLINE        data.sync = self.mock_syncNEWLINE        bf = DataSet.background_functionsNEWLINE        bf['fail'] = self.failing_funcNEWLINE        bf['log'] = self.logging_funcNEWLINENEWLINE        with LogCapture() as logs:NEWLINE            # grab info and warnings but not debug messagesNEWLINE            logging.getLogger().setLevel(logging.INFO)NEWLINE            data.complete(delay=0.001)NEWLINENEWLINE        logs = logs.valueNEWLINENEWLINE        expected_logs = [NEWLINE            'waiting for DataSet <False> to complete',NEWLINE            'DataSet: 0% complete',NEWLINE            'RuntimeError: it is called failing_func for a reason!',NEWLINE            'background at index 1',NEWLINE            'DataSet: 20% complete',NEWLINE            'RuntimeError: it is called failing_func for a reason!',NEWLINE            'background function fail failed twice in a row, removing it',NEWLINE            'background at index 2',NEWLINE            'DataSet: 40% complete',NEWLINE            'background at index 3',NEWLINE            'DataSet: 60% complete',NEWLINE            'background at index 4',NEWLINE            'DataSet: 80% complete',NEWLINE            'background at index 5',NEWLINE            'DataSet <False> is complete'NEWLINE        ]NEWLINENEWLINE        log_index = 0NEWLINE        for line in expected_logs:NEWLINE            self.assertIn(line, logs, logs)NEWLINE            try:NEWLINE                log_index_new = logs.index(line, log_index)NEWLINE            except ValueError:NEWLINE                raise 
ValueError('line {} not found after {} in: \n {}'.format(NEWLINE line, log_index, logs))NEWLINE self.assertTrue(log_index_new >= log_index, logs)NEWLINE log_index = log_index_new + len(line) + 1 # +1 for \nNEWLINE self.assertEqual(log_index, len(logs), logs)NEWLINE |
"""Tests the plotting function developed as part of 2E"""NEWLINENEWLINE# ImportsNEWLINEfrom floodsystem.plot import plot_water_levelsNEWLINEfrom floodsystem.datafetcher import fetch_measure_levelsNEWLINEimport datetimeNEWLINEfrom floodsystem.station import MonitoringStationNEWLINENEWLINEfictional_station = MonitoringStation("station_id", "measure_id",NEWLINE "Line at y=1 and y=9, and a line that goes diagonally from 0 to 10 across 11 days",NEWLINE "coord", [1, 9], "made up river", "New Madeupville")NEWLINENEWLINEdates = []NEWLINEfor i in range(11):NEWLINE date = datetime.date(2022, 1, 11-i)NEWLINE dates.append(date)NEWLINE # remember that the actual dates go backwards!NEWLINENEWLINElevels = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]NEWLINENEWLINEplot_water_levels(fictional_station, dates, levels)NEWLINENEWLINE#print("Check that this forms a Z shape, that all three lines are plotted, and that the graph has a legend and title") |
from decimal import DecimalNEWLINENEWLINE# work in whole cents so the Decimal arithmetic stays exactNEWLINEprice = 100 * Decimal(input('Enter a price ($): '))NEWLINEbills = 100 * Decimal(input('Enter amount of money ($): '))NEWLINEchange = bills - priceNEWLINENEWLINENEWLINEquarters = change // 25NEWLINEdimes = (change % 25) // 10NEWLINEpennies = ((change % 25) % 10) // 1  # // 1 drops any fractional cent; there is no nickel tier, so e.g. 5 cents comes back as 5 penniesNEWLINENEWLINEprint(f'Your change is:\n{quarters} quarters\n{dimes} dimes\n{pennies} pennies')NEWLINENEWLINE |
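# The same greedy change-making expressed with divmod; this sketch (names illustrative) also adds the nickel tier that the script above skips, which makes the coin count minimal.NEWLINEfrom decimal import DecimalNEWLINENEWLINENEWLINEdef make_change(cents):NEWLINE    counts = {}NEWLINE    for coin, value in (('quarters', 25), ('dimes', 10), ('nickels', 5), ('pennies', 1)):NEWLINE        q, cents = divmod(cents, value)NEWLINE        counts[coin] = int(q)NEWLINE    return countsNEWLINENEWLINENEWLINE# e.g. make_change(100 * Decimal('1.07') - 100 * Decimal('0.68')) -> {'quarters': 1, 'dimes': 1, 'nickels': 0, 'pennies': 4}NEWLINE |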
# -*- coding: utf-8 -*-NEWLINE# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis ModulesNEWLINE# Copyright (C) 2020, Yoel Cortes-Pena <[email protected]>NEWLINE# NEWLINE# This module is under the UIUC open-source license. See NEWLINE# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txtNEWLINE# for license details.NEWLINE"""NEWLINE"""NEWLINEimport numpy as npNEWLINEfrom ._batch_bioreactor import BatchBioreactorNEWLINEfrom scipy.integrate import odeintNEWLINEfrom thermosteam.reaction import Reaction, ParallelReactionNEWLINENEWLINE__all__ = ('Fermentation',)NEWLINENEWLINEclass Fermentation(BatchBioreactor):NEWLINE    """NEWLINE    Create a Fermentation object which models large-scale batch fermentationNEWLINE    for the production of 1st generation ethanol using yeastNEWLINE    [1]_ [2]_ [3]_ [4]_. A compound with CAS 'Yeast' must be present.NEWLINE    Only sucrose and glucose are taken into account for conversion.NEWLINE    Conversion is based on reaction time, `tau`. Cleaning and unloading time,NEWLINE    `tau_0`, fraction of working volume, `V_wf`, and number of reactors,NEWLINE    `N_reactors`, are attributes that can be changed. Cost of a reactorNEWLINE    is based on the NREL batch fermentation tank cost assuming volumetricNEWLINE    scaling with a 6/10th exponent [5]_. NEWLINE    NEWLINE    ParametersNEWLINE    ----------NEWLINE    ins : streamsNEWLINE        Inlet fluids to be mixed into the fermentor.NEWLINE    outs : stream sequenceNEWLINE        * [0] VentNEWLINE        * [1] EffluentNEWLINE    tau : floatNEWLINE        Reaction time.NEWLINE    N : int, optionalNEWLINE        Number of batch reactors.NEWLINE    V : float, optionalNEWLINE        Target volume of reactors [m^3].NEWLINE    T=305.15 : floatNEWLINE        Temperature of reactor [K].NEWLINE    P=101325 : floatNEWLINE        Operating pressure of reactor [Pa].NEWLINE    Nmin=2 : intNEWLINE        Minimum number of fermentors.NEWLINE    Nmax=36 : intNEWLINE        Maximum number of fermentors. NEWLINE    efficiency=0.9 : float, optionalNEWLINE        User enforced efficiency.NEWLINE    iskinetic=False : bool, optionalNEWLINE        If True, `Fermentation.kinetic_model` will be used.NEWLINE    NEWLINE    NotesNEWLINE    -----NEWLINE    Either N or V must be given.NEWLINE    NEWLINE    ExamplesNEWLINE    --------NEWLINE    Simulate a Fermentation object which models batch fermentation for theNEWLINE    production of 1st generation ethanol using yeast.NEWLINE    NEWLINE    >>> from biorefineries.lipidcane import chemicalsNEWLINE    >>> from biosteam.units import FermentationNEWLINE    >>> from biosteam import Stream, settingsNEWLINE    >>> settings.set_thermo(chemicals)NEWLINE    >>> feed = Stream('feed',NEWLINE    ...               Water=1.20e+05,NEWLINE    ...               Glucose=1.89e+03,NEWLINE    ...               Sucrose=2.14e+04,NEWLINE    ...               DryYeast=1.03e+04,NEWLINE    ...               units='kg/hr',NEWLINE    ...               T=32+273.15)NEWLINE    >>> F1 = Fermentation('F1',NEWLINE    ...                   ins=feed, outs=('CO2', 'product'),NEWLINE    ...                   
tau=8, efficiency=0.90, N=8)NEWLINE >>> F1.simulate()NEWLINE >>> F1.show()NEWLINE Fermentation: F1NEWLINE ins...NEWLINE [0] feedNEWLINE phase: 'l', T: 305.15 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 6.66e+03NEWLINE Glucose 10.5NEWLINE Sucrose 62.5NEWLINE Yeast 415NEWLINE [1] missing streamNEWLINE outs...NEWLINE [0] CO2NEWLINE phase: 'g', T: 304.19 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 9.48NEWLINE Ethanol 3.52NEWLINE CO2 244NEWLINE [1] productNEWLINE phase: 'l', T: 304.19 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 6.59e+03NEWLINE Ethanol 240NEWLINE Glucose 4.07NEWLINE Yeast 484NEWLINE >>> F1.results()NEWLINE Fermentation Units F1NEWLINE Power Rate kW 66.6NEWLINE Cost USD/hr 5.21NEWLINE Chilled water Duty kJ/hr -7.55e+06NEWLINE Flow kmol/hr 5.06e+03NEWLINE Cost USD/hr 37.8NEWLINE Design Reactor volume m3 247NEWLINE Batch time hr 12.6NEWLINE Loading time hr 1.57NEWLINE Number of reactors 8NEWLINE Recirculation flow rate m3/hr 17.7NEWLINE Reactor duty kJ/hr 7.55e+06NEWLINE Cleaning and unloading time hr 3NEWLINE Working volume fraction 0.9NEWLINE Purchase cost Heat exchangers USD 1.02e+05NEWLINE Reactors USD 1.87e+06NEWLINE Agitators USD 1.17e+05NEWLINE Cleaning in place USD 8.9e+04NEWLINE Recirculation pumps USD 1.26e+05NEWLINE Total purchase cost USD 2.31e+06NEWLINE Utility cost USD/hr 43NEWLINE NEWLINE ReferencesNEWLINE ----------NEWLINE .. [1] Oliveira, Samuel C., et al. "Discrimination between ethanol NEWLINE inhibition models in a continuous alcoholic fermentation process usingNEWLINE flocculating yeast." Applied biochemistry and biotechnology 74.3 (1998): 161-172.NEWLINE NEWLINE .. [2] Oliveira, Samuel C., et al. "Continuous ethanol fermentation in aNEWLINE tower reactor with flocculating yeast recycle: scale-up effects on processNEWLINE performance, kinetic parameters and model predictions." BioprocessNEWLINE Engineering 20.6 (1999): 525-530.NEWLINE NEWLINE .. [3] Oliveira, Samuel C., et al. "Mathematical modeling of a continuousNEWLINE alcoholic fermentation process in a two-stage tower reactor cascade withNEWLINE flocculating yeast recycle." Bioprocess and biosystems engineering 38.3NEWLINE (2015): 469-479.NEWLINE NEWLINE .. [4] Oliveira, Samuel C., et al. "Kinetic Modeling of 1‐G EthanolNEWLINE Fermentations." Fermentation Processes. InTech, 2017.NEWLINE NEWLINE .. [5] D. Humbird, R. Davis, L. Tao, C. Kinchin, D. Hsu, and A. AdenNEWLINE National. Renewable Energy Laboratory Golden, Colorado. P. Schoen,NEWLINE J. Lukas, B. Olthof, M. Worley, D. Sexton, and D. Dudgeon. Harris GroupNEWLINE Inc. Seattle, Washington and Atlanta, Georgia. Process Design and EconomicsNEWLINE for Biochemical Conversion of Lignocellulosic Biomass to Ethanol Dilute-AcidNEWLINE Pretreatment and Enzymatic Hydrolysis of Corn Stover. May 2011. TechnicalNEWLINE Report NREL/TP-5100-47764NEWLINE NEWLINE """NEWLINE line = 'Fermentation'NEWLINE NEWLINE #: tuple[float] Kinetic parameters for the kinetic model. 
Default constants are fitted for Oliveria's model (mu_m1, mu_m2, Ks1, Ks2, Pm1, Pm2, Xm, Y_PS, a)NEWLINE kinetic_constants = (0.31, # mu_m1NEWLINE 1.01, # mu_m2NEWLINE 1.88, # Ks1NEWLINE 2.81, # Ks2NEWLINE 82.8, # Pm1NEWLINE 108.2, # Pm2NEWLINE 113.4, # XmNEWLINE 0.45, # Y_PSNEWLINE 0.18) # aNEWLINE NEWLINE def __init__(self, ID='', ins=None, outs=(), thermo=None, *, NEWLINE tau, N=None, V=None, T=305.15, P=101325., Nmin=2, Nmax=36,NEWLINE efficiency=0.9, iskinetic=False):NEWLINE BatchBioreactor.__init__(self, ID, ins, outs, thermo,NEWLINE tau=tau, N=N, V=V, T=T, P=P, Nmin=Nmin, Nmax=Nmax)NEWLINE self._load_components()NEWLINE self.iskinetic = iskineticNEWLINE chemicals = self.chemicalsNEWLINE self.hydrolysis_reaction = Reaction('Sucrose + Water -> 2Glucose', 'Sucrose', 1.00, chemicals)NEWLINE self.fermentation_reaction = Reaction('Glucose -> 2Ethanol + 2CO2', 'Glucose', efficiency, chemicals)NEWLINE self.cell_growth_reaction = cell_growth = Reaction('Glucose -> Yeast', 'Glucose', 0.70, chemicals, basis='wt')NEWLINE cell_growth.basis = 'mol'NEWLINE if all([i in self.chemicals for i in ('FFA', 'DAG', 'TAG', 'Glycerol')]):NEWLINE self.lipid_reaction = self.oil_reaction = ParallelReaction([NEWLINE Reaction('TAG + 3Water -> 3FFA + Glycerol', 'TAG', 0.23, chemicals),NEWLINE Reaction('TAG + Water -> FFA + DAG', 'TAG', 0.02, chemicals)NEWLINE ])NEWLINE else:NEWLINE self.lipid_reaction = NoneNEWLINE self.efficiency = efficiencyNEWLINE NEWLINE def _calc_efficiency(self, feed, tau): # pragma: no coverNEWLINE # Get initial concentrationsNEWLINE y, e, s, w = feed.indices(['Yeast',NEWLINE '64-17-5',NEWLINE '492-61-5',NEWLINE '7732-18-5'])NEWLINE mass = feed.massNEWLINE F_vol = feed.F_volNEWLINE concentration_in = mass/F_volNEWLINE X0, P0, S0 = (concentration_in[i] for i in (y, e, s))NEWLINE NEWLINE # Integrate to get final concentrationNEWLINE t = np.linspace(0, tau, 1000)NEWLINE C_t = odeint(self.kinetic_model, (X0, P0, S0), t,NEWLINE args=self.kinetic_constants)NEWLINE # Cache dataNEWLINE self._X = C_t[:, 0]NEWLINE self._P = C_t[:, 1]NEWLINE self._S = S = C_t[:, 2]NEWLINE NEWLINE # Calculate efficiencyNEWLINE Sf = S[-1]NEWLINE Sf = Sf if Sf > 0 else 0NEWLINE Y_PS = self.kinetic_constants[-2]NEWLINE eff = (S0 - Sf)/S0 * Y_PS/0.511NEWLINE return effNEWLINE NEWLINE @staticmethodNEWLINE def kinetic_model(z, t, *kinetic_constants): # pragma: no coverNEWLINE """NEWLINE Return change of yeast, ethanol, and substrate concentration in kg/m3.NEWLINE NEWLINE ParametersNEWLINE ----------NEWLINE z : Iterable with (X, E, S) [-]:NEWLINE * X: Yeast concentration (kg/m3)NEWLINE * P: Ethanol concentration (kg/m3)NEWLINE * S: Substrate concentration (kg/m3)NEWLINE NEWLINE t : floatNEWLINE Time pointNEWLINE NEWLINE *kinetic_constantsNEWLINE * mu_m1: Maximum specific growth rate (1/hr)NEWLINE * mu_m2: Maximum specific ethanol production rate (g-product/g-cell-hr)NEWLINE * Ks1: Sugar saturation constant for growth (g/L)NEWLINE * Ks2: Sugar saturation constant for product (g/L)NEWLINE * Pm1: Maximum product concentration at zero growth [mu_m1=0] (g/L)NEWLINE * Pm2: Maximum product concentration [mu_m2=0] (g/L)NEWLINE * Xm: Maximum cell concentration [mu_m1=0] (g/L)NEWLINE * Y_PS: Ethanol yield based on sugar consumedNEWLINE * a: Toxic powerNEWLINE NEWLINE """NEWLINE mu_m1, mu_m2, Ks1, Ks2, Pm1, Pm2, Xm, Y_PS, a = kinetic_constantsNEWLINE NEWLINE # Current yeast, ethanol, and glucose concentration (kg/m3)NEWLINE X, P, S = zNEWLINE NEWLINE # Compute coefficientsNEWLINE mu_X = mu_m1 * (S/(Ks1 + S)) * (1 - 
P/Pm1)**a*((1-X/Xm))NEWLINE mu_P = mu_m2 * (S/(Ks2 + S)) * (1 - P/Pm2)NEWLINE mu_S = mu_P/0.45NEWLINE NEWLINE # Compute derivativesNEWLINE dXdt = mu_X * XNEWLINE dPdt = (mu_P * X)NEWLINE dSdt = - mu_S * XNEWLINE return (dXdt, dPdt, dSdt)NEWLINENEWLINE @propertyNEWLINE def efficiency(self):NEWLINE return self.fermentation_reaction.XNEWLINE @efficiency.setterNEWLINE def efficiency(self, efficiency):NEWLINE self.fermentation_reaction.X = efficiencyNEWLINENEWLINE def _run(self):NEWLINE vent, effluent = self.outsNEWLINE effluent.mix_from(self.ins)NEWLINE self.hydrolysis_reaction(effluent)NEWLINE if self.iskinetic:NEWLINE self.fermentation_reaction.X = self._calc_efficiency(effluent, self._tau)NEWLINE self.fermentation_reaction(effluent)NEWLINE self.cell_growth_reaction(effluent)NEWLINE if self.lipid_reaction: self.lipid_reaction(effluent)NEWLINE vent.empty()NEWLINE vent.receive_vent(effluent)NEWLINE |
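# A self-contained sketch of the Oliveira-type rate equations integrated by
# Fermentation._calc_efficiency above, using the default kinetic_constants.
# The initial concentrations and batch time below are illustrative
# assumptions, not values taken from the module.
import numpy as np
from scipy.integrate import odeint

def oliveira_odes(z, t, mu_m1, mu_m2, Ks1, Ks2, Pm1, Pm2, Xm, Y_PS, a):
    X, P, S = z  # yeast, ethanol, substrate concentrations [kg/m3]
    mu_X = mu_m1 * (S/(Ks1 + S)) * (1 - P/Pm1)**a * (1 - X/Xm)
    mu_P = mu_m2 * (S/(Ks2 + S)) * (1 - P/Pm2)
    mu_S = mu_P/0.45
    return (mu_X * X, mu_P * X, -mu_S * X)

constants = (0.31, 1.01, 1.88, 2.81, 82.8, 108.2, 113.4, 0.45, 0.18)
t = np.linspace(0, 8, 1000)  # 8 hr batch, as in the docstring example
C_t = odeint(oliveira_odes, (5.0, 0.0, 150.0), t, args=constants)
print('final X, P, S [kg/m3]:', C_t[-1])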
import operator
import re
from functools import reduce

from aocd import get_data


def rotate_once(data):
    return [''.join(r) for r in zip(*data[::-1])]


def rotate(data, rotation):
    if rotation >= 4:
        data = [r[::-1] for r in data]
        rotation -= 4
    while rotation > 0:
        rotation -= 1
        data = rotate_once(data)
    return data


class Tile:
    def __init__(self, id, data):
        self.id = id
        self.data = data

    def __repr__(self):
        return "<Tile: {}>".format(self.id)

    @staticmethod
    def parse_input(s):
        s = s.splitlines()
        id = int(re.findall(r"\d+", s[0])[0])
        data = s[1:]
        return Tile(id, data)

    def get_edges(self):
        yield self.data[0]
        yield self.data[-1]
        yield ''.join(r[0] for r in self.data)
        yield ''.join(r[-1] for r in self.data)
        yield self.data[0][::-1]
        yield self.data[-1][::-1]
        yield ''.join(r[0] for r in self.data)[::-1]
        yield ''.join(r[-1] for r in self.data)[::-1]

    def get_data(self, rotation):
        data = self.data.copy()
        return rotate(data, rotation)

    def get_edge(self, rotation, edge):
        data = self.get_data(rotation)
        if edge == 't':
            return data[0]
        elif edge == 'b':
            return data[-1]
        elif edge == 'l':
            return ''.join(r[0] for r in data)
        elif edge == 'r':
            return ''.join(r[-1] for r in data)


def part1(a):
    return reduce(operator.mul, (t.id for t in a if sum(
        any(e == e2 for t2 in a for e2 in t2.get_edges() if t2.id != t.id) for e in t.get_edges()) == 4))


def part2(a):
    tiles = {t.id: t for t in a}

    # Create the grid with the ids
    neighbors = {t.id: {t2.id for t2 in a for e2 in t2.get_edges() for e in t.get_edges() if t2.id != t.id and e == e2}
                 for t in a}
    grid = [[None for _ in range(12)] for _ in range(12)]
    # first corner
    grid[0][0] = next(k for k, v in neighbors.items() if len(v) == 2)
    # first edge piece
    grid[0][1] = neighbors[grid[0][0]].pop()
    neighbors[grid[0][1]].remove(grid[0][0])
    # first row
    for i in range(2, 12):
        grid[0][i] = next(n for n in neighbors[grid[0][i - 1]] if len(neighbors[n]) <= 3)
        neighbors[grid[0][i - 1]].remove(grid[0][i])
        neighbors[grid[0][i]].remove(grid[0][i - 1])
    # rest of the grid
    for r in range(1, 12):
        for c in range(12):
            grid[r][c] = neighbors[grid[r - 1][c]].pop()
            neighbors[grid[r][c]].remove(grid[r - 1][c])
            if c != 0:
                neighbors[grid[r][c - 1]].remove(grid[r][c])
                neighbors[grid[r][c]].remove(grid[r][c - 1])

    # Rotate and flip the tiles
    # first corner
    rotations = {grid[0][0]: next(r for r in range(8) if
                                  tiles[grid[0][0]].get_edge(r, 'r') in tiles[grid[0][1]].get_edges()
                                  and tiles[grid[0][0]].get_edge(r, 'b') in tiles[grid[1][0]].get_edges())}
    # first row
    for i in range(1, 12):
        rotations[grid[0][i]] = next(r for r in range(8)
                                     if tiles[grid[0][i]].get_edge(r, 'l')
                                     == tiles[grid[0][i - 1]].get_edge(rotations[grid[0][i - 1]], 'r'))
    # rest of the grid
    for x in range(1, 12):
        for y in range(12):
            rotations[grid[x][y]] = next(r for r in range(8)
                                         if tiles[grid[x][y]].get_edge(r, 't')
                                         == tiles[grid[x - 1][y]].get_edge(rotations[grid[x - 1][y]], 'b'))
    # assemble picture
    tile_data = [[tiles[t].get_data(rotations[t]) for t in row] for row in grid]
    picture = [''.join(tile_data[row // 8][column // 8][row % 8 + 1][column % 8 + 1]
                       for column in range(12 * 8)) for row in range(12 * 8)]
    # count sea monsters (whitespace in the pattern is significant)
    sea_monster = ["                  # ",
                   "#    ##    ##    ###",
                   " #  #  #  #  #  #   "]

    def count_sea_monsters(picture):
        count = 0
        for r in range(len(picture) - len(sea_monster) + 1):
            for c in range(len(picture[0]) - len(sea_monster[0]) + 1):
                if all(sea_monster[i][j] == ' ' or picture[r + i][c + j] == '#'
                       for i in range(len(sea_monster)) for j in range(len(sea_monster[0]))):
                    count += 1
        return count

    sea_monsters = max(count_sea_monsters(rotate(picture, r)) for r in range(8))
    return sum(r.count('#') for r in picture) - sea_monsters * sum(r.count('#') for r in sea_monster)


if __name__ == '__main__':
    data = get_data(day=20, year=2020)
    inp = [Tile.parse_input(s) for s in data.split('\n\n')]
    print(part1(inp))
    print(part2(inp))
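# Sanity check for the orientation encoding used by rotate() above:
# rotations 0-3 are successive clockwise quarter turns, 4-7 are the same
# turns applied after flipping each row. The 2x2 tile is a made-up example.
def _demo_rotations():
    tile = ['ab', 'cd']
    assert rotate(tile, 0) == ['ab', 'cd']
    assert rotate(tile, 1) == ['ca', 'db']  # one clockwise turn
    assert rotate(tile, 4) == ['ba', 'dc']  # flipped, no turn
    # all 8 orientations are distinct for a tile without symmetry
    assert len({tuple(rotate(tile, r)) for r in range(8)}) == 8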
import logging
import tempfile

import numpy as np
import pytest
import torch
from torch import nn

import pfrl
from pfrl.envs.abc import ABC
from pfrl.experiments.evaluator import run_evaluation_episodes
from pfrl.policies import (
    GaussianHeadWithStateIndependentCovariance,
    SoftmaxCategoricalHead,
)


@pytest.mark.parametrize("discrete", [True, False])
@pytest.mark.parametrize("use_lstm", [True, False])
@pytest.mark.parametrize("batchsize", [1, 10])
@pytest.mark.parametrize("backward_separately", [True, False])
class TestREINFORCE:
    @pytest.fixture(autouse=True)
    def setUp(self, discrete, use_lstm, batchsize, backward_separately):
        self.discrete = discrete
        self.use_lstm = use_lstm
        self.batchsize = batchsize
        self.backward_separately = backward_separately
        self.outdir = tempfile.mkdtemp()
        logging.basicConfig(level=logging.DEBUG)

    @pytest.mark.slow
    def test_abc_cpu(self):
        self._test_abc(self.use_lstm, discrete=self.discrete)

    @pytest.mark.slow
    @pytest.mark.gpu
    def test_abc_gpu(self):
        self._test_abc(self.use_lstm, discrete=self.discrete, gpu=0)

    def test_abc_fast_cpu(self):
        self._test_abc(
            self.use_lstm, discrete=self.discrete, steps=10, require_success=False
        )

    @pytest.mark.gpu
    def test_abc_fast_gpu(self):
        self._test_abc(
            self.use_lstm,
            discrete=self.discrete,
            steps=10,
            require_success=False,
            gpu=0,
        )

    def _test_abc(
        self, use_lstm, discrete=True, steps=1000000, require_success=True, gpu=-1
    ):
        def make_env(process_idx, test):
            size = 2
            return ABC(
                size=size,
                discrete=discrete,
                episodic=True,
                partially_observable=self.use_lstm,
                deterministic=test,
            )

        sample_env = make_env(0, False)
        action_space = sample_env.action_space
        obs_space = sample_env.observation_space

        hidden_size = 20
        obs_size = obs_space.low.size
        if discrete:
            output_size = action_space.n
            head = SoftmaxCategoricalHead()
        else:
            output_size = action_space.low.size
            head = GaussianHeadWithStateIndependentCovariance(
                output_size, var_type="diagonal"
            )
        if use_lstm:
            model = pfrl.nn.RecurrentSequential(
                nn.LSTM(num_layers=1, input_size=obs_size, hidden_size=hidden_size),
                nn.Linear(hidden_size, hidden_size),
                nn.LeakyReLU(),
                nn.Linear(hidden_size, output_size),
                head,
            )
        else:
            model = nn.Sequential(
                nn.Linear(obs_size, hidden_size),
                nn.LeakyReLU(),
                nn.Linear(hidden_size, output_size),
                head,
            )
        opt = torch.optim.Adam(model.parameters())
        beta = 1e-2
        agent = pfrl.agents.REINFORCE(
            model,
            opt,
            gpu=gpu,
            beta=beta,
            batchsize=self.batchsize,
            backward_separately=self.backward_separately,
            act_deterministically=True,
            recurrent=use_lstm,
        )

        pfrl.experiments.train_agent_with_evaluation(
            agent=agent,
            env=make_env(0, False),
            eval_env=make_env(0, True),
            outdir=self.outdir,
            steps=steps,
            train_max_episode_len=2,
            eval_interval=500,
            eval_n_steps=None,
            eval_n_episodes=5,
            successful_score=1,
        )

        # Test
        env = make_env(0, True)
        n_test_runs = 5
        eval_returns = run_evaluation_episodes(
            env, agent, n_steps=None, n_episodes=n_test_runs,
        )
        if require_success:
            successful_return = 1
            n_succeeded = np.sum(np.asarray(eval_returns) >= successful_return)
            assert n_succeeded == n_test_runs
__all__ = [
    'dates',
    'dictionary',
    'greetings',
    'ircbot',
    'js',
    'listcommands',
    'locator',
    'pip',
    'repeats',
    'scheme',
    'storedresponses',
]
"""NEWLINE Copyright 2016 Andrea McIntoshNEWLINENEWLINE Licensed under the Apache License, Version 2.0 (the "License");NEWLINE you may not use this file except in compliance with the License.NEWLINE You may obtain a copy of the License atNEWLINENEWLINE http://www.apache.org/licenses/LICENSE-2.0NEWLINENEWLINE Unless required by applicable law or agreed to in writing, softwareNEWLINE distributed under the License is distributed on an "AS IS" BASIS,NEWLINE WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE See the License for the specific language governing permissions andNEWLINE limitations under the License.NEWLINE """NEWLINENEWLINEfrom django.contrib import adminNEWLINEfrom .models import TodoList, ItemNEWLINENEWLINEclass ItemInline(admin.TabularInline):NEWLINE model = ItemNEWLINE extra = 3NEWLINENEWLINEclass ListAdmin(admin.ModelAdmin):NEWLINE fieldsets = [(None, {'fields': ['list_title', 'list_user']}),NEWLINE ]NEWLINE inlines = [ItemInline]NEWLINE list_filter = ['list_title']NEWLINE search_fields = ['list_title']NEWLINENEWLINE # vikingosegundo, http://stackoverflow.com/questions/1477319/in-django-how-do-i-know-the-currently-logged-in-user, 2016-01-26NEWLINE def save_model(self, request, obj, form, change):NEWLINE instance = form.save(commit=False)NEWLINE if not hasattr(instance,'created_by'):NEWLINE instance.list_user = request.userNEWLINE instance.save()NEWLINE form.save_m2m()NEWLINE return instanceNEWLINENEWLINE def save_formset(self, request, form, formset, change):NEWLINENEWLINE def set_user(instance):NEWLINE if not instance.list_user:NEWLINE instance.list_user = request.userNEWLINE instance.save()NEWLINENEWLINE if formset.model == TodoList:NEWLINE instances = formset.save(commit=False)NEWLINE map(set_user, instances)NEWLINE formset.save_m2m()NEWLINE return instancesNEWLINE else:NEWLINE return formset.save()NEWLINENEWLINEadmin.site.register(TodoList, ListAdmin)NEWLINE |
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from django.contrib import admin

from .models import ZipCode


class ZipCodeAdmin(admin.ModelAdmin):
    list_display = ('zip_code', 'latitude', 'longitude', 'state', 'city')
    list_filter = ('state',)
    search_fields = ('zip_code', 'state', 'city')

admin.site.register(ZipCode, ZipCodeAdmin)
# Copyright: 2019, NLnet Labs and the Internet.nl contributors
# SPDX-License-Identifier: Apache-2.0
from datetime import timedelta
import random
from timeit import default_timer as timer

from internetnl.celery import app
from celery.utils.log import get_task_logger
from django.conf import settings
from django.core.cache import cache
from django.utils import timezone
from pyrabbit import Client
from pyrabbit.http import HTTPError, NetworkError
from pyrabbit.api import APIError, PermissionError

from . import util
from .. import batch_shared_task, redis_id
from ..probes import batch_webprobes, batch_mailprobes
from ..tasks.dnssec import batch_web_registered as dnssec_web_taskset
from ..tasks.dnssec import batch_mail_registered as dnssec_mail_taskset
from ..tasks.ipv6 import batch_web_registered as ipv6_web_taskset
from ..tasks.ipv6 import batch_mail_registered as ipv6_mail_taskset
from ..tasks.mail import batch_mail_registered as auth_mail_taskset
from ..tasks.tls import batch_web_registered as tls_web_taskset
from ..tasks.tls import batch_mail_registered as tls_mail_taskset
from ..tasks.appsecpriv import batch_web_registered as appsecpriv_web_taskset
from ..tasks import dispatcher
from ..models import BatchRequest, BatchRequestStatus, BatchDomain
from ..models import BatchDomainStatus, BatchTestStatus
from ..models import BatchWebTest
from ..models import WebTestTls, WebTestAppsecpriv
from ..models import DomainTestReport, MailTestReport, MailTestTls
from ..models import MailTestDnssec, DomainTestDnssec

logger = get_task_logger(__name__)

BATCH_WEBTEST = {
    'subtests': {
        'ipv6': ipv6_web_taskset,
        'dnssec': dnssec_web_taskset,
        'tls': tls_web_taskset,
        'appsecpriv': appsecpriv_web_taskset,
    },
    'report': {
        'name': 'domaintestreport'
    }
}
BATCH_MAILTEST = {
    'subtests': {
        'ipv6': ipv6_mail_taskset,
        'dnssec': dnssec_mail_taskset,
        'auth': auth_mail_taskset,
        'tls': tls_mail_taskset,
    },
    'report': {
        'name': 'mailtestreport'
    }
}


class Rabbit():
    """
    Wrapper class for the pyrabbit client.

    """

    def __init__(self, rabbit, user, password):
        self._rabbit = rabbit
        self._user = user
        self._pass = password

    def _get_client(self):
        """
        Get a client connection to rabbitmq.

        """
        try:
            self._cl = Client(self._rabbit, self._user, self._pass)
            return True
        except (HTTPError, NetworkError, APIError, PermissionError):
            return None

    def get_queue_depth(self, host, queue):
        """
        Get the size of a queue on a rabbitmq virtual host.
        In case of a random exception, retry before failing.

        """
        # The first call raises AttributeError (no client yet), which the
        # except clause below turns into a connection attempt.
        tries = 5
        while tries > 0:
            try:
                return self._cl.get_queue_depth(host, queue)
            except (AttributeError, HTTPError, NetworkError, APIError,
                    PermissionError) as e:
                self._get_client()
                tries -= 1
                if tries <= 0:
                    raise e


def is_queue_loaded(client):
    """
    Check if we consider the monitor queue loaded.

    """
    current_load = client.get_queue_depth(
        settings.RABBIT_VHOST, settings.RABBIT_MON_QUEUE)
    if current_load >= settings.RABBIT_MON_THRESHOLD:
        return True
    return False


def get_live_requests():
    """
    Return a dictionary with active users as keys and their earliest
    live batch request as value.

    """
    live_requests = dict()
    batch_requests = BatchRequest.objects.filter(
        status=BatchRequestStatus.live).order_by('submit_date')
    for request in batch_requests:
        if not live_requests.get(request.user):
            live_requests[request.user] = request
    return live_requests


def get_user_and_request(live_requests):
    """
    Pick a user and his request from the available live_requests.
    Users are fairly chosen regardless of the number of submitted tests.

    """
    if not live_requests:
        return None, None

    user = random.choice(list(live_requests.keys()))
    batch_request = live_requests[user]
    return user, batch_request


def pick_domain(batch_request):
    """
    Pick a domain to test.
    Selects the first available domain.

    """
    try:
        return BatchDomain.objects.filter(
            status=BatchDomainStatus.waiting, batch_request=batch_request)[:1].get()
    except BatchDomain.DoesNotExist:
        return None


def check_for_result_or_start_test(batch_domain, batch_test, subtest, taskset):
    """
    Link the result if already available or start a test.

    """
    started_test = False
    subtest_model = batch_test._meta.get_field(subtest).remote_field.model
    result = find_result(batch_domain, subtest_model)
    if result:
        save_result(batch_test, subtest, result)
    else:
        start_test(batch_domain, batch_test, subtest, taskset)
        started_test = True
    return started_test


def find_result(batch_domain, model):
    """
    Check if we already have results for the domain. Viable results are
    ones recorded after the batch submission.

    """
    submit_date = batch_domain.batch_request.submit_date
    try:
        if model is WebTestTls:
            result = model.objects.filter(
                domain=batch_domain.domain,
                webtestset__timestamp__gte=submit_date).latest('id')
        elif model is MailTestTls:
            result = model.objects.filter(
                domain=batch_domain.domain,
                testset__timestamp__gte=submit_date).latest('id')
        elif model is MailTestDnssec:
            result = model.objects.filter(
                domain=batch_domain.domain,
                testset__timestamp__gte=submit_date).latest('id')
        elif model is WebTestAppsecpriv:
            result = model.objects.filter(
                domain=batch_domain.domain,
                webtestset__timestamp__gte=submit_date).latest('id')
        elif model is DomainTestDnssec:
            result = model.objects.filter(
                domain=batch_domain.domain,
                maildomain_id=None,
                timestamp__gte=submit_date).latest('id')
        else:
            result = model.objects.filter(
                domain=batch_domain.domain,
                timestamp__gte=submit_date).latest('id')
    except model.DoesNotExist:
        result = None
    return result


def save_result(batch_test, subtest, result):
    """
    Link results and save model.

    """
    setattr(batch_test, subtest, result)
    setattr(batch_test, '{}_status'.format(subtest), BatchTestStatus.done)
    batch_test.save(update_fields=[
        '{}_id'.format(subtest),
        '{}_status'.format(subtest)])


def start_test(batch_domain, batch_test, subtest, taskset):
    """
    Submit test and change status to running.

    """
    submit_test(batch_domain, subtest, taskset)
    setattr(batch_test, '{}_status'.format(subtest), BatchTestStatus.running)
    batch_test.save(update_fields=['{}_status'.format(subtest)])


def submit_test(batch_domain, test, checks_registry):
    """
    Submit the test in celery.

    """
    url = batch_domain.domain
    task_set = dispatcher.submit_task_set(
        url, checks_registry, error_cb=error_callback)
    # Need to cache it in redis, then the callback can look it up based
    # on the task id.
    cache_id = redis_id.running_batch_test.id.format(task_set.id)
    cache_ttl = redis_id.running_batch_test.ttl
    cache.set(cache_id, (batch_domain.id, test), cache_ttl)

    return task_set


def check_any_subtest_for_status(batch_test, status):
    """
    Check if any of the subtests has a given status.

    """
    if isinstance(batch_test, BatchWebTest):
        subtests = BATCH_WEBTEST['subtests']
    else:
        subtests = BATCH_MAILTEST['subtests']

    for subtest in subtests:
        if getattr(batch_test, "{}_status".format(subtest)) == status:
            return True

    return False


def find_or_create_report(batch_domain):
    report = get_common_report(batch_domain)
    if report:
        batch_test = batch_domain.get_batch_test()
        batch_test.report = report
        batch_test.save(update_fields=['report'])
    else:
        create_report(batch_domain)


def get_common_report(batch_domain):
    """
    Try to find the most recent common report for all subtests.
    If no such report exists or at least one of the subtests is not yet
    part of a report return nothing.

    """
    batch_test = batch_domain.get_batch_test()
    if isinstance(batch_test, BatchWebTest):
        subtests = BATCH_WEBTEST['subtests']
        report_details = BATCH_WEBTEST['report']
    else:
        subtests = BATCH_MAILTEST['subtests']
        report_details = BATCH_MAILTEST['report']

    report_ids = {}
    for subtest in subtests:
        report_ids[subtest] = set()
        # example: batch_test.ipv6.mailtestreport_set.all()
        for report in getattr(
                getattr(batch_test, subtest),
                '{}_set'.format(report_details['name'])).all():
            report_ids[subtest].add(report.id)

        if not report_ids[subtest]:
            return None

    for i, subtest in enumerate(report_ids):
        if i == 0:
            common_report_ids = report_ids[subtest]
        else:
            common_report_ids.intersection_update(report_ids[subtest])

    if common_report_ids:
        common_report_id = max(common_report_ids)
        report_model = batch_test._meta.get_field('report').remote_field.model
        try:
            return report_model.objects.get(id=common_report_id)
        except report_model.DoesNotExist:
            pass
    return None


def create_report(batch_domain):
    """
    Create the report for this domain.
    Similar to when a user is redirected to the results page.

    """
    domain = batch_domain.domain
    if batch_domain.webtest:
        batch_test = batch_domain.webtest
        report = DomainTestReport(
            domain=domain,
            ipv6=batch_test.ipv6,
            dnssec=batch_test.dnssec,
            tls=batch_test.tls,
            appsecpriv=batch_test.appsecpriv)
        probe_reports = batch_webprobes.get_probe_reports(report)
        score = batch_webprobes.count_probe_reports_score(probe_reports)
    else:
        batch_test = batch_domain.mailtest
        report = MailTestReport(
            domain=domain,
            ipv6=batch_test.ipv6,
            dnssec=batch_test.dnssec,
            auth=batch_test.auth,
            tls=batch_test.tls)
        probe_reports = batch_mailprobes.get_probe_reports(report)
        score = batch_mailprobes.count_probe_reports_score(probe_reports)

    report.registrar = "-Not available in batch-"
    report.score = score
    report.save()
    batch_test.report = report
    batch_test.save()


def update_domain_status(batch_domain):
    """
    Check the status of the individual tests and update the domain's
    entry status.

    """
    if batch_domain.status == BatchDomainStatus.cancelled:
        return

    batch_test = batch_domain.get_batch_test()

    if check_any_subtest_for_status(batch_test, BatchTestStatus.error):
        batch_domain.status = BatchDomainStatus.error
    elif check_any_subtest_for_status(batch_test, BatchTestStatus.waiting):
        batch_domain.status = BatchDomainStatus.waiting
    elif check_any_subtest_for_status(batch_test, BatchTestStatus.running):
        batch_domain.status = BatchDomainStatus.running
    else:
        batch_domain.status = BatchDomainStatus.done
        find_or_create_report(batch_domain)
    batch_domain.status_changed = timezone.now()
    batch_domain.save(update_fields=['status_changed', 'status'])


def update_batch_status(batch_request):
    """
    Check the status of the submitted domains and update the batch
    request's status if necessary.

    """
    if batch_request.status in (BatchRequestStatus.cancelled,
                                BatchRequestStatus.done,
                                BatchRequestStatus.registering,
                                BatchRequestStatus.error):
        return

    waiting = batch_request.domains.filter(
        status=BatchDomainStatus.waiting).exists()
    running = batch_request.domains.filter(
        status=BatchDomainStatus.running).exists()
    if not waiting:
        if running:
            batch_request.status = BatchRequestStatus.running
        else:
            batch_request.status = BatchRequestStatus.done
            batch_request.finished_date = timezone.now()
    else:
        batch_request.status = BatchRequestStatus.live
    batch_request.save(update_fields=['status', 'finished_date'])


def batch_callback_hook(result, task_id):
    """
    Link the result and change the status of the running test.

    """
    if not result:
        logger.error("Post callback, no result!")
        return

    cache_id = redis_id.running_batch_test.id.format(task_id)
    cached = cache.get(cache_id)
    if not cached:
        logger.error(
            "Post callback, could not find task id '{}'"
            "".format(task_id))
        return

    batch_domain_id, subtest = cached
    batch_domain = BatchDomain.objects.get(id=batch_domain_id)
    if batch_domain.status == BatchDomainStatus.cancelled:
        return

    batch_test = batch_domain.get_batch_test()

    save_result(batch_test, subtest, result)
    cache.delete(cache_id)
    update_domain_status(batch_domain)


@batch_shared_task()
def error_callback(request, exc, traceback):
    """
    Increase error count and change status, if an error occurs.

    .. note:: Celery only calls this when there is an exception in the chord
              callback. This is a bug in celery. To compensate we periodically
              check for tests stuck in the running state with
              find_stalled_tests_and_update_db().

    """
    logger.error("Task {0!r} raised error: {1!r}".format(request.id, exc))
    cache_id = redis_id.running_batch_test.id.format(request.id)
    cached = cache.get(cache_id)
    if not cached:
        logger.error(
            "Error callback, could not find task id '{}'"
            "".format(request.id))
        return

    batch_domain_id, test = cached
    batch_domain = BatchDomain.objects.get(id=batch_domain_id)
    if batch_domain.status == BatchDomainStatus.cancelled:
        return

    batch_test = batch_domain.get_batch_test()
    record_subtest_error(batch_test, test)
    update_domain_status(batch_domain)
    cache.delete(cache_id)


def record_subtest_error(batch_test, subtest):
    """
    Increase and return the error count for the given subtest. Also change
    the status if appropriate.

    """
    error_count = getattr(batch_test, '{}_errors'.format(subtest))
    status = getattr(batch_test, '{}_status'.format(subtest))
    error_count += 1
    if status != BatchTestStatus.cancelled:
        if error_count > 2:
            status = BatchTestStatus.error
        else:
            status = BatchTestStatus.waiting
    setattr(batch_test, '{}_status'.format(subtest), status)
    setattr(batch_test, '{}_errors'.format(subtest), error_count)
    batch_test.save(update_fields=[
        '{}_status'.format(subtest),
        '{}_errors'.format(subtest)])
    return error_count


def find_stalled_tests_and_update_db():
    """
    Find tests that have been in the running state for more than a given
    threshold and update their status.

    """
    running_domains = BatchDomain.objects.filter(
        status=BatchDomainStatus.running)
    now = timezone.now()
    for batch_domain in running_domains:
        timediff = (now - batch_domain.status_changed).total_seconds()
        if timediff >= settings.BATCH_MAX_RUNNING_TIME:
            if batch_domain.webtest:
                batch_test = batch_domain.webtest
                subtests = BATCH_WEBTEST['subtests']
            else:
                batch_test = batch_domain.mailtest
                subtests = BATCH_MAILTEST['subtests']

            for subtest in subtests:
                status = getattr(batch_test, '{}_status'.format(subtest))
                if status == BatchTestStatus.running:
                    errors = record_subtest_error(batch_test, subtest)
                    logger.info(
                        "{} errors for {}({})"
                        "".format(errors, batch_domain.domain, subtest))
            update_domain_status(batch_domain)


def update_batch_request_status():
    batch_requests = BatchRequest.objects.filter(
        status__in=(BatchRequestStatus.live, BatchRequestStatus.running))
    for batch_request in batch_requests:
        update_batch_status(batch_request)


def _run_scheduler():
    """
    Submit a fixed number of domains for testing if the queue is not
    considered loaded.

    """
    client = Rabbit(
        settings.RABBIT, settings.RABBIT_USER, settings.RABBIT_PASS)
    domains_to_test = settings.BATCH_SCHEDULER_DOMAINS

    start_time = timer()
    find_stalled_tests_and_update_db()
    logger.info("Find stalled duration: {}".format(timer() - start_time))

    start_time = timer()
    update_batch_request_status()
    logger.info("Update status duration: {}".format(timer() - start_time))

    submitted = 0
    found = 0
    if not is_queue_loaded(client):
        start_time = timer()
        live_requests = get_live_requests()
        while domains_to_test > 0:
            user, batch_request = get_user_and_request(live_requests)
            if not (user or batch_request):
                break

            batch_domain = pick_domain(batch_request)
            if not batch_domain:
                break

            subtests_started = 0
            batch_test = batch_domain.get_batch_test()
            if isinstance(batch_test, BatchWebTest):
                subtests = BATCH_WEBTEST['subtests']
            else:
                subtests = BATCH_MAILTEST['subtests']

            for subtest in subtests:
                if (getattr(batch_test, '{}_status'.format(subtest))
                        == BatchTestStatus.waiting):
                    started_test = check_for_result_or_start_test(
                        batch_domain, batch_test, subtest,
                        subtests[subtest])
                    if started_test:
                        subtests_started += 1

            if subtests_started > 0:
                submitted += 1
                domains_to_test -= 1
            else:
                found += 1
                update_domain_status(batch_domain)
        logger.info("Submission duration: {}".format(timer() - start_time))

    submitted_domains = submitted
    found_domains = found
    logger.info("Submitted {} domains".format(submitted_domains))
    logger.info("Found {} domains".format(found_domains))


@batch_shared_task
def run():
    """
    Run the scheduler every interval only if it is not running already.

    """
    lock_id = redis_id.batch_scheduler_lock.id
    lock_ttl = redis_id.batch_scheduler_lock.ttl
    with util.memcache_lock(lock_id, lock_ttl) as acquired:
        if acquired:
            _run_scheduler()
            return
    logger.info("Already running...")
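# util.memcache_lock is imported above but not shown in this file. A minimal
# sketch of such a lock, assuming the usual Celery-cookbook pattern built on
# Django's cache (cache.add is atomic on memcached/redis backends); the real
# helper in internet.nl may differ.
from contextlib import contextmanager
from django.core.cache import cache


@contextmanager
def memcache_lock_sketch(lock_id, ttl):
    # cache.add succeeds only when the key does not exist yet, so exactly
    # one worker holds the lock until it is released or the ttl expires.
    acquired = cache.add(lock_id, 'locked', ttl)
    try:
        yield acquired
    finally:
        if acquired:
            cache.delete(lock_id)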
import torch
import torch.nn as nn
from torch.distributions import Normal

from examples.nvae.common import DecoderResidualBlock
from examples.nvae.common import ResidualBlock
from examples.nvae.common import Swish
from examples.nvae.losses import kl_2
from examples import iter_pairs


class UpsampleBlock(nn.Module):
    def __init__(self, in_channel, out_channel):
        super().__init__()

        self._seq = nn.Sequential(
            nn.ConvTranspose2d(
                in_channel,
                out_channel,
                kernel_size=3,
                stride=2,
                padding=1,
                output_padding=1
            ),
            # nn.UpsamplingBilinear2d(scale_factor=2),
            # nn.Conv2d(in_channel, out_channel, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channel), Swish(),
        )

    def forward(self, x):
        return self._seq(x)


class DecoderBlock(nn.Module):

    def __init__(self, channels):
        super().__init__()
        self.channels = channels
        self.module_list = nn.ModuleList([
            UpsampleBlock(inp, out)
            for inp, out in iter_pairs(channels)
        ])

    def forward(self, x):
        for module in self.module_list:
            x = module(x)
        return x


class Decoder(nn.Module):

    def __init__(self, z_dim):
        super().__init__()

        # Input channels = z_channels * 2 = x_channels + z_channels
        # Output channels = z_channels
        self.decoder_blocks = nn.ModuleList([
            DecoderBlock([z_dim * 2, z_dim // 2]),                 # 2x upsample
            DecoderBlock([z_dim, z_dim // 4, z_dim // 8]),         # 4x upsample
            DecoderBlock([z_dim // 4, z_dim // 16, z_dim // 32])   # 4x upsample
        ])
        self.decoder_residual_blocks = nn.ModuleList([
            DecoderResidualBlock(z_dim // 2, n_group=4),
            DecoderResidualBlock(z_dim // 8, n_group=2),
            DecoderResidualBlock(z_dim // 32, n_group=1)
        ])

        # p(z_l | z_(l-1))
        self.condition_z = nn.ModuleList([
            nn.Sequential(
                ResidualBlock(z_dim // 2),
                Swish(),
                nn.Conv2d(z_dim // 2, z_dim, kernel_size=1)
            ),
            nn.Sequential(
                ResidualBlock(z_dim // 8),
                Swish(),
                nn.Conv2d(z_dim // 8, z_dim // 4, kernel_size=1)
            )
        ])

        # p(z_l | x, z_(l-1))
        self.condition_xz = nn.ModuleList([
            nn.Sequential(
                ResidualBlock(z_dim),
                nn.Conv2d(z_dim, z_dim // 2, kernel_size=1),
                Swish(),
                nn.Conv2d(z_dim // 2, z_dim, kernel_size=1)
            ),
            nn.Sequential(
                ResidualBlock(z_dim // 4),
                nn.Conv2d(z_dim // 4, z_dim // 8, kernel_size=1),
                Swish(),
                nn.Conv2d(z_dim // 8, z_dim // 4, kernel_size=1)
            )
        ])

        self.recon = nn.Sequential(
            ResidualBlock(z_dim // 32),
            nn.Conv2d(z_dim // 32, 3, kernel_size=1),
        )

    def forward(self, z, xs=None):
        """
        :param z: shape = (B, z_dim, map_h, map_w)
        :return:
        """

        B, D, map_h, map_w = z.shape

        # The initial hidden state; it could be replaced with a learned
        # parameter, but that didn't help much.
        decoder_out = torch.zeros(B, D, map_h, map_w, device=z.device, dtype=z.dtype)

        kl_losses = []

        for i in range(len(self.decoder_residual_blocks)):
            z_sample = torch.cat([decoder_out, z], dim=1)
            decoder_out = self.decoder_residual_blocks[i](self.decoder_blocks[i](z_sample))

            if i == len(self.decoder_residual_blocks) - 1:
                break

            mu, log_var = self.condition_z[i](decoder_out).chunk(2, dim=1)

            if xs is not None:
                delta_mu, delta_log_var = self.condition_xz[i](torch.cat([xs[i], decoder_out], dim=1)).chunk(2, dim=1)
                kl_losses.append(kl_2(delta_mu, delta_log_var, mu, log_var))
                mu = mu + delta_mu
                log_var = log_var + delta_log_var

            z = Normal(mu, torch.exp(0.5 * log_var)).rsample()
            map_h *= 2 ** (len(self.decoder_blocks[i].channels) - 1)
            map_w *= 2 ** (len(self.decoder_blocks[i].channels) - 1)

        x_hat = torch.sigmoid(self.recon(decoder_out))

        return x_hat, kl_losses
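# kl_2 is imported from examples.nvae.losses and not shown here. A sketch of
# the closed-form KL it plausibly computes, assuming the NVAE residual-normal
# parametrization KL( N(mu+dmu, exp(lv+dlv)) || N(mu, exp(lv)) ); the actual
# reduction (sum vs. mean) in the repository may differ.
def kl_2_sketch(delta_mu, delta_log_var, mu, log_var):
    kl = 0.5 * (delta_mu ** 2 / torch.exp(log_var)
                + torch.exp(delta_log_var) - delta_log_var - 1)
    return kl.sum(dim=[1, 2, 3]).mean()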
## https://www.hackster.io/ben-eagan/raspberry-pi-automated-plant-watering-with-website-8af2dc
## https://gist.github.com/benrules2/6f490f3a0e082ae6592a630bd7abe588

## https://towardsdatascience.com/python-webserver-with-flask-and-raspberry-pi-398423cc6f5d

# https://randomnerdtutorials.com/raspberry-pi-web-server-using-flask-to-control-gpios/

# http://mattrichardson.com/Raspberry-Pi-Flask/

## Refresh auto page
## https://stackoverflow.com/questions/40963401/flask-dynamic-data-update-without-reload-page

####
### Reading live sensor
### https://stackoverflow.com/questions/62333250/trying-to-get-realtime-sensor-data-from-python-into-html-using-flask-and-jquery
####

### logging ###
## https://www.raspberrypi.org/forums/viewtopic.php?t=289151
########


from flask import Flask, render_template, redirect, url_for, request, jsonify
from datetime import datetime
import json

from plotting import dash_application
from time import sleep

from Sensor import sensor, autofeeder

# import RPi.GPIO as GPIO
GPIO = 21  # BCM pin number, used directly while RPi.GPIO is unavailable

print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

## Measure sensor
## Water
    # for X sec
## Calibration

################
#   database   #
################
## Soil moisture
    # date, soil moisture
## Feeding db
    # date, soil moisture before, after, water duration

app = Flask(__name__)
dash_application(app)

def load_config():
    with open("config/config.json", "r") as js:
        return json.load(js)

def write_config(data):
    with open("config/config.json", "w") as js:
        return json.dump(data, js)

@app.route('/')
def index():
    # return 'Hello World!'
    config_values = load_config()
    print(config_values)
    return render_template('index.html', title='main page', today=str(datetime.today()), **config_values)

@app.route('/config', methods=['POST'])
def configuration():
    ## Update config file
    write_config(request.form.to_dict())
    return redirect(url_for("index"))

@app.route('/calibration/<state>')
def calibration(state):
    # stub: echo the requested state until calibration is implemented
    return jsonify({'calibration': state})

@app.route('/sensor_calibration', methods=['GET', 'POST'])
def sensor_calibration(sensor_pin=GPIO, duration=1):
    # duration is a placeholder; the actual calibration timing is not
    # implemented yet
    return jsonify({
        'sensor': sensor_pin
    })

@app.route('/calibration/dry')
def calibration_dry_page():
    return sensor_calibration(GPIO)

@app.route('/calibration/wet')
def calibration_wet_page():
    # stub: wet calibration not implemented yet
    return sensor_calibration(GPIO)



###################################
##     -- dynamic update --     ##
###################################

@app.route('/update2')
def update2():
    return render_template('update.html')

@app.route("/update", methods=["GET", "POST"])
def update():
    return jsonify({
        "time": datetime.now().strftime("%H:%M:%S")
    })
######################################

# app.route('/sensor')
# def read_sensor():
#     pass

# app.route('/water/<toggle>')
# def water():
#     pass

# app.route('/calibration')
# def calibrate():
#     pass
#     ## calibrated dry
#     ## calibrated wet


if __name__ == '__main__':
    app.run(debug=True)
#coding:utf-8
from django.shortcuts import render
from django.http import HttpResponse

def home(request):
    string = u"我在自强学堂学习Django,用它来建网站"
    TutorialList = ["HTML", "CSS", "jQuery", "Python", "Django"]
    info_dict = {'site': u'自强学堂', 'content': u'各种IT技术教程'}
    List = map(str, range(100))  # a List of length 100
    return render(request, 'home.html', {'string': string, 'TutorialList': TutorialList, 'info_dict': info_dict, 'List': List})

def statictest(request):
    return render(request, 'static.html')

def index(request):
    return HttpResponse(u"欢迎光临 自强学堂!")

def add(request):
    a = request.GET['a']
    b = request.GET['b']
    c = int(a) + int(b)
    return HttpResponse(str(c))

def add2(request, a, b):
    c = int(a) + int(b)
    return HttpResponse(str(c))

from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse

def new(request, a, b):
    c = int(a) + int(b)
    return HttpResponse('new address:' + str(c))

def old_new_redirect(request, a, b):
    return HttpResponseRedirect(
        reverse('new', args=(a, b))
    )

from learn.models import Person

# database operations
def insert(request):
    # test1 = Person(name='李磊')
    if len(request.GET['realname']) == 0:
        return HttpResponse("<p>请输入真实姓名!</p>")
    if len(request.GET['realname']) > 20:
        return HttpResponse("<p>不能超过20个字符!</p>")
    test1 = Person(name=request.GET['realname'])
    # set the age to a random number
    import random
    test1.age = random.randint(1, 20)
    test1.save()
    return HttpResponse("<p>数据添加成功!</p>")

# database operations
def all(request):
    # initialization
    response = ""
    response1 = ""


    # objects is the model manager; all() returns every row,
    # like SELECT * FROM in SQL
    list = Person.objects.all()

    # filter() works like WHERE in SQL and narrows the result set
    response2 = Person.objects.filter(id=1)

    # fetch a single object
    response3 = Person.objects.get(id=1)

    # limit the returned rows, like OFFSET 0 LIMIT 2 in SQL
    Person.objects.order_by('name')[0:2]

    # ordering
    Person.objects.order_by("id")

    # the calls above can be chained
    Person.objects.filter(name="w3cschool.cc").order_by("id")

    # output all rows
    for var in list:
        response1 += var.name + " "
    response = response1
    return HttpResponse("<p>" + response + "</p>")

# update
def update(request):
    # change the name field of the row with id=1, then save;
    # like UPDATE in SQL
    test1 = Person.objects.get(id=1)
    test1.name = 'w3cschool菜鸟教程'
    test1.save()

    # alternatively:
    # Test.objects.filter(id=1).update(name='w3cschool菜鸟教程')

    # update every row:
    # Test.objects.all().update(name='w3cschool菜鸟教程')

    return HttpResponse("<p>修改成功</p>")

# delete
def delete(request):
    # delete the row with id=100
    test1 = Person.objects.get(id=100)
    test1.delete()
    # alternatively:
    # Test.objects.filter(id=1).delete()
    # delete all rows:
    # Test.objects.all().delete()
    return HttpResponse("<p>删除成功</p>")

def upload(request):
    if request.method == 'POST':
        file_url = handle_uploaded_file(request.FILES['file'], str(request.FILES['file']))
        '''
        return value: upload/2016/07/20/83025aafa40f4bfbc55b19e3014f78f0f63618f9.gif
        '''
        return HttpResponse(file_url)
    else:
        return render(request, 'upload.html', {'what': 'file upload'})


def handle_uploaded_file(file, filename):
    import os
    import time
    upload_dir = 'upload/'
    if not os.path.exists(upload_dir):
        os.mkdir(upload_dir)
    # path = upload_dir + time.strftime('%Y/%m/%d/%H/%M/%S/')
    path = upload_dir + time.strftime('%Y/%m/%d/')
    if not os.path.exists(path):
        os.makedirs(path)
    file_name = path + filename
    with open(file_name, 'wb+') as destination:
        for chunk in file.chunks():
            destination.write(chunk)
    return file_name

'''
@description: two examples of using sessions
'''
def postcomment(request):
    # on the first visit
    if request.session.get('has_commented', False):
        return HttpResponse("You've already commented.")

    request.session['has_commented'] = True
    return HttpResponse('Thanks for your comment!')

def login(request):
    # m = Member.objects.get(username=request.POST['username'])
    # if m.password == request.POST['password']:
    request.session['member_id'] = 10
    return HttpResponse("You're logged in.member_id=" + str(request.session.get('member_id')))
    # else:
    #     return HttpResponse("Your username and password didn't match.")

def logout(request):
    HttpResponse(request.session.get('member_id'))
    # delete the session key
    try:
        del request.session['member_id']
    except KeyError:
        pass
    return HttpResponse("You're logged out.member_id=" + str(request.session.get('member_id')))

###############################################################
# 1. Reading and writing cookies
#
# 1) set a cookie:
#    response.set_cookie("cookie_key", "value")
# 2) read a cookie:
#    value = request.COOKIES["cookie_key"]
# 3) delete a cookie:
#    response.delete_cookie("cookie_key", path="/", domain=name)
# 4) test for a cookie:
#    if "cookie_name" in request.COOKIES:
# 5) optional arguments accepted by response.set_cookie():
#    argument  default  description
#    max_age   None     lifetime of the cookie in seconds; with None the
#                       cookie expires when the browser is closed.
#    expires   None     expiry time, format "Wdy, DD-Mth-YY HH:MM:SS GMT";
#                       when set it overrides max_age.
#    path      "/"      path prefix the cookie is valid for; the browser only
#                       sends the cookie back to pages under this path, which
#                       keeps it away from other applications on the same
#                       site. Useful when the application does not live at
#                       the top level of the site.
#    domain    None     site the cookie is valid for. Use it for cross-site
#                       cookies: domain=".example.com" makes the cookie
#                       readable by www.example.com, www2.example.com and
#                       an.other.sub.domain.example.com. With None the cookie
#                       can only be read by the site that set it.
#    secure    False    when True the browser only returns the cookie over
#                       HTTPS.
# 2. Cookie rules
#    1) Cookies are stored as key -> value pairs, like a dictionary; read a
#       value through its key: value = request.COOKIES["cookie_key"]
#    2) Stored on the client side.
#       Advantage: data lives on the client, which takes load off the server
#       and improves site performance.
#       Disadvantage: weak security; session information on the client
#       machine is easy to inspect or tamper with.
##################################################################
import datetime

def set_cookie(request, hour=0, name="admin"):
    dt = datetime.datetime.now() + datetime.timedelta(hours=int(hour))
    html = "设置用户%s为登录回话,过期时间:%s" % (name, str(dt))
    response = HttpResponse(html)
    response.set_cookie("username", name, expires=dt)
    return response

def show_cookie(request):
    html = ""
    if "username" in request.COOKIES:
        name = request.COOKIES["username"]
        if name == "admin":
            html = "用户%s 的Cookies 没有超时" % name
        if name == "loker":
            html = "用户%s 的Cookies 没有超时" % name
    else:
        # expires one hour from now
        dt = datetime.datetime.now() + datetime.timedelta(hours=int(1))
        name = "loker"
        html = "用户的Cookies 已经超时\n设置用户%s为登录回话,过期时间:%s" % (name, str(dt))
        response = HttpResponse(html)
        response.set_cookie("username", name, expires=dt)
        return response
    # the original fell through here without returning, so a valid cookie
    # produced an empty reply; return the message instead
    return HttpResponse(html)

def delete_cookie(request):
    name = ""
    if "username" in request.COOKIES:
        name = request.COOKIES["username"]
    response = HttpResponse(name)
    if "username" in request.COOKIES:
        # delete the cookie on the response that is actually returned
        response.delete_cookie("username", path="/")
    return response
# model settingsNEWLINEmodel = dict(NEWLINE type="RPN",NEWLINE pretrained="open-mmlab://resnet50_caffe",NEWLINE backbone=dict(NEWLINE type="ResNet",NEWLINE depth=50,NEWLINE num_stages=4,NEWLINE out_indices=(0, 1, 2, 3),NEWLINE frozen_stages=1,NEWLINE norm_cfg=dict(type="BN", requires_grad=False),NEWLINE norm_eval=True,NEWLINE style="caffe",NEWLINE ),NEWLINE neck=dict(NEWLINE type="FPN", in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5NEWLINE ),NEWLINE rpn_head=dict(NEWLINE type="GARPNHead",NEWLINE in_channels=256,NEWLINE feat_channels=256,NEWLINE octave_base_scale=8,NEWLINE scales_per_octave=3,NEWLINE octave_ratios=[0.5, 1.0, 2.0],NEWLINE anchor_strides=[4, 8, 16, 32, 64],NEWLINE anchor_base_sizes=None,NEWLINE anchoring_means=[0.0, 0.0, 0.0, 0.0],NEWLINE anchoring_stds=[0.07, 0.07, 0.14, 0.14],NEWLINE target_means=(0.0, 0.0, 0.0, 0.0),NEWLINE target_stds=[0.07, 0.07, 0.11, 0.11],NEWLINE loc_filter_thr=0.01,NEWLINE loss_loc=dict(NEWLINE type="FocalLoss", use_sigmoid=True, gamma=2.0, alpha=0.25, loss_weight=1.0NEWLINE ),NEWLINE loss_shape=dict(type="BoundedIoULoss", beta=0.2, loss_weight=1.0),NEWLINE loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=True, loss_weight=1.0),NEWLINE loss_bbox=dict(type="SmoothL1Loss", beta=1.0, loss_weight=1.0),NEWLINE ),NEWLINE)NEWLINE# model training and testing settingsNEWLINEtrain_cfg = dict(NEWLINE rpn=dict(NEWLINE ga_assigner=dict(NEWLINE type="ApproxMaxIoUAssigner",NEWLINE pos_iou_thr=0.7,NEWLINE neg_iou_thr=0.3,NEWLINE min_pos_iou=0.3,NEWLINE ignore_iof_thr=-1,NEWLINE ),NEWLINE ga_sampler=dict(NEWLINE type="RandomSampler",NEWLINE num=256,NEWLINE pos_fraction=0.5,NEWLINE neg_pos_ub=-1,NEWLINE add_gt_as_proposals=False,NEWLINE ),NEWLINE assigner=dict(NEWLINE type="MaxIoUAssigner",NEWLINE pos_iou_thr=0.7,NEWLINE neg_iou_thr=0.3,NEWLINE min_pos_iou=0.3,NEWLINE ignore_iof_thr=-1,NEWLINE ),NEWLINE sampler=dict(NEWLINE type="RandomSampler",NEWLINE num=256,NEWLINE pos_fraction=0.5,NEWLINE neg_pos_ub=-1,NEWLINE add_gt_as_proposals=False,NEWLINE ),NEWLINE allowed_border=-1,NEWLINE pos_weight=-1,NEWLINE center_ratio=0.2,NEWLINE ignore_ratio=0.5,NEWLINE debug=False,NEWLINE )NEWLINE)NEWLINEtest_cfg = dict(NEWLINE rpn=dict(NEWLINE nms_across_levels=False,NEWLINE nms_pre=2000,NEWLINE nms_post=2000,NEWLINE max_num=2000,NEWLINE nms_thr=0.7,NEWLINE min_bbox_size=0,NEWLINE )NEWLINE)NEWLINE# dataset settingsNEWLINEdataset_type = "CocoDataset"NEWLINEdata_root = "data/coco/"NEWLINEimg_norm_cfg = dict(NEWLINE mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=FalseNEWLINE)NEWLINEtrain_pipeline = [NEWLINE dict(type="LoadImageFromFile"),NEWLINE dict(type="LoadAnnotations", with_bbox=True, with_label=False),NEWLINE dict(type="Resize", img_scale=(1333, 800), keep_ratio=True),NEWLINE dict(type="RandomFlip", flip_ratio=0.5),NEWLINE dict(type="Normalize", **img_norm_cfg),NEWLINE dict(type="Pad", size_divisor=32),NEWLINE dict(type="DefaultFormatBundle"),NEWLINE dict(type="Collect", keys=["img", "gt_bboxes"]),NEWLINE]NEWLINEtest_pipeline = [NEWLINE dict(type="LoadImageFromFile"),NEWLINE dict(NEWLINE type="MultiScaleFlipAug",NEWLINE img_scale=(1333, 800),NEWLINE flip=False,NEWLINE transforms=[NEWLINE dict(type="Resize", keep_ratio=True),NEWLINE dict(type="RandomFlip"),NEWLINE dict(type="Normalize", **img_norm_cfg),NEWLINE dict(type="Pad", size_divisor=32),NEWLINE dict(type="ImageToTensor", keys=["img"]),NEWLINE dict(type="Collect", keys=["img"]),NEWLINE ],NEWLINE ),NEWLINE]NEWLINEdata = dict(NEWLINE imgs_per_gpu=2,NEWLINE workers_per_gpu=2,NEWLINE 
train=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_train2017.json",NEWLINE img_prefix=data_root + "train2017/",NEWLINE pipeline=train_pipeline,NEWLINE ),NEWLINE val=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_val2017.json",NEWLINE img_prefix=data_root + "val2017/",NEWLINE pipeline=test_pipeline,NEWLINE ),NEWLINE test=dict(NEWLINE type=dataset_type,NEWLINE ann_file=data_root + "annotations/instances_val2017.json",NEWLINE img_prefix=data_root + "val2017/",NEWLINE pipeline=test_pipeline,NEWLINE ),NEWLINE)NEWLINEevaluation = dict(interval=1, metric="proposal_fast")NEWLINE# optimizerNEWLINEoptimizer = dict(type="SGD", lr=0.02, momentum=0.9, weight_decay=0.0001)NEWLINE# runner configsNEWLINEoptimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))NEWLINElr_config = dict(NEWLINE policy="step", warmup="linear", warmup_iters=500, warmup_ratio=1.0 / 3, step=[8, 11]NEWLINE)NEWLINEcheckpoint_config = dict(interval=1)NEWLINE# yapf:disableNEWLINElog_config = dict(NEWLINE interval=50,NEWLINE hooks=[NEWLINE dict(type="TextLoggerHook"),NEWLINE # dict(type='TensorboardLoggerHook')NEWLINE ],NEWLINE)NEWLINE# yapf:enableNEWLINE# runtime settingsNEWLINEtotal_epochs = 12NEWLINEdist_params = dict(backend="nccl")NEWLINElog_level = "INFO"NEWLINEwork_dir = "./work_dirs/ga_rpn_r50_caffe_fpn_1x"NEWLINEload_from = NoneNEWLINEresume_from = NoneNEWLINEworkflow = [("train", 1)]NEWLINE |
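# --- Hedged usage sketch (added; not part of the original config) ---NEWLINE# A config like the one above is normally consumed by mmdetection-styleNEWLINE# tooling (e.g. "python tools/train.py <config>.py"); mmcv parses theNEWLINE# module into a dict-like Config object. The config path used below isNEWLINE# an assumption for illustration.NEWLINEfrom mmcv import ConfigNEWLINENEWLINEcfg = Config.fromfile("configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x.py")  # assumed pathNEWLINEprint(cfg.model.type)    # -> "RPN"NEWLINEprint(cfg.optimizer.lr)  # -> 0.02NEWLINEprint(cfg.total_epochs)  # -> 12NEWLINE |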
# coding: utf-8NEWLINENEWLINE# Copyright 2018 IBM All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE"""NEWLINEThe IBM Watson™ Visual Recognition service uses deep learning algorithms to identifyNEWLINEscenes, objects, and faces in images you upload to the service. You can create and trainNEWLINEa custom classifier to identify subjects that suit your needs.NEWLINE"""NEWLINENEWLINEfrom __future__ import absolute_importNEWLINENEWLINEimport jsonNEWLINEfrom .watson_service import datetime_to_string, string_to_datetimeNEWLINEfrom os.path import basenameNEWLINEimport reNEWLINEfrom .watson_service import WatsonServiceNEWLINEfrom .utils import deprecatedNEWLINENEWLINE##############################################################################NEWLINE# ServiceNEWLINE##############################################################################NEWLINENEWLINE@deprecated("watson-developer-cloud moved to ibm-watson. To get updates, use the new package.")NEWLINEclass VisualRecognitionV3(WatsonService):NEWLINE """The Visual Recognition V3 service."""NEWLINENEWLINE default_url = 'https://gateway.watsonplatform.net/visual-recognition/api'NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE version,NEWLINE url=default_url,NEWLINE iam_apikey=None,NEWLINE iam_access_token=None,NEWLINE iam_url=None,NEWLINE ):NEWLINE """NEWLINE Construct a new client for the Visual Recognition service.NEWLINENEWLINE :param str version: The API version date to use with the service, inNEWLINE "YYYY-MM-DD" format. Whenever the API is changed in a backwardsNEWLINE incompatible way, a new minor version of the API is released.NEWLINE The service uses the API version for the date you specify, orNEWLINE the most recent version before that date. Note that you shouldNEWLINE not programmatically specify the current date at runtime, inNEWLINE case the API has been updated since your application's release.NEWLINE Instead, specify a version date that is compatible with yourNEWLINE application, and don't change it until your application isNEWLINE ready for a later version.NEWLINENEWLINE :param str url: The base url to use when contacting the service (e.g.NEWLINE "https://gateway.watsonplatform.net/visual-recognition/api").NEWLINE The base url may differ between Bluemix regions.NEWLINENEWLINE :param str iam_apikey: An API key that can be used to request IAM tokens. IfNEWLINE this API key is provided, the SDK will manage the token and handle theNEWLINE refreshing.NEWLINENEWLINE :param str iam_access_token: An IAM access token is fully managed by the application.NEWLINE Responsibility falls on the application to refresh the token, either beforeNEWLINE it expires or reactively upon receiving a 401 from the service as any requestsNEWLINE made with an expired token will fail.NEWLINENEWLINE :param str iam_url: An optional URL for the IAM service API. 
Defaults toNEWLINE 'https://iam.bluemix.net/identity/token'.NEWLINE """NEWLINENEWLINE WatsonService.__init__(NEWLINE self,NEWLINE vcap_services_name='watson_vision_combined',NEWLINE url=url,NEWLINE iam_apikey=iam_apikey,NEWLINE iam_access_token=iam_access_token,NEWLINE iam_url=iam_url,NEWLINE use_vcap_services=True,NEWLINE display_name='Visual Recognition')NEWLINE self.version = versionNEWLINENEWLINE #########################NEWLINE # GeneralNEWLINE #########################NEWLINENEWLINE def classify(self,NEWLINE images_file=None,NEWLINE accept_language=None,NEWLINE url=None,NEWLINE threshold=None,NEWLINE owners=None,NEWLINE classifier_ids=None,NEWLINE images_file_content_type=None,NEWLINE images_filename=None,NEWLINE **kwargs):NEWLINE """NEWLINE Classify images.NEWLINENEWLINE Classify images with built-in or custom classifiers.NEWLINENEWLINE :param file images_file: An image file (.gif, .jpg, .png, .tif) or .zip file withNEWLINE images. Maximum image size is 10 MB. Include no more than 20 images and limit theNEWLINE .zip file to 100 MB. Encode the image and .zip file names in UTF-8 if they containNEWLINE non-ASCII characters. The service assumes UTF-8 encoding if it encountersNEWLINE non-ASCII characters.NEWLINE You can also include an image with the **url** parameter.NEWLINE :param str accept_language: The desired language of parts of the response. See theNEWLINE response for details.NEWLINE :param str url: The URL of an image (.gif, .jpg, .png, .tif) to analyze. TheNEWLINE minimum recommended pixel density is 32X32 pixels, but the service tends toNEWLINE perform better with images that are at least 224 x 224 pixels. The maximum imageNEWLINE size is 10 MB.NEWLINE You can also include images with the **images_file** parameter.NEWLINE :param float threshold: The minimum score a class must have to be displayed in theNEWLINE response. Set the threshold to `0.0` to return all identified classes.NEWLINE :param list[str] owners: The categories of classifiers to apply. TheNEWLINE **classifier_ids** parameter overrides **owners**, so make sure thatNEWLINE **classifier_ids** is empty.NEWLINE - Use `IBM` to classify against the `default` general classifier. You get the sameNEWLINE result if both **classifier_ids** and **owners** parameters are empty.NEWLINE - Use `me` to classify against all your custom classifiers. However, for betterNEWLINE performance use **classifier_ids** to specify the specific custom classifiers toNEWLINE apply.NEWLINE - Use both `IBM` and `me` to analyze the image against both classifier categories.NEWLINE :param list[str] classifier_ids: Which classifiers to apply. Overrides theNEWLINE **owners** parameter. You can specify both custom and built-in classifier IDs. 
TheNEWLINE        built-in `default` classifier is used if both **classifier_ids** and **owners**NEWLINE        parameters are empty.NEWLINE        The following built-in classifier IDs require no training:NEWLINE        - `default`: Returns classes from thousands of general tags.NEWLINE        - `food`: Enhances specificity and accuracy for images of food items.NEWLINE        - `explicit`: Evaluates whether the image might be pornographic.NEWLINE        :param str images_file_content_type: The content type of images_file.NEWLINE        :param str images_filename: The filename for images_file.NEWLINE        :param dict headers: A `dict` containing the request headersNEWLINE        :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE        :rtype: DetailedResponseNEWLINE        """NEWLINENEWLINE        headers = {'Accept-Language': accept_language}NEWLINE        if 'headers' in kwargs:NEWLINE            headers.update(kwargs.get('headers'))NEWLINE        headers[NEWLINE            'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=classify'NEWLINENEWLINE        params = {'version': self.version}NEWLINENEWLINE        form_data = {}NEWLINE        if images_file:NEWLINE            if not images_filename and hasattr(images_file, 'name'):NEWLINE                images_filename = basename(images_file.name)NEWLINE            form_data['images_file'] = (images_filename, images_file,NEWLINE                                        images_file_content_type orNEWLINE                                        'application/octet-stream')NEWLINE        if url:NEWLINE            form_data['url'] = (None, url, 'text/plain')NEWLINE        # Compare against None so that a threshold of 0.0 (documented aboveNEWLINE        # as returning all identified classes) is still sent to the service;NEWLINE        # serialize it, since the multipart encoder does not accept floats.NEWLINE        if threshold is not None:NEWLINE            form_data['threshold'] = (None, str(threshold), 'application/json')NEWLINE        if owners:NEWLINE            owners = self._convert_list(owners)NEWLINE            form_data['owners'] = (None, owners, 'application/json')NEWLINE        if classifier_ids:NEWLINE            classifier_ids = self._convert_list(classifier_ids)NEWLINE            form_data['classifier_ids'] = (None, classifier_ids,NEWLINE                                           'application/json')NEWLINENEWLINE        url = '/v3/classify'NEWLINE        response = self.request(NEWLINE            method='POST',NEWLINE            url=url,NEWLINE            headers=headers,NEWLINE            params=params,NEWLINE            files=form_data,NEWLINE            accept_json=True)NEWLINE        return responseNEWLINENEWLINE    #########################NEWLINE    # FaceNEWLINE    #########################NEWLINENEWLINE    def detect_faces(self,NEWLINE                     images_file=None,NEWLINE                     url=None,NEWLINE                     images_file_content_type=None,NEWLINE                     images_filename=None,NEWLINE                     accept_language=None,NEWLINE                     **kwargs):NEWLINE        """NEWLINE        Detect faces in images.NEWLINENEWLINE        **Important:** On April 2, 2018, the identity information in the response to callsNEWLINE        to the Face model was removed. The identity information refers to the `name` ofNEWLINE        the person, `score`, and `type_hierarchy` knowledge graph. For details about theNEWLINE        enhanced Face model, see the [ReleaseNEWLINE        notes](https://cloud.ibm.com/docs/services/visual-recognition/release-notes.html#2april2018).NEWLINE        Analyze and get data about faces in images. Responses can include estimated ageNEWLINE        and gender. This feature uses a built-in model, so no training is necessary. TheNEWLINE        Detect faces method does not support general biometric facial recognition.NEWLINE        Supported image formats include .gif, .jpg, .png, and .tif. The maximum image sizeNEWLINE        is 10 MB. The minimum recommended pixel density is 32X32 pixels, but the serviceNEWLINE        tends to perform better with images that are at least 224 x 224 pixels.NEWLINENEWLINE        :param file images_file: An image file (.gif, .jpg, .png, .tif) or .zip file withNEWLINE        images. Limit the .zip file to 100 MB. 
You can include a maximum of 15 images in aNEWLINE request.NEWLINE Encode the image and .zip file names in UTF-8 if they contain non-ASCIINEWLINE characters. The service assumes UTF-8 encoding if it encounters non-ASCIINEWLINE characters.NEWLINE You can also include an image with the **url** parameter.NEWLINE :param str url: The URL of an image to analyze. Must be in .gif, .jpg, .png, orNEWLINE .tif format. The minimum recommended pixel density is 32X32 pixels, but theNEWLINE service tends to perform better with images that are at least 224 x 224 pixels.NEWLINE The maximum image size is 10 MB. Redirects are followed, so you can use aNEWLINE shortened URL.NEWLINE You can also include images with the **images_file** parameter.NEWLINE :param str images_file_content_type: The content type of images_file.NEWLINE :param str images_filename: The filename for images_file.NEWLINE :param str accept_language: The desired language of parts of the response. See theNEWLINE response for details.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE headers = {'Accept-Language': accept_language}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=detect_faces'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE form_data = {}NEWLINE if images_file:NEWLINE if not images_filename and hasattr(images_file, 'name'):NEWLINE images_filename = basename(images_file.name)NEWLINE form_data['images_file'] = (images_filename, images_file,NEWLINE images_file_content_type orNEWLINE 'application/octet-stream')NEWLINE if url:NEWLINE form_data['url'] = (None, url, 'text/plain')NEWLINENEWLINE url = '/v3/detect_faces'NEWLINE response = self.request(NEWLINE method='POST',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE files=form_data,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE #########################NEWLINE # CustomNEWLINE #########################NEWLINENEWLINE def create_classifier(self,NEWLINE name,NEWLINE negative_examples=None,NEWLINE negative_examples_filename=None,NEWLINE **kwargs):NEWLINE """NEWLINE Create a classifier.NEWLINENEWLINE Train a new multi-faceted classifier on the uploaded image data. Create yourNEWLINE custom classifier with positive or negative examples. Include at least two sets ofNEWLINE examples, either two positive example files or one positive and one negative file.NEWLINE You can upload a maximum of 256 MB per call.NEWLINE Encode all names in UTF-8 if they contain non-ASCII characters (.zip and imageNEWLINE file names, and classifier and class names). The service assumes UTF-8 encoding ifNEWLINE it encounters non-ASCII characters.NEWLINENEWLINE :param str name: The name of the new classifier. Encode special characters inNEWLINE UTF-8.NEWLINE :param file negative_examples: A .zip file of images that do not depict the visualNEWLINE subject of any of the classes of the new classifier. Must contain a minimum of 10NEWLINE images.NEWLINE Encode special characters in the file name in UTF-8.NEWLINE :param str negative_examples_filename: The filename for negative_examples.NEWLINE :param file positive_examples: A .zip file of images that depict the visualNEWLINE subject of a class in the new classifier. 
You can include more than one positiveNEWLINE example file in a call.NEWLINE Specify the parameter name by appending `_positive_examples` to the class name.NEWLINE For example, `goldenretriever_positive_examples` creates the classNEWLINE **goldenretriever**.NEWLINE Include at least 10 images in .jpg or .png format. The minimum recommended imageNEWLINE resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100NEWLINE MB per .zip file.NEWLINE Encode special characters in the file name in UTF-8.NEWLINE :param str positive_examples_filename: The filename for positive_examples.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if name is None:NEWLINE raise ValueError('name must be provided')NEWLINE positive_examples_keys = [NEWLINE key for key in kwargs if re.match('^.+_positive_examples$', key)NEWLINE ]NEWLINE if not positive_examples_keys:NEWLINE raise ValueError(NEWLINE 'At least one <classname>_positive_examples parameter must be provided'NEWLINE )NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=create_classifier'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE form_data = {}NEWLINE form_data['name'] = (None, name, 'text/plain')NEWLINE if negative_examples:NEWLINE if not negative_examples_filename and hasattr(NEWLINE negative_examples, 'name'):NEWLINE negative_examples_filename = basename(negative_examples.name)NEWLINE if not negative_examples_filename:NEWLINE raise ValueError('negative_examples_filename must be provided')NEWLINE form_data['negative_examples'] = (negative_examples_filename,NEWLINE negative_examples,NEWLINE 'application/octet-stream')NEWLINE for key in positive_examples_keys:NEWLINE value = kwargs[key]NEWLINE filename = kwargs.get(key + '_filename')NEWLINE if not filename and hasattr(value, 'name'):NEWLINE filename = basename(value.name)NEWLINE form_data[key] = (filename, value, 'application/octet-stream')NEWLINENEWLINE url = '/v3/classifiers'NEWLINE response = self.request(NEWLINE method='POST',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE files=form_data,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE def delete_classifier(self, classifier_id, **kwargs):NEWLINE """NEWLINE Delete a classifier.NEWLINENEWLINE :param str classifier_id: The ID of the classifier.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if classifier_id is None:NEWLINE raise ValueError('classifier_id must be provided')NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=delete_classifier'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE url = '/v3/classifiers/{0}'.format(NEWLINE *self._encode_path_vars(classifier_id))NEWLINE response = self.request(NEWLINE method='DELETE',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE def get_classifier(self, classifier_id, 
**kwargs):NEWLINE """NEWLINE Retrieve classifier details.NEWLINENEWLINE Retrieve information about a custom classifier.NEWLINENEWLINE :param str classifier_id: The ID of the classifier.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if classifier_id is None:NEWLINE raise ValueError('classifier_id must be provided')NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=get_classifier'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE url = '/v3/classifiers/{0}'.format(NEWLINE *self._encode_path_vars(classifier_id))NEWLINE response = self.request(NEWLINE method='GET',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE def list_classifiers(self, verbose=None, **kwargs):NEWLINE """NEWLINE Retrieve a list of classifiers.NEWLINENEWLINE :param bool verbose: Specify `true` to return details about the classifiers. OmitNEWLINE this parameter to return a brief list of classifiers.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=list_classifiers'NEWLINENEWLINE params = {'version': self.version, 'verbose': verbose}NEWLINENEWLINE url = '/v3/classifiers'NEWLINE response = self.request(NEWLINE method='GET',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE def update_classifier(self,NEWLINE classifier_id,NEWLINE negative_examples=None,NEWLINE negative_examples_filename=None,NEWLINE **kwargs):NEWLINE """NEWLINE Update a classifier.NEWLINENEWLINE Update a custom classifier by adding new positive or negative classes or by addingNEWLINE new images to existing classes. You must supply at least one set of positive orNEWLINE negative examples. For details, see [Updating customNEWLINE classifiers](https://cloud.ibm.com/docs/services/visual-recognition/customizing.html#updating-custom-classifiers).NEWLINE Encode all names in UTF-8 if they contain non-ASCII characters (.zip and imageNEWLINE file names, and classifier and class names). The service assumes UTF-8 encoding ifNEWLINE it encounters non-ASCII characters.NEWLINE **Tip:** Don't make retraining calls on a classifier until the status is ready.NEWLINE When you submit retraining requests in parallel, the last request overwrites theNEWLINE previous requests. The retrained property shows the last time the classifierNEWLINE retraining finished.NEWLINENEWLINE :param str classifier_id: The ID of the classifier.NEWLINE :param file negative_examples: A .zip file of images that do not depict the visualNEWLINE subject of any of the classes of the new classifier. 
Must contain a minimum of 10NEWLINE images.NEWLINE Encode special characters in the file name in UTF-8.NEWLINE :param str negative_examples_filename: The filename for negative_examples.NEWLINE :param file positive_examples: A .zip file of images that depict the visualNEWLINE subject of a class in the classifier. The positive examples create or updateNEWLINE classes in the classifier. You can include more than one positive example file inNEWLINE a call.NEWLINE Specify the parameter name by appending `_positive_examples` to the class name.NEWLINE For example, `goldenretriever_positive_examples` creates the classNEWLINE `goldenretriever`.NEWLINE Include at least 10 images in .jpg or .png format. The minimum recommended imageNEWLINE resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100NEWLINE MB per .zip file.NEWLINE Encode special characters in the file name in UTF-8.NEWLINE :param str positive_examples_filename: The filename for positive_examples.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if classifier_id is None:NEWLINE raise ValueError('classifier_id must be provided')NEWLINE positive_examples_keys = [NEWLINE key for key in kwargs if re.match('^.+_positive_examples$', key)NEWLINE ]NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=update_classifier'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE form_data = {}NEWLINE if negative_examples:NEWLINE if not negative_examples_filename and hasattr(NEWLINE negative_examples, 'name'):NEWLINE negative_examples_filename = basename(negative_examples.name)NEWLINE if not negative_examples_filename:NEWLINE raise ValueError('negative_examples_filename must be provided')NEWLINE form_data['negative_examples'] = (negative_examples_filename,NEWLINE negative_examples,NEWLINE 'application/octet-stream')NEWLINE for key in positive_examples_keys:NEWLINE value = kwargs[key]NEWLINE filename = kwargs.get(key + '_filename')NEWLINE if not filename and hasattr(value, 'name'):NEWLINE filename = basename(value.name)NEWLINE form_data[key] = (filename, value, 'application/octet-stream')NEWLINENEWLINE url = '/v3/classifiers/{0}'.format(NEWLINE *self._encode_path_vars(classifier_id))NEWLINE response = self.request(NEWLINE method='POST',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE files=form_data,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINE #########################NEWLINE # Core MLNEWLINE #########################NEWLINENEWLINE def get_core_ml_model(self, classifier_id, **kwargs):NEWLINE """NEWLINE Retrieve a Core ML model of a classifier.NEWLINENEWLINE Download a Core ML model file (.mlmodel) of a custom classifier that returnsNEWLINE <tt>\"core_ml_enabled\": true</tt> in the classifier details.NEWLINENEWLINE :param str classifier_id: The ID of the classifier.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if classifier_id is None:NEWLINE raise ValueError('classifier_id must be provided')NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE 
headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=get_core_ml_model'NEWLINENEWLINE params = {'version': self.version}NEWLINENEWLINE url = '/v3/classifiers/{0}/core_ml_model'.format(NEWLINE *self._encode_path_vars(classifier_id))NEWLINE response = self.request(NEWLINE method='GET',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE accept_json=False)NEWLINE return responseNEWLINENEWLINE #########################NEWLINE # User dataNEWLINE #########################NEWLINENEWLINE def delete_user_data(self, customer_id, **kwargs):NEWLINE """NEWLINE Delete labeled data.NEWLINENEWLINE Deletes all data associated with a specified customer ID. The method has no effectNEWLINE if no data is associated with the customer ID.NEWLINE You associate a customer ID with data by passing the `X-Watson-Metadata` headerNEWLINE with a request that passes data. For more information about personal data andNEWLINE customer IDs, see [InformationNEWLINE security](https://cloud.ibm.com/docs/services/visual-recognition/information-security.html).NEWLINENEWLINE :param str customer_id: The customer ID for which all data is to be deleted.NEWLINE :param dict headers: A `dict` containing the request headersNEWLINE :return: A `DetailedResponse` containing the result, headers and HTTP status code.NEWLINE :rtype: DetailedResponseNEWLINE """NEWLINENEWLINE if customer_id is None:NEWLINE raise ValueError('customer_id must be provided')NEWLINENEWLINE headers = {}NEWLINE if 'headers' in kwargs:NEWLINE headers.update(kwargs.get('headers'))NEWLINE headers[NEWLINE 'X-IBMCloud-SDK-Analytics'] = 'service_name=watson_vision_combined;service_version=V3;operation_id=delete_user_data'NEWLINENEWLINE params = {'version': self.version, 'customer_id': customer_id}NEWLINENEWLINE url = '/v3/user_data'NEWLINE response = self.request(NEWLINE method='DELETE',NEWLINE url=url,NEWLINE headers=headers,NEWLINE params=params,NEWLINE accept_json=True)NEWLINE return responseNEWLINENEWLINENEWLINE##############################################################################NEWLINE# ModelsNEWLINE##############################################################################NEWLINENEWLINENEWLINEclass Class(object):NEWLINE """NEWLINE A category within a classifier.NEWLINENEWLINE :attr str class_name: The name of the class.NEWLINE """NEWLINENEWLINE def __init__(self, class_name):NEWLINE """NEWLINE Initialize a Class object.NEWLINENEWLINE :param str class_name: The name of the class.NEWLINE """NEWLINE self.class_name = class_nameNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a Class object from a json dictionary."""NEWLINE args = {}NEWLINE if 'class' in _dict or 'class_name' in _dict:NEWLINE args['class_name'] = _dict.get('class') or _dict.get('class_name')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'class\' not present in Class JSON')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'class_name') and self.class_name is not None:NEWLINE _dict['class'] = self.class_nameNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this Class object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not 
isinstance(other, self.__class__):NEWLINE            return FalseNEWLINE        return self.__dict__ == other.__dict__NEWLINENEWLINE    def __ne__(self, other):NEWLINE        """Return `true` when self and other are not equal, false otherwise."""NEWLINE        return not self == otherNEWLINENEWLINENEWLINEclass ClassResult(object):NEWLINE    """NEWLINE    Result of a class within a classifier.NEWLINENEWLINE    :attr str class_name: Name of the class.NEWLINE    Class names are translated in the language defined by the **Accept-Language** requestNEWLINE    header for the built-in classifier IDs (`default`, `food`, and `explicit`). ClassNEWLINE    names of custom classifiers are not translated. The response might not be in theNEWLINE    specified language when the requested language is not supported or when there is noNEWLINE    translation for the class name.NEWLINE    :attr float score: Confidence score for the property in the range of 0 to 1. A higherNEWLINE    score indicates greater likelihood that the class is depicted in the image. TheNEWLINE    default threshold for returning scores from a classifier is 0.5.NEWLINE    :attr str type_hierarchy: (optional) Knowledge graph of the property. For example,NEWLINE    `/fruit/pome/apple/eating apple/Granny Smith`. Included only if identified.NEWLINE    """NEWLINENEWLINE    def __init__(self, class_name, score, type_hierarchy=None):NEWLINE        """NEWLINE        Initialize a ClassResult object.NEWLINENEWLINE        :param str class_name: Name of the class.NEWLINE        Class names are translated in the language defined by the **Accept-Language**NEWLINE        request header for the built-in classifier IDs (`default`, `food`, andNEWLINE        `explicit`). Class names of custom classifiers are not translated. The responseNEWLINE        might not be in the specified language when the requested language is notNEWLINE        supported or when there is no translation for the class name.NEWLINE        :param float score: Confidence score for the property in the range of 0 to 1. ANEWLINE        higher score indicates greater likelihood that the class is depicted in the image.NEWLINE        The default threshold for returning scores from a classifier is 0.5.NEWLINE        :param str type_hierarchy: (optional) Knowledge graph of the property. ForNEWLINE        example, `/fruit/pome/apple/eating apple/Granny Smith`. 
Included only ifNEWLINE identified.NEWLINE """NEWLINE self.class_name = class_nameNEWLINE self.score = scoreNEWLINE self.type_hierarchy = type_hierarchyNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a ClassResult object from a json dictionary."""NEWLINE args = {}NEWLINE if 'class' in _dict or 'class_name' in _dict:NEWLINE args['class_name'] = _dict.get('class') or _dict.get('class_name')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'class\' not present in ClassResult JSON')NEWLINE if 'score' in _dict:NEWLINE args['score'] = _dict.get('score')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'score\' not present in ClassResult JSON')NEWLINE if 'type_hierarchy' in _dict:NEWLINE args['type_hierarchy'] = _dict.get('type_hierarchy')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'class_name') and self.class_name is not None:NEWLINE _dict['class'] = self.class_nameNEWLINE if hasattr(self, 'score') and self.score is not None:NEWLINE _dict['score'] = self.scoreNEWLINE if hasattr(self, 'type_hierarchy') and self.type_hierarchy is not None:NEWLINE _dict['type_hierarchy'] = self.type_hierarchyNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this ClassResult object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass ClassifiedImage(object):NEWLINE """NEWLINE Results for one image.NEWLINENEWLINE :attr str source_url: (optional) Source of the image before any redirects. NotNEWLINE returned when the image is uploaded.NEWLINE :attr str resolved_url: (optional) Fully resolved URL of the image after redirects areNEWLINE followed. Not returned when the image is uploaded.NEWLINE :attr str image: (optional) Relative path of the image file if uploaded directly. NotNEWLINE returned when the image is passed by URL.NEWLINE :attr ErrorInfo error: (optional) Information about what might have caused a failure,NEWLINE such as an image that is too large. Not returned when there is no error.NEWLINE :attr list[ClassifierResult] classifiers: The classifiers.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE classifiers,NEWLINE source_url=None,NEWLINE resolved_url=None,NEWLINE image=None,NEWLINE error=None):NEWLINE """NEWLINE Initialize a ClassifiedImage object.NEWLINENEWLINE :param list[ClassifierResult] classifiers: The classifiers.NEWLINE :param str source_url: (optional) Source of the image before any redirects. NotNEWLINE returned when the image is uploaded.NEWLINE :param str resolved_url: (optional) Fully resolved URL of the image afterNEWLINE redirects are followed. Not returned when the image is uploaded.NEWLINE :param str image: (optional) Relative path of the image file if uploaded directly.NEWLINE Not returned when the image is passed by URL.NEWLINE :param ErrorInfo error: (optional) Information about what might have caused aNEWLINE failure, such as an image that is too large. 
Not returned when there is no error.NEWLINE """NEWLINE self.source_url = source_urlNEWLINE self.resolved_url = resolved_urlNEWLINE self.image = imageNEWLINE self.error = errorNEWLINE self.classifiers = classifiersNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a ClassifiedImage object from a json dictionary."""NEWLINE args = {}NEWLINE if 'source_url' in _dict:NEWLINE args['source_url'] = _dict.get('source_url')NEWLINE if 'resolved_url' in _dict:NEWLINE args['resolved_url'] = _dict.get('resolved_url')NEWLINE if 'image' in _dict:NEWLINE args['image'] = _dict.get('image')NEWLINE if 'error' in _dict:NEWLINE args['error'] = ErrorInfo._from_dict(_dict.get('error'))NEWLINE if 'classifiers' in _dict:NEWLINE args['classifiers'] = [NEWLINE ClassifierResult._from_dict(x)NEWLINE for x in (_dict.get('classifiers'))NEWLINE ]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'classifiers\' not present in ClassifiedImage JSON'NEWLINE )NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'source_url') and self.source_url is not None:NEWLINE _dict['source_url'] = self.source_urlNEWLINE if hasattr(self, 'resolved_url') and self.resolved_url is not None:NEWLINE _dict['resolved_url'] = self.resolved_urlNEWLINE if hasattr(self, 'image') and self.image is not None:NEWLINE _dict['image'] = self.imageNEWLINE if hasattr(self, 'error') and self.error is not None:NEWLINE _dict['error'] = self.error._to_dict()NEWLINE if hasattr(self, 'classifiers') and self.classifiers is not None:NEWLINE _dict['classifiers'] = [x._to_dict() for x in self.classifiers]NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this ClassifiedImage object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass ClassifiedImages(object):NEWLINE """NEWLINE Results for all images.NEWLINENEWLINE :attr int custom_classes: (optional) Number of custom classes identified in theNEWLINE images.NEWLINE :attr int images_processed: (optional) Number of images processed for the API call.NEWLINE :attr list[ClassifiedImage] images: Classified images.NEWLINE :attr list[WarningInfo] warnings: (optional) Information about what might cause lessNEWLINE than optimal output. For example, a request sent with a corrupt .zip file and a listNEWLINE of image URLs will still complete, but does not return the expected output. NotNEWLINE returned when there is no warning.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE images,NEWLINE custom_classes=None,NEWLINE images_processed=None,NEWLINE warnings=None):NEWLINE """NEWLINE Initialize a ClassifiedImages object.NEWLINENEWLINE :param list[ClassifiedImage] images: Classified images.NEWLINE :param int custom_classes: (optional) Number of custom classes identified in theNEWLINE images.NEWLINE :param int images_processed: (optional) Number of images processed for the APINEWLINE call.NEWLINE :param list[WarningInfo] warnings: (optional) Information about what might causeNEWLINE less than optimal output. 
For example, a request sent with a corrupt .zip file andNEWLINE        a list of image URLs will still complete, but does not return the expected output.NEWLINE        Not returned when there is no warning.NEWLINE        """NEWLINE        self.custom_classes = custom_classesNEWLINE        self.images_processed = images_processedNEWLINE        self.images = imagesNEWLINE        self.warnings = warningsNEWLINENEWLINE    @classmethodNEWLINE    def _from_dict(cls, _dict):NEWLINE        """Initialize a ClassifiedImages object from a json dictionary."""NEWLINE        args = {}NEWLINE        if 'custom_classes' in _dict:NEWLINE            args['custom_classes'] = _dict.get('custom_classes')NEWLINE        if 'images_processed' in _dict:NEWLINE            args['images_processed'] = _dict.get('images_processed')NEWLINE        if 'images' in _dict:NEWLINE            args['images'] = [NEWLINE                ClassifiedImage._from_dict(x) for x in (_dict.get('images'))NEWLINE            ]NEWLINE        else:NEWLINE            raise ValueError(NEWLINE                'Required property \'images\' not present in ClassifiedImages JSON'NEWLINE            )NEWLINE        if 'warnings' in _dict:NEWLINE            args['warnings'] = [NEWLINE                WarningInfo._from_dict(x) for x in (_dict.get('warnings'))NEWLINE            ]NEWLINE        return cls(**args)NEWLINENEWLINE    def _to_dict(self):NEWLINE        """Return a json dictionary representing this model."""NEWLINE        _dict = {}NEWLINE        if hasattr(self, 'custom_classes') and self.custom_classes is not None:NEWLINE            _dict['custom_classes'] = self.custom_classesNEWLINE        if hasattr(self,NEWLINE                   'images_processed') and self.images_processed is not None:NEWLINE            _dict['images_processed'] = self.images_processedNEWLINE        if hasattr(self, 'images') and self.images is not None:NEWLINE            _dict['images'] = [x._to_dict() for x in self.images]NEWLINE        if hasattr(self, 'warnings') and self.warnings is not None:NEWLINE            _dict['warnings'] = [x._to_dict() for x in self.warnings]NEWLINE        return _dictNEWLINENEWLINE    def __str__(self):NEWLINE        """Return a `str` version of this ClassifiedImages object."""NEWLINE        return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE    def __eq__(self, other):NEWLINE        """Return `true` when self and other are equal, false otherwise."""NEWLINE        if not isinstance(other, self.__class__):NEWLINE            return FalseNEWLINE        return self.__dict__ == other.__dict__NEWLINENEWLINE    def __ne__(self, other):NEWLINE        """Return `true` when self and other are not equal, false otherwise."""NEWLINE        return not self == otherNEWLINENEWLINENEWLINEclass Classifier(object):NEWLINE    """NEWLINE    Information about a classifier.NEWLINENEWLINE    :attr str classifier_id: ID of a classifier identified in the image.NEWLINE    :attr str name: Name of the classifier.NEWLINE    :attr str owner: (optional) Unique ID of the account that owns the classifier. MightNEWLINE    not be returned by some requests.NEWLINE    :attr str status: (optional) Training status of classifier.NEWLINE    :attr bool core_ml_enabled: (optional) Whether the classifier can be downloaded as aNEWLINE    Core ML model after the training status is `ready`.NEWLINE    :attr str explanation: (optional) If classifier training has failed, this field mightNEWLINE    explain why.NEWLINE    :attr datetime created: (optional) Date and time in Coordinated Universal Time (UTC)NEWLINE    that the classifier was created.NEWLINE    :attr list[Class] classes: (optional) Classes that define a classifier.NEWLINE    :attr datetime retrained: (optional) Date and time in Coordinated Universal Time (UTC)NEWLINE    that the classifier was updated. Might not be returned by some requests. 
Identical toNEWLINE    `updated` and retained for backward compatibility.NEWLINE    :attr datetime updated: (optional) Date and time in Coordinated Universal Time (UTC)NEWLINE    that the classifier was most recently updated. The field matches either `retrained` orNEWLINE    `created`. Might not be returned by some requests.NEWLINE    """NEWLINENEWLINE    def __init__(self,NEWLINE                 classifier_id,NEWLINE                 name,NEWLINE                 owner=None,NEWLINE                 status=None,NEWLINE                 core_ml_enabled=None,NEWLINE                 explanation=None,NEWLINE                 created=None,NEWLINE                 classes=None,NEWLINE                 retrained=None,NEWLINE                 updated=None):NEWLINE        """NEWLINE        Initialize a Classifier object.NEWLINENEWLINE        :param str classifier_id: ID of a classifier identified in the image.NEWLINE        :param str name: Name of the classifier.NEWLINE        :param str owner: (optional) Unique ID of the account that owns the classifier.NEWLINE        Might not be returned by some requests.NEWLINE        :param str status: (optional) Training status of classifier.NEWLINE        :param bool core_ml_enabled: (optional) Whether the classifier can be downloadedNEWLINE        as a Core ML model after the training status is `ready`.NEWLINE        :param str explanation: (optional) If classifier training has failed, this fieldNEWLINE        might explain why.NEWLINE        :param datetime created: (optional) Date and time in Coordinated Universal TimeNEWLINE        (UTC) that the classifier was created.NEWLINE        :param list[Class] classes: (optional) Classes that define a classifier.NEWLINE        :param datetime retrained: (optional) Date and time in Coordinated Universal TimeNEWLINE        (UTC) that the classifier was updated. Might not be returned by some requests.NEWLINE        Identical to `updated` and retained for backward compatibility.NEWLINE        :param datetime updated: (optional) Date and time in Coordinated Universal TimeNEWLINE        (UTC) that the classifier was most recently updated. The field matches eitherNEWLINE        `retrained` or `created`. 
Might not be returned by some requests.NEWLINE """NEWLINE self.classifier_id = classifier_idNEWLINE self.name = nameNEWLINE self.owner = ownerNEWLINE self.status = statusNEWLINE self.core_ml_enabled = core_ml_enabledNEWLINE self.explanation = explanationNEWLINE self.created = createdNEWLINE self.classes = classesNEWLINE self.retrained = retrainedNEWLINE self.updated = updatedNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a Classifier object from a json dictionary."""NEWLINE args = {}NEWLINE if 'classifier_id' in _dict:NEWLINE args['classifier_id'] = _dict.get('classifier_id')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'classifier_id\' not present in Classifier JSON'NEWLINE )NEWLINE if 'name' in _dict:NEWLINE args['name'] = _dict.get('name')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'name\' not present in Classifier JSON')NEWLINE if 'owner' in _dict:NEWLINE args['owner'] = _dict.get('owner')NEWLINE if 'status' in _dict:NEWLINE args['status'] = _dict.get('status')NEWLINE if 'core_ml_enabled' in _dict:NEWLINE args['core_ml_enabled'] = _dict.get('core_ml_enabled')NEWLINE if 'explanation' in _dict:NEWLINE args['explanation'] = _dict.get('explanation')NEWLINE if 'created' in _dict:NEWLINE args['created'] = string_to_datetime(_dict.get('created'))NEWLINE if 'classes' in _dict:NEWLINE args['classes'] = [NEWLINE Class._from_dict(x) for x in (_dict.get('classes'))NEWLINE ]NEWLINE if 'retrained' in _dict:NEWLINE args['retrained'] = string_to_datetime(_dict.get('retrained'))NEWLINE if 'updated' in _dict:NEWLINE args['updated'] = string_to_datetime(_dict.get('updated'))NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'classifier_id') and self.classifier_id is not None:NEWLINE _dict['classifier_id'] = self.classifier_idNEWLINE if hasattr(self, 'name') and self.name is not None:NEWLINE _dict['name'] = self.nameNEWLINE if hasattr(self, 'owner') and self.owner is not None:NEWLINE _dict['owner'] = self.ownerNEWLINE if hasattr(self, 'status') and self.status is not None:NEWLINE _dict['status'] = self.statusNEWLINE if hasattr(self,NEWLINE 'core_ml_enabled') and self.core_ml_enabled is not None:NEWLINE _dict['core_ml_enabled'] = self.core_ml_enabledNEWLINE if hasattr(self, 'explanation') and self.explanation is not None:NEWLINE _dict['explanation'] = self.explanationNEWLINE if hasattr(self, 'created') and self.created is not None:NEWLINE _dict['created'] = datetime_to_string(self.created)NEWLINE if hasattr(self, 'classes') and self.classes is not None:NEWLINE _dict['classes'] = [x._to_dict() for x in self.classes]NEWLINE if hasattr(self, 'retrained') and self.retrained is not None:NEWLINE _dict['retrained'] = datetime_to_string(self.retrained)NEWLINE if hasattr(self, 'updated') and self.updated is not None:NEWLINE _dict['updated'] = datetime_to_string(self.updated)NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this Classifier object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == 
otherNEWLINENEWLINENEWLINEclass ClassifierResult(object):NEWLINE """NEWLINE Classifier and score combination.NEWLINENEWLINE :attr str name: Name of the classifier.NEWLINE :attr str classifier_id: ID of a classifier identified in the image.NEWLINE :attr list[ClassResult] classes: Classes within the classifier.NEWLINE """NEWLINENEWLINE def __init__(self, name, classifier_id, classes):NEWLINE """NEWLINE Initialize a ClassifierResult object.NEWLINENEWLINE :param str name: Name of the classifier.NEWLINE :param str classifier_id: ID of a classifier identified in the image.NEWLINE :param list[ClassResult] classes: Classes within the classifier.NEWLINE """NEWLINE self.name = nameNEWLINE self.classifier_id = classifier_idNEWLINE self.classes = classesNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a ClassifierResult object from a json dictionary."""NEWLINE args = {}NEWLINE if 'name' in _dict:NEWLINE args['name'] = _dict.get('name')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'name\' not present in ClassifierResult JSON'NEWLINE )NEWLINE if 'classifier_id' in _dict:NEWLINE args['classifier_id'] = _dict.get('classifier_id')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'classifier_id\' not present in ClassifierResult JSON'NEWLINE )NEWLINE if 'classes' in _dict:NEWLINE args['classes'] = [NEWLINE ClassResult._from_dict(x) for x in (_dict.get('classes'))NEWLINE ]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'classes\' not present in ClassifierResult JSON'NEWLINE )NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'name') and self.name is not None:NEWLINE _dict['name'] = self.nameNEWLINE if hasattr(self, 'classifier_id') and self.classifier_id is not None:NEWLINE _dict['classifier_id'] = self.classifier_idNEWLINE if hasattr(self, 'classes') and self.classes is not None:NEWLINE _dict['classes'] = [x._to_dict() for x in self.classes]NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this ClassifierResult object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass Classifiers(object):NEWLINE """NEWLINE A container for the list of classifiers.NEWLINENEWLINE :attr list[Classifier] classifiers: List of classifiers.NEWLINE """NEWLINENEWLINE def __init__(self, classifiers):NEWLINE """NEWLINE Initialize a Classifiers object.NEWLINENEWLINE :param list[Classifier] classifiers: List of classifiers.NEWLINE """NEWLINE self.classifiers = classifiersNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a Classifiers object from a json dictionary."""NEWLINE args = {}NEWLINE if 'classifiers' in _dict:NEWLINE args['classifiers'] = [NEWLINE Classifier._from_dict(x) for x in (_dict.get('classifiers'))NEWLINE ]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'classifiers\' not present in Classifiers JSON'NEWLINE )NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this 
model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'classifiers') and self.classifiers is not None:NEWLINE _dict['classifiers'] = [x._to_dict() for x in self.classifiers]NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this Classifiers object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass DetectedFaces(object):NEWLINE """NEWLINE Results for all faces.NEWLINENEWLINE :attr int images_processed: Number of images processed for the API call.NEWLINE :attr list[ImageWithFaces] images: The images.NEWLINE :attr list[WarningInfo] warnings: (optional) Information about what might cause lessNEWLINE than optimal output. For example, a request sent with a corrupt .zip file and a listNEWLINE of image URLs will still complete, but does not return the expected output. NotNEWLINE returned when there is no warning.NEWLINE """NEWLINENEWLINE def __init__(self, images_processed, images, warnings=None):NEWLINE """NEWLINE Initialize a DetectedFaces object.NEWLINENEWLINE :param int images_processed: Number of images processed for the API call.NEWLINE :param list[ImageWithFaces] images: The images.NEWLINE :param list[WarningInfo] warnings: (optional) Information about what might causeNEWLINE less than optimal output. For example, a request sent with a corrupt .zip file andNEWLINE a list of image URLs will still complete, but does not return the expected output.NEWLINE Not returned when there is no warning.NEWLINE """NEWLINE self.images_processed = images_processedNEWLINE self.images = imagesNEWLINE self.warnings = warningsNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a DetectedFaces object from a json dictionary."""NEWLINE args = {}NEWLINE if 'images_processed' in _dict:NEWLINE args['images_processed'] = _dict.get('images_processed')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'images_processed\' not present in DetectedFaces JSON'NEWLINE )NEWLINE if 'images' in _dict:NEWLINE args['images'] = [NEWLINE ImageWithFaces._from_dict(x) for x in (_dict.get('images'))NEWLINE ]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'images\' not present in DetectedFaces JSON'NEWLINE )NEWLINE if 'warnings' in _dict:NEWLINE args['warnings'] = [NEWLINE WarningInfo._from_dict(x) for x in (_dict.get('warnings'))NEWLINE ]NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self,NEWLINE 'images_processed') and self.images_processed is not None:NEWLINE _dict['images_processed'] = self.images_processedNEWLINE if hasattr(self, 'images') and self.images is not None:NEWLINE _dict['images'] = [x._to_dict() for x in self.images]NEWLINE if hasattr(self, 'warnings') and self.warnings is not None:NEWLINE _dict['warnings'] = [x._to_dict() for x in self.warnings]NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this DetectedFaces object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, 
false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass ErrorInfo(object):NEWLINE """NEWLINE Information about what might have caused a failure, such as an image that is tooNEWLINE large. Not returned when there is no error.NEWLINENEWLINE :attr int code: HTTP status code.NEWLINE :attr str description: Human-readable error description. For example, `File size limitNEWLINE exceeded`.NEWLINE :attr str error_id: Codified error string. For example, `limit_exceeded`.NEWLINE """NEWLINENEWLINE def __init__(self, code, description, error_id):NEWLINE """NEWLINE Initialize a ErrorInfo object.NEWLINENEWLINE :param int code: HTTP status code.NEWLINE :param str description: Human-readable error description. For example, `File sizeNEWLINE limit exceeded`.NEWLINE :param str error_id: Codified error string. For example, `limit_exceeded`.NEWLINE """NEWLINE self.code = codeNEWLINE self.description = descriptionNEWLINE self.error_id = error_idNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a ErrorInfo object from a json dictionary."""NEWLINE args = {}NEWLINE if 'code' in _dict:NEWLINE args['code'] = _dict.get('code')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'code\' not present in ErrorInfo JSON')NEWLINE if 'description' in _dict:NEWLINE args['description'] = _dict.get('description')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'description\' not present in ErrorInfo JSON'NEWLINE )NEWLINE if 'error_id' in _dict:NEWLINE args['error_id'] = _dict.get('error_id')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'error_id\' not present in ErrorInfo JSON')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'code') and self.code is not None:NEWLINE _dict['code'] = self.codeNEWLINE if hasattr(self, 'description') and self.description is not None:NEWLINE _dict['description'] = self.descriptionNEWLINE if hasattr(self, 'error_id') and self.error_id is not None:NEWLINE _dict['error_id'] = self.error_idNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this ErrorInfo object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass Face(object):NEWLINE """NEWLINE Information about the face.NEWLINENEWLINE :attr FaceAge age: (optional) Age information about a face.NEWLINE :attr FaceGender gender: (optional) Information about the gender of the face.NEWLINE :attr FaceLocation face_location: (optional) The location of the bounding box aroundNEWLINE the face.NEWLINE """NEWLINENEWLINE def __init__(self, age=None, gender=None, face_location=None):NEWLINE """NEWLINE Initialize a Face object.NEWLINENEWLINE :param FaceAge age: (optional) Age information about a face.NEWLINE :param FaceGender gender: (optional) Information about the gender of the 
face.NEWLINE :param FaceLocation face_location: (optional) The location of the bounding boxNEWLINE around the face.NEWLINE """NEWLINE self.age = ageNEWLINE self.gender = genderNEWLINE self.face_location = face_locationNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a Face object from a json dictionary."""NEWLINE args = {}NEWLINE if 'age' in _dict:NEWLINE args['age'] = FaceAge._from_dict(_dict.get('age'))NEWLINE if 'gender' in _dict:NEWLINE args['gender'] = FaceGender._from_dict(_dict.get('gender'))NEWLINE if 'face_location' in _dict:NEWLINE args['face_location'] = FaceLocation._from_dict(NEWLINE _dict.get('face_location'))NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'age') and self.age is not None:NEWLINE _dict['age'] = self.age._to_dict()NEWLINE if hasattr(self, 'gender') and self.gender is not None:NEWLINE _dict['gender'] = self.gender._to_dict()NEWLINE if hasattr(self, 'face_location') and self.face_location is not None:NEWLINE _dict['face_location'] = self.face_location._to_dict()NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this Face object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass FaceAge(object):NEWLINE """NEWLINE Age information about a face.NEWLINENEWLINE :attr int min: (optional) Estimated minimum age.NEWLINE :attr int max: (optional) Estimated maximum age.NEWLINE :attr float score: Confidence score in the range of 0 to 1. A higher score indicatesNEWLINE greater confidence in the estimated value for the property.NEWLINE """NEWLINENEWLINE def __init__(self, score, min=None, max=None):NEWLINE """NEWLINE Initialize a FaceAge object.NEWLINENEWLINE :param float score: Confidence score in the range of 0 to 1. 
A higher scoreNEWLINE indicates greater confidence in the estimated value for the property.NEWLINE :param int min: (optional) Estimated minimum age.NEWLINE :param int max: (optional) Estimated maximum age.NEWLINE """NEWLINE self.min = minNEWLINE self.max = maxNEWLINE self.score = scoreNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a FaceAge object from a json dictionary."""NEWLINE args = {}NEWLINE if 'min' in _dict:NEWLINE args['min'] = _dict.get('min')NEWLINE if 'max' in _dict:NEWLINE args['max'] = _dict.get('max')NEWLINE if 'score' in _dict:NEWLINE args['score'] = _dict.get('score')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'score\' not present in FaceAge JSON')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'min') and self.min is not None:NEWLINE _dict['min'] = self.minNEWLINE if hasattr(self, 'max') and self.max is not None:NEWLINE _dict['max'] = self.maxNEWLINE if hasattr(self, 'score') and self.score is not None:NEWLINE _dict['score'] = self.scoreNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this FaceAge object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass FaceGender(object):NEWLINE """NEWLINE Information about the gender of the face.NEWLINENEWLINE :attr str gender: Gender identified by the face. For example, `MALE` or `FEMALE`.NEWLINE :attr str gender_label: The word for "male" or "female" in the language defined by theNEWLINE **Accept-Language** request header.NEWLINE :attr float score: Confidence score in the range of 0 to 1. A higher score indicatesNEWLINE greater confidence in the estimated value for the property.NEWLINE """NEWLINENEWLINE def __init__(self, gender, gender_label, score):NEWLINE """NEWLINE Initialize a FaceGender object.NEWLINENEWLINE :param str gender: Gender identified by the face. For example, `MALE` or `FEMALE`.NEWLINE :param str gender_label: The word for "male" or "female" in the language definedNEWLINE by the **Accept-Language** request header.NEWLINE :param float score: Confidence score in the range of 0 to 1. 
A higher scoreNEWLINE indicates greater confidence in the estimated value for the property.NEWLINE """NEWLINE self.gender = genderNEWLINE self.gender_label = gender_labelNEWLINE self.score = scoreNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a FaceGender object from a json dictionary."""NEWLINE args = {}NEWLINE if 'gender' in _dict:NEWLINE args['gender'] = _dict.get('gender')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'gender\' not present in FaceGender JSON')NEWLINE if 'gender_label' in _dict:NEWLINE args['gender_label'] = _dict.get('gender_label')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'gender_label\' not present in FaceGender JSON'NEWLINE )NEWLINE if 'score' in _dict:NEWLINE args['score'] = _dict.get('score')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'score\' not present in FaceGender JSON')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'gender') and self.gender is not None:NEWLINE _dict['gender'] = self.genderNEWLINE if hasattr(self, 'gender_label') and self.gender_label is not None:NEWLINE _dict['gender_label'] = self.gender_labelNEWLINE if hasattr(self, 'score') and self.score is not None:NEWLINE _dict['score'] = self.scoreNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this FaceGender object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass FaceLocation(object):NEWLINE """NEWLINE The location of the bounding box around the face.NEWLINENEWLINE :attr float width: Width in pixels of face region.NEWLINE :attr float height: Height in pixels of face region.NEWLINE :attr float left: X-position of top-left pixel of face region.NEWLINE :attr float top: Y-position of top-left pixel of face region.NEWLINE """NEWLINENEWLINE def __init__(self, width, height, left, top):NEWLINE """NEWLINE Initialize a FaceLocation object.NEWLINENEWLINE :param float width: Width in pixels of face region.NEWLINE :param float height: Height in pixels of face region.NEWLINE :param float left: X-position of top-left pixel of face region.NEWLINE :param float top: Y-position of top-left pixel of face region.NEWLINE """NEWLINE self.width = widthNEWLINE self.height = heightNEWLINE self.left = leftNEWLINE self.top = topNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a FaceLocation object from a json dictionary."""NEWLINE args = {}NEWLINE if 'width' in _dict:NEWLINE args['width'] = _dict.get('width')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'width\' not present in FaceLocation JSON')NEWLINE if 'height' in _dict:NEWLINE args['height'] = _dict.get('height')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'height\' not present in FaceLocation JSON')NEWLINE if 'left' in _dict:NEWLINE args['left'] = _dict.get('left')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'left\' not present in FaceLocation JSON')NEWLINE if 'top' in _dict:NEWLINE args['top'] = 
_dict.get('top')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'top\' not present in FaceLocation JSON')NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'width') and self.width is not None:NEWLINE _dict['width'] = self.widthNEWLINE if hasattr(self, 'height') and self.height is not None:NEWLINE _dict['height'] = self.heightNEWLINE if hasattr(self, 'left') and self.left is not None:NEWLINE _dict['left'] = self.leftNEWLINE if hasattr(self, 'top') and self.top is not None:NEWLINE _dict['top'] = self.topNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this FaceLocation object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass ImageWithFaces(object):NEWLINE """NEWLINE Information about faces in the image.NEWLINENEWLINE :attr list[Face] faces: Faces detected in the images.NEWLINE :attr str image: (optional) Relative path of the image file if uploaded directly. NotNEWLINE returned when the image is passed by URL.NEWLINE :attr str source_url: (optional) Source of the image before any redirects. NotNEWLINE returned when the image is uploaded.NEWLINE :attr str resolved_url: (optional) Fully resolved URL of the image after redirects areNEWLINE followed. Not returned when the image is uploaded.NEWLINE :attr ErrorInfo error: (optional) Information about what might have caused a failure,NEWLINE such as an image that is too large. Not returned when there is no error.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE faces,NEWLINE image=None,NEWLINE source_url=None,NEWLINE resolved_url=None,NEWLINE error=None):NEWLINE """NEWLINE Initialize a ImageWithFaces object.NEWLINENEWLINE :param list[Face] faces: Faces detected in the images.NEWLINE :param str image: (optional) Relative path of the image file if uploaded directly.NEWLINE Not returned when the image is passed by URL.NEWLINE :param str source_url: (optional) Source of the image before any redirects. NotNEWLINE returned when the image is uploaded.NEWLINE :param str resolved_url: (optional) Fully resolved URL of the image afterNEWLINE redirects are followed. Not returned when the image is uploaded.NEWLINE :param ErrorInfo error: (optional) Information about what might have caused aNEWLINE failure, such as an image that is too large. 
Not returned when there is no error.NEWLINE """NEWLINE self.faces = facesNEWLINE self.image = imageNEWLINE self.source_url = source_urlNEWLINE self.resolved_url = resolved_urlNEWLINE self.error = errorNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a ImageWithFaces object from a json dictionary."""NEWLINE args = {}NEWLINE if 'faces' in _dict:NEWLINE args['faces'] = [Face._from_dict(x) for x in (_dict.get('faces'))]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'faces\' not present in ImageWithFaces JSON'NEWLINE )NEWLINE if 'image' in _dict:NEWLINE args['image'] = _dict.get('image')NEWLINE if 'source_url' in _dict:NEWLINE args['source_url'] = _dict.get('source_url')NEWLINE if 'resolved_url' in _dict:NEWLINE args['resolved_url'] = _dict.get('resolved_url')NEWLINE if 'error' in _dict:NEWLINE args['error'] = ErrorInfo._from_dict(_dict.get('error'))NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'faces') and self.faces is not None:NEWLINE _dict['faces'] = [x._to_dict() for x in self.faces]NEWLINE if hasattr(self, 'image') and self.image is not None:NEWLINE _dict['image'] = self.imageNEWLINE if hasattr(self, 'source_url') and self.source_url is not None:NEWLINE _dict['source_url'] = self.source_urlNEWLINE if hasattr(self, 'resolved_url') and self.resolved_url is not None:NEWLINE _dict['resolved_url'] = self.resolved_urlNEWLINE if hasattr(self, 'error') and self.error is not None:NEWLINE _dict['error'] = self.error._to_dict()NEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this ImageWithFaces object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINENEWLINENEWLINEclass WarningInfo(object):NEWLINE """NEWLINE Information about something that went wrong.NEWLINENEWLINE :attr str warning_id: Codified warning string, such as `limit_reached`.NEWLINE :attr str description: Information about the error.NEWLINE """NEWLINENEWLINE def __init__(self, warning_id, description):NEWLINE """NEWLINE Initialize a WarningInfo object.NEWLINENEWLINE :param str warning_id: Codified warning string, such as `limit_reached`.NEWLINE :param str description: Information about the error.NEWLINE """NEWLINE self.warning_id = warning_idNEWLINE self.description = descriptionNEWLINENEWLINE @classmethodNEWLINE def _from_dict(cls, _dict):NEWLINE """Initialize a WarningInfo object from a json dictionary."""NEWLINE args = {}NEWLINE if 'warning_id' in _dict:NEWLINE args['warning_id'] = _dict.get('warning_id')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'warning_id\' not present in WarningInfo JSON'NEWLINE )NEWLINE if 'description' in _dict:NEWLINE args['description'] = _dict.get('description')NEWLINE else:NEWLINE raise ValueError(NEWLINE 'Required property \'description\' not present in WarningInfo JSON'NEWLINE )NEWLINE return cls(**args)NEWLINENEWLINE def _to_dict(self):NEWLINE """Return a json dictionary representing this model."""NEWLINE _dict = {}NEWLINE if hasattr(self, 'warning_id') and self.warning_id is not 
None:NEWLINE _dict['warning_id'] = self.warning_idNEWLINE if hasattr(self, 'description') and self.description is not None:NEWLINE _dict['description'] = self.descriptionNEWLINE return _dictNEWLINENEWLINE def __str__(self):NEWLINE """Return a `str` version of this WarningInfo object."""NEWLINE return json.dumps(self._to_dict(), indent=2)NEWLINENEWLINE def __eq__(self, other):NEWLINE """Return `true` when self and other are equal, false otherwise."""NEWLINE if not isinstance(other, self.__class__):NEWLINE return FalseNEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Return `true` when self and other are not equal, false otherwise."""NEWLINE return not self == otherNEWLINE |
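# Hedged usage sketch (added for illustration; not part of the original SDK module):NEWLINE# the model classes above share the same _from_dict/_to_dict round-trip contract,NEWLINE# so a dict parsed from service JSON can be rehydrated and re-serialized unchanged.NEWLINE# The 413 status code below is an assumed example value.NEWLINEif __name__ == '__main__':NEWLINE payload = {'code': 413, # assumed example HTTP statusNEWLINE 'description': 'File size limit exceeded',NEWLINE 'error_id': 'limit_exceeded'}NEWLINE err = ErrorInfo._from_dict(payload)NEWLINE assert err._to_dict() == payload # lossless round tripNEWLINE print(err) # pretty-printed via json.dumps in __str__NEWLINE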
#!/usr/bin/pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE##############################################################################NEWLINE# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #NEWLINE# #NEWLINE# Licensed under the Amazon Software License (the "License"). You may not #NEWLINE# use this file except in compliance with the License. A copy of the #NEWLINE# License is located at #NEWLINE# #NEWLINE# http://aws.amazon.com/asl/ #NEWLINE# #NEWLINE# or in the "license" file accompanying this file. This file is distributed #NEWLINE# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, #NEWLINE# express or implied. See the License for the specific language governing #NEWLINE# permissions and limitations under the License. #NEWLINE##############################################################################NEWLINEfrom botocore.exceptions import ClientErrorNEWLINEimport boto3NEWLINEimport osNEWLINEimport loggingNEWLINENEWLINECOLLECTION_NAME = os.environ['RekognitionCollectionName']NEWLINEDYNAMODB_TABLE_NAME = os.environ['DynamoDBTableName']NEWLINELOG_LEVEL = os.environ['LogLevel']NEWLINESEND_ANONYMOUS_DATA = os.environ['SendAnonymousData']NEWLINENEWLINEdynamodb = boto3.client('dynamodb')NEWLINEs3 = boto3.client('s3')NEWLINErekognition = boto3.client('rekognition')NEWLINENEWLINElogger = logging.getLogger()NEWLINElogger.setLevel(LOG_LEVEL)NEWLINENEWLINENEWLINEdef lambda_handler(event, context):NEWLINE logger.info('Invoked the IndexFace Lambda function.')NEWLINE bucket = event['Records'][0]['s3']['bucket']['name']NEWLINE key = event['Records'][0]['s3']['object']['key']NEWLINENEWLINE name = os.path.splitext(os.path.basename(key))[0]NEWLINENEWLINE # Register the face image with RekognitionNEWLINE logger.info('Registering the face image with Rekognition.')NEWLINE response = rekognition.index_faces(NEWLINE Image={NEWLINE "S3Object": {NEWLINE "Bucket": bucket,NEWLINE "Name": keyNEWLINE }NEWLINE },NEWLINE CollectionId=COLLECTION_NAMENEWLINE )NEWLINENEWLINE if response['ResponseMetadata']['HTTPStatusCode'] != 200 or len(response['FaceRecords']) == 0:NEWLINE raise RuntimeError('Failed to register the face with Rekognition.')NEWLINENEWLINE faceId = response['FaceRecords'][0]['Face']['FaceId']NEWLINENEWLINE # Insert the face data into DynamoDBNEWLINE logger.info('Inserting the face ID into the DynamoDB table.')NEWLINE try:NEWLINE response = dynamodb.put_item(NEWLINE TableName=DYNAMODB_TABLE_NAME,NEWLINE Item={NEWLINE 'RekognitionId': {'S': faceId},NEWLINE 'Name': {'S': name},NEWLINE }NEWLINE )NEWLINE except ClientError as err:NEWLINE # Roll back the Rekognition registration; delete_faces expects a listNEWLINE # of face IDs via the FaceIds parameter.NEWLINE rekognition.delete_faces(NEWLINE CollectionId=COLLECTION_NAME,NEWLINE FaceIds=[faceId]NEWLINE )NEWLINE raise errNEWLINENEWLINE # If the face image was registered successfully, delete the image from S3.NEWLINE s3.delete_object(Bucket=bucket, Key=key)NEWLINE logger.info('Registered a face image successfully.')NEWLINE
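# Hedged local-test sketch (added; not part of the original Lambda source): theNEWLINE# handler above reads only the bucket name and object key of the first S3 record,NEWLINE# so a minimal synthetic event like this exercises it. The bucket and key namesNEWLINE# are hypothetical, and a real run still needs AWS credentials plus the env varsNEWLINE# referenced above.NEWLINE#NEWLINE# event = {NEWLINE#     'Records': [{NEWLINE#         's3': {NEWLINE#             'bucket': {'name': 'my-faces-bucket'},NEWLINE#             'object': {'key': 'uploads/alice.jpg'},NEWLINE#         }NEWLINE#     }]NEWLINE# }NEWLINE# lambda_handler(event, context=None)NEWLINE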
# uncompyle6 version 3.3.1NEWLINE# Python bytecode 3.6 (3379)NEWLINE# Decompiled from: Python 3.6.2 (v3.6.2:5fd33b5926, Jul 16 2017, 20:11:06) NEWLINE# [GCC 4.2.1 (Apple Inc. build 5666) (dot 3)]NEWLINE# Embedded file name: ../../shared/problems/CR/problem1050_CR.pyNEWLINE# Compiled at: 2019-03-12 17:52:21NEWLINE# Size of source mod 2**32: 1123 bytesNEWLINE__author__ = 'patras'NEWLINEfrom domain_chargeableRobot import *NEWLINEfrom timer import DURATIONNEWLINEfrom state import stateNEWLINEDURATION.TIME = {'put':5, NEWLINE 'take':5, NEWLINE 'perceive':3, NEWLINE 'charge':10, NEWLINE 'move':10, NEWLINE 'moveToEmergency':20, NEWLINE 'moveCharger':15, NEWLINE 'addressEmergency':20, NEWLINE 'wait':10}NEWLINEDURATION.COUNTER = {'put':5, NEWLINE 'take':5, NEWLINE 'perceive':3, NEWLINE 'charge':10, NEWLINE 'move':10, NEWLINE 'moveToEmergency':20, NEWLINE 'moveCharger':15, NEWLINE 'addressEmergency':20, NEWLINE 'wait':10}NEWLINErv.LOCATIONS = [NEWLINE 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]NEWLINErv.EDGES = {1:[2], 2:[1, 3], 3:[2, 4], 4:[5, 3, 6, 7], 5:[4, 9], 6:[4, 10], 7:[4, 8], 8:[7], 9:[5], 10:[6]}NEWLINErv.OBJECTS = ['o1']NEWLINErv.ROBOTS = [NEWLINE 'r1']NEWLINENEWLINEdef ResetState():NEWLINE state.loc = {'r1': 1}NEWLINE state.charge = {'r1': 3}NEWLINE state.load = {'r1': NIL}NEWLINE state.pos = {'c1':1, 'o1':9}NEWLINE state.containers = {1:[], 2:[], 3:[], 4:[], 5:[], 6:[], 7:[], 8:[], 9:['o1'], 10:[]}NEWLINE state.emergencyHandling = {'r1':False, 'r2':False}NEWLINE state.view = {}NEWLINE for l in rv.LOCATIONS:NEWLINE state.view[l] = FalseNEWLINENEWLINENEWLINEtasks = {4: [['fetch', 'r1', 'o1']]}NEWLINEeventsEnv = {}NEWLINE# okay decompiling __pycache__/problem1050_CR.cpython-36.pycNEWLINE |
# -*- coding: utf-8 -*-NEWLINEfrom hashlib import sha256NEWLINENEWLINEfrom .sha1 import SHA1NEWLINENEWLINENEWLINEdef hmac(key, message, hash_class):NEWLINE block_size = hash_class().block_sizeNEWLINENEWLINE # Per RFC 2104: hash keys longer than one block, then zero-pad to block size.NEWLINE if len(key) > block_size:NEWLINE key = hash_class(key).digest()NEWLINE key = key.ljust(block_size, b"\x00")NEWLINENEWLINE mac = message.encode() if isinstance(message, str) else messageNEWLINE # Inner pass uses ipad (0x36), outer pass uses opad (0x5c):NEWLINE # HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)).NEWLINE for pad_byte in b"\x36", b"\x5c":NEWLINE prefix = bytes(kb ^ pb for kb, pb in zip(key, pad_byte * block_size))NEWLINE mac = hash_class(prefix + mac).digest()NEWLINENEWLINE return macNEWLINENEWLINENEWLINEdef hmac_sha1(key, message):NEWLINE return hmac(key, message, SHA1)NEWLINENEWLINENEWLINEdef hmac_sha256(key, message):NEWLINE return hmac(key, message, sha256)NEWLINE
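# Hedged self-check sketch (added; not in the original module): with the pad orderNEWLINE# fixed, hmac_sha256 above should agree byte-for-byte with the standard library'sNEWLINE# hmac module.NEWLINEif __name__ == '__main__':NEWLINE import hashlibNEWLINE import hmac as stdlib_hmacNEWLINE key, msg = b'key', b'The quick brown fox jumps over the lazy dog'NEWLINE expected = stdlib_hmac.new(key, msg, hashlib.sha256).digest()NEWLINE assert hmac_sha256(key, msg) == expectedNEWLINE print('hmac_sha256 matches the standard library')NEWLINE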
import sysNEWLINEimport numpy as npNEWLINEfrom acd.util import tiling_2d as tilingNEWLINEfrom acd.scores.cd import cd, cd_textNEWLINEfrom skimage import measure # for connected componentsNEWLINEfrom math import ceilNEWLINEfrom scipy.signal import convolve2dNEWLINEfrom copy import deepcopyNEWLINEfrom acd.scores import score_funcsNEWLINENEWLINENEWLINE# score doesn't have to just be prediction for labelNEWLINEdef refine_scores(scores, lab_num):NEWLINE return scores[:, lab_num]NEWLINENEWLINENEWLINE# higher scores are more likely to be pickedNEWLINEdef threshold_scores(scores, percentile_include, method):NEWLINE X = scoresNEWLINENEWLINE # pick more when more is already pickedNEWLINE num_picked = np.sum(np.isnan(scores))NEWLINE if num_picked > scores.size / 3:NEWLINE percentile_include -= 15NEWLINENEWLINE thresh = np.nanpercentile(X, percentile_include)NEWLINE # thresh = np.max(X) # pick only 1 pixel at a timeNEWLINE im_thresh = np.logical_and(scores >= thresh, ~np.isnan(scores))NEWLINE # scores >= thresh #np.logical_and(scores >= thresh, scores != 0)NEWLINENEWLINE # make sure we pick somethingNEWLINE while np.sum(im_thresh) == 0:NEWLINE percentile_include -= 4NEWLINE thresh = np.nanpercentile(X, percentile_include)NEWLINE # thresh = np.max(X) # pick only 1 pixel at a timeNEWLINE im_thresh = np.logical_and(scores >= thresh, ~np.isnan(scores))NEWLINE # np.logical_and(scores >= thresh, scores != 0)NEWLINE return im_threshNEWLINENEWLINENEWLINE# if 3 sides of a pixel are selected, also select the pixelNEWLINEfilt = np.zeros((3, 3))NEWLINEfilt[:, 1] = 1 # middle columnNEWLINEfilt[1, :] = 1 # middle rowNEWLINENEWLINENEWLINEdef smooth_im_thresh(im_thresh_old, im_thresh):NEWLINE im = im_thresh_old + im_threshNEWLINE im_count_neighbors = convolve2d(im, filt, mode='same')NEWLINE pixels_to_add = np.logical_and(np.logical_not(im), im_count_neighbors >= 3)NEWLINE return im + pixels_to_addNEWLINENEWLINENEWLINE# establish correspondence between segsNEWLINEdef establish_correspondence(seg1, seg2):NEWLINE seg_out = np.zeros(seg1.shape, dtype='int64')NEWLINE new_counter = 0NEWLINENEWLINE num_segs = int(np.max(seg2))NEWLINE remaining = list(range(1, 12)) # only have 10 colors thoughNEWLINE for i in range(1, num_segs + 1):NEWLINE seg = seg2 == iNEWLINE old_seg = seg1[seg]NEWLINE matches = np.unique(old_seg[old_seg != 0])NEWLINE num_matches = matches.sizeNEWLINENEWLINE # new segNEWLINE if num_matches == 0:NEWLINE new_counter -= 1NEWLINE seg_out[seg] = new_counterNEWLINENEWLINE # 1 matchNEWLINE elif num_matches == 1:NEWLINE seg_out[seg] = matches[0]NEWLINE remaining.remove(matches[0])NEWLINENEWLINE # >1 matches (segs merged)NEWLINE else:NEWLINE seg_out[seg] = min(matches)NEWLINE remaining.remove(min(matches))NEWLINENEWLINE # assign new segsNEWLINE while new_counter < 0:NEWLINE seg_out[seg_out == new_counter] = min(remaining)NEWLINE remaining.remove(min(remaining))NEWLINE new_counter += 1NEWLINENEWLINE return seg_out # seg2NEWLINENEWLINENEWLINE# agglomerate - black out selected pixels from before and resweep over the entire imageNEWLINEdef agglomerate(model, pred_ims, percentile_include, method, sweep_dim,NEWLINE im_orig, lab_num, num_iters=5, im_torch=None, model_type='mnist', device='cuda'):NEWLINE # set up shapesNEWLINE R = im_orig.shape[0]NEWLINE C = im_orig.shape[1]NEWLINE size_downsampled = (ceil(R / sweep_dim), ceil(C / sweep_dim)) # effectively downsampledNEWLINENEWLINE # get scoresNEWLINE tiles = tiling.gen_tiles(im_orig, fill=0, method=method, sweep_dim=sweep_dim)NEWLINE scores_orig_raw = 
score_funcs.get_scores_2d(model, method, ims=tiles, im_torch=im_torch,NEWLINE pred_ims=pred_ims, model_type=model_type, device=device)NEWLINE scores_track = np.copy(refine_scores(scores_orig_raw, lab_num)).reshape(NEWLINE size_downsampled) # keep track of these scoresNEWLINENEWLINE # threshold imNEWLINE im_thresh = threshold_scores(scores_track, percentile_include, method)NEWLINENEWLINE # initialize listsNEWLINE scores_list = [np.copy(scores_track)]NEWLINE im_thresh_list = [im_thresh]NEWLINE comps_list = []NEWLINE if not method == 'cd':NEWLINE comp_scores_raw_list = [{0: score_funcs.get_scores_2d(model, 'build_up',NEWLINE ims=np.expand_dims(im_orig, 0), # score for full imageNEWLINE im_torch=im_torch, pred_ims=pred_ims,NEWLINE model_type=model_type, device=device)[0]}]NEWLINE else:NEWLINE comp_scores_raw_list = [{0: score_funcs.get_scores_2d(model, method,NEWLINE ims=np.expand_dims(np.ones(im_orig.transpose().shape), 0),NEWLINE # score for full imageNEWLINE im_torch=im_torch, pred_ims=pred_ims,NEWLINE model_type=model_type, device=device)[0]}]NEWLINE comp_scores_raw_combined_list = []NEWLINENEWLINE # iterateNEWLINE for step in range(num_iters):NEWLINE # if already selected all pixels then breakNEWLINE if np.sum(im_thresh_list[-1]) == R * C:NEWLINE breakNEWLINENEWLINE # find connected components for regionsNEWLINE comps = np.copy(measure.label(im_thresh_list[-1], background=0, connectivity=2))NEWLINENEWLINE # establish correspondenceNEWLINE if step > 0:NEWLINE comps_orig = np.copy(comps)NEWLINE try:NEWLINE comps = establish_correspondence(comps_list[-1], comps_orig)NEWLINE except Exception:NEWLINE # fall back to the fresh component labels if matching failsNEWLINE comps = comps_origNEWLINE # plt.imshow(comps)NEWLINE # plt.show()NEWLINENEWLINE comp_tiles = {} # stores the tile corresponding to each componentNEWLINE if not method == 'cd':NEWLINE comps_combined_tile = np.zeros(shape=im_orig.shape) # stores all comp tiles combinedNEWLINE else:NEWLINE comps_combined_tile = np.zeros(shape=(R, C)) # stores all comp tiles combinedNEWLINE comp_surround_tiles = {} # stores tiles around comp_tilesNEWLINE comp_surround_idxs = {}NEWLINENEWLINE # make tilesNEWLINE comp_nums = np.unique(comps)NEWLINE comp_nums = comp_nums[comp_nums > 0] # remove 0NEWLINE for comp_num in comp_nums:NEWLINE if comp_num > 0:NEWLINE # make component tileNEWLINE comp_tile_downsampled = (comps == comp_num)NEWLINE comp_tiles[comp_num] = tiling.gen_tile_from_comp(im_orig, comp_tile_downsampled,NEWLINE sweep_dim, method) # this is full sizeNEWLINE comp_tile_binary = tiling.gen_tile_from_comp(im_orig, comp_tile_downsampled,NEWLINE sweep_dim, 'cd') # this is full sizeNEWLINE # print('comps sizes', comps_combined_tile.shape, comp_tiles[comp_num].shape)NEWLINE comps_combined_tile += comp_tiles[comp_num]NEWLINENEWLINE # generate tiles and corresponding idxs around componentNEWLINE comp_surround_tiles[comp_num], comp_surround_idxs[comp_num] = \NEWLINE tiling.gen_tiles_around_baseline(im_orig, comp_tile_binary, method=method, sweep_dim=sweep_dim)NEWLINENEWLINE # predict for all tilesNEWLINE comp_scores_raw_dict = {} # dictionary of {comp_num: comp_score}NEWLINE for comp_num in comp_nums:NEWLINE tiles = np.concatenate((np.expand_dims(comp_tiles[comp_num], 0), # baseline tile at 0NEWLINE np.expand_dims(comps_combined_tile, 0), # combined tile at 1NEWLINE comp_surround_tiles[comp_num])) # all others afterwardsNEWLINE scores_raw = score_funcs.get_scores_2d(model, method, ims=tiles, im_torch=im_torch,NEWLINE pred_ims=pred_ims, model_type=model_type, device=device)NEWLINENEWLINE # decipher scoresNEWLINE score_comp = 
np.copy(refine_scores(scores_raw, lab_num)[0])NEWLINE scores_tiles = np.copy(refine_scores(scores_raw, lab_num)[2:])NEWLINENEWLINE # store the predicted class scoresNEWLINE comp_scores_raw_dict[comp_num] = np.copy(scores_raw[0])NEWLINE score_comps_raw_combined = np.copy(scores_raw[1])NEWLINENEWLINE # update pixel scoresNEWLINE tiles_idxs = comp_surround_idxs[comp_num]NEWLINE for i in range(len(scores_tiles)):NEWLINE (r, c) = tiles_idxs[i]NEWLINE scores_track[r, c] = np.max(scores_tiles[i] - score_comp) # todo: subtract off previous comp / weight?NEWLINENEWLINE # get class preds and thresholded imageNEWLINE scores_track[im_thresh_list[-1]] = np.nanNEWLINE im_thresh = threshold_scores(scores_track, percentile_include, method)NEWLINE im_thresh_smoothed = smooth_im_thresh(im_thresh_list[-1], im_thresh)NEWLINENEWLINE # add to listsNEWLINE scores_list.append(np.copy(scores_track))NEWLINE im_thresh_list.append(im_thresh_smoothed)NEWLINE comps_list.append(comps)NEWLINE comp_scores_raw_list.append(comp_scores_raw_dict)NEWLINE comp_scores_raw_combined_list.append(score_comps_raw_combined)NEWLINENEWLINE # pad first imageNEWLINE comps_list = [np.zeros(im_orig.shape)] + comps_listNEWLINENEWLINE lists = {'scores_list': scores_list, # float arrs of scores tracked over time (NaN for already picked)NEWLINE 'im_thresh_list': im_thresh_list, # boolean array of selected pixels over timeNEWLINE 'comps_list': comps_list, # numpy arrs (each component is a different number, 0 for background)NEWLINE 'comp_scores_raw_list': comp_scores_raw_list, # dicts, each key is a number corresponding to a componentNEWLINE 'comp_scores_raw_combined_list': comp_scores_raw_combined_list,NEWLINE # arrs representing scores for all current comps combinedNEWLINE 'scores_orig_raw': scores_orig_raw, # one arr with original scores of pixelsNEWLINE 'num_before_final': len(im_thresh_list)} # number of steps recorded before final mergingNEWLINE lists = agglomerate_final(lists, model, pred_ims, percentile_include, method, sweep_dim,NEWLINE im_orig, lab_num, num_iters=5, im_torch=im_torch, model_type=model_type)NEWLINENEWLINE return listsNEWLINENEWLINENEWLINE# agglomerate the final blobsNEWLINEdef agglomerate_final(lists, model, pred_ims, percentile_include, method, sweep_dim,NEWLINE im_orig, lab_num, num_iters=5, im_torch=None, model_type='mnist'):NEWLINE # while multiple types of blobsNEWLINE while (np.unique(lists['comps_list'][-1]).size > 2):NEWLINE # for q in range(3):NEWLINE comps = np.copy(lists['comps_list'][-1])NEWLINE comp_scores_raw_dict = deepcopy(lists['comp_scores_raw_list'][-1])NEWLINENEWLINE # todo: initially merge really small blobs with nearest big blobsNEWLINE # if q == 0:NEWLINENEWLINE # make tiles by combining pairs in compsNEWLINE comp_tiles = {} # stores the tile corresponding to each componentNEWLINE for comp_num in np.unique(comps):NEWLINE if comp_num > 0:NEWLINE # make component tileNEWLINE comp_tile_downsampled = (comps == comp_num)NEWLINE comp_tiles[comp_num] = tiling.gen_tile_from_comp(im_orig, comp_tile_downsampled,NEWLINE sweep_dim, method) # this is full sizeNEWLINENEWLINE # make combined tilesNEWLINE comp_tiles_comb = {}NEWLINE for comp_num1 in np.unique(comps):NEWLINE for comp_num2 in np.unique(comps):NEWLINE if 0 < comp_num1 < comp_num2:NEWLINE comp_tiles_comb[(comp_num1, comp_num2)] = tiling.combine_tiles(comp_tiles[comp_num1],NEWLINE comp_tiles[comp_num2], method)NEWLINENEWLINE # predict for all tilesNEWLINE comp_max_score_diff = -1e10NEWLINE comp_max_key_pair = NoneNEWLINE comp_max_scores_raw = NoneNEWLINE for key in comp_tiles_comb.keys():NEWLINE # calculate 
scoresNEWLINE tiles = 1.0 * np.expand_dims(comp_tiles_comb[key], 0)NEWLINE scores_raw = score_funcs.get_scores_2d(model, method, ims=tiles, im_torch=im_torch,NEWLINE pred_ims=pred_ims, model_type=model_type)NEWLINENEWLINE # refine scores for correct class - todo this doesn't work with refine_scoresNEWLINE score_comp = np.copy(refine_scores(scores_raw, lab_num)[0])NEWLINE # score_orig = np.max(refine_scores(np.expand_dims(comp_scores_raw_dict[key[0]], 0), lab_num)[0],NEWLINE # refine_scores(np.expand_dims(comp_scores_raw_dict[key[1]], 0), lab_num)[0])NEWLINE score_orig = max(comp_scores_raw_dict[key[0]][lab_num], comp_scores_raw_dict[key[1]][lab_num])NEWLINE score_diff = score_comp - score_origNEWLINENEWLINE # find best scoreNEWLINE if score_diff > comp_max_score_diff:NEWLINE comp_max_score_diff = score_diffNEWLINE comp_max_key_pair = keyNEWLINE comp_max_scores_raw = np.copy(scores_raw[0]) # store the predicted class scoresNEWLINENEWLINE # merge highest scoring blob pairNEWLINE comps[comps == comp_max_key_pair[1]] = comp_max_key_pair[0]NEWLINENEWLINE # update highest scoring blob pair scoreNEWLINE comp_scores_raw_dict[comp_max_key_pair[0]] = comp_max_scores_rawNEWLINE comp_scores_raw_dict.pop(comp_max_key_pair[1])NEWLINENEWLINE # add to listsNEWLINE lists['comps_list'].append(comps)NEWLINE lists['comp_scores_raw_list'].append(comp_scores_raw_dict)NEWLINE lists['scores_list'].append(lists['scores_list'][-1])NEWLINE lists['im_thresh_list'].append(lists['im_thresh_list'][-1])NEWLINE lists['comp_scores_raw_combined_list'].append(lists['comp_scores_raw_combined_list'][-1])NEWLINENEWLINE return listsNEWLINE |
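# Hedged toy sketch (added for illustration): threshold_scores and smooth_im_threshNEWLINE# above operate on plain 2-D arrays, so their behavior can be sanity-checked withoutNEWLINE# a model; the 4x4 random scores below are arbitrary.NEWLINEif __name__ == '__main__':NEWLINE rng = np.random.RandomState(0)NEWLINE toy_scores = rng.rand(4, 4)NEWLINE picked = threshold_scores(toy_scores, percentile_include=90, method='cd')NEWLINE # smoothing also selects pixels with >= 3 already-selected 4-neighborsNEWLINE smoothed = smooth_im_thresh(np.zeros((4, 4), dtype=bool), picked)NEWLINE print(int(picked.sum()), int(smoothed.sum()))NEWLINE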
"""NEWLINECode from https://github.com/ray-project/ray/blob/ray-0.8.7/rllib/agents/dqn/learner_thread.pyNEWLINEand https://github.com/ray-project/ray/blob/ray-0.8.7/rllib/utils/window_stat.pyNEWLINE"""NEWLINEfrom __future__ import annotationsNEWLINENEWLINEimport queueNEWLINEimport threadingNEWLINENEWLINEfrom ray.util.timer import _Timer as TimerStatNEWLINENEWLINEfrom muzero.metrics import get_learner_statsNEWLINEfrom muzero.policy import LEARNER_STATS_KEYNEWLINEfrom muzero.sample_batch import DEFAULT_POLICY_IDNEWLINENEWLINELEARNER_QUEUE_MAX_SIZE = 8NEWLINENEWLINENEWLINEimport numpy as npNEWLINENEWLINENEWLINEclass WindowStat:NEWLINE def __init__(self, name, n):NEWLINE self.name = nameNEWLINE self.items = [None] * nNEWLINE self.idx = 0NEWLINE self.count = 0NEWLINENEWLINE def push(self, obj):NEWLINE self.items[self.idx] = objNEWLINE self.idx += 1NEWLINE self.count += 1NEWLINE self.idx %= len(self.items)NEWLINENEWLINE def stats(self):NEWLINE if not self.count:NEWLINE _quantiles = []NEWLINE else:NEWLINE _quantiles = np.nanpercentile(self.items[:self.count],NEWLINE [0, 10, 50, 90, 100]).tolist()NEWLINE return {NEWLINE self.name + "_count": int(self.count),NEWLINE self.name + "_mean": float(np.nanmean(self.items[:self.count])),NEWLINE self.name + "_std": float(np.nanstd(self.items[:self.count])),NEWLINE self.name + "_quantiles": _quantiles,NEWLINE }NEWLINENEWLINENEWLINEclass LearnerThread(threading.Thread):NEWLINE """Background thread that updates the local model from replay data.NEWLINE The learner thread communicates with the main thread through Queues. ThisNEWLINE is needed since Ray operations can only be run on the main thread. InNEWLINE addition, moving heavyweight gradient ops session runs off the main threadNEWLINE improves overall throughput.NEWLINE """NEWLINENEWLINE def __init__(self, local_worker):NEWLINE threading.Thread.__init__(self)NEWLINE self.learner_queue_size = WindowStat("size", 50)NEWLINE self.local_worker = local_workerNEWLINE self.inqueue = queue.Queue(maxsize=local_worker.config['learner_queue_size'])NEWLINE self.outqueue = queue.Queue()NEWLINE self.queue_timer = TimerStat()NEWLINE self.grad_timer = TimerStat()NEWLINE self.overall_timer = TimerStat()NEWLINE self.daemon = TrueNEWLINE self.weights_updated = FalseNEWLINE self.stopped = FalseNEWLINE self.stats = {}NEWLINENEWLINE def run(self):NEWLINE while not self.stopped:NEWLINE self.step()NEWLINENEWLINE def step(self):NEWLINE with self.overall_timer:NEWLINE with self.queue_timer:NEWLINE ra, batch = self.inqueue.get()NEWLINE if batch is not None:NEWLINE prio_dict = {}NEWLINE with self.grad_timer:NEWLINE info = self.local_worker.learn_on_batch(batch)NEWLINE pid = DEFAULT_POLICY_IDNEWLINE p = info.get(NEWLINE "replay_p",NEWLINE info[LEARNER_STATS_KEY].get("replay_p"))NEWLINE prio_dict[pid] = (batch.data.get("batch_indexes"), p)NEWLINE self.stats[pid] = get_learner_stats(info)NEWLINE self.grad_timer.push_units_processed(batch.count)NEWLINE self.outqueue.put((ra, prio_dict, batch.count))NEWLINE self.learner_queue_size.push(self.inqueue.qsize())NEWLINE self.weights_updated = TrueNEWLINE self.overall_timer.push_units_processed(batch and batch.countNEWLINE or 0) |
import osNEWLINEimport ycm_coreNEWLINENEWLINEflags = [NEWLINE'-D__IPHONE_OS_VERSION_MIN_REQUIRED=70000',NEWLINE'-resource-dir',NEWLINE'/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.0',NEWLINE'-mios-simulator-version-min=7.0',NEWLINE'-arch i386',NEWLINE'-fblocks',NEWLINE'-fobjc-runtime=ios-7.0.0',NEWLINE'-fencode-extended-block-signature',NEWLINE'-fobjc-arc',NEWLINE'-fobjc-exceptions',NEWLINE'-fexceptions',NEWLINE'-x',NEWLINE'objective-c',NEWLINE'-F/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/Library/Frameworks',NEWLINE'-F/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/System/Library/Frameworks',NEWLINE'-I/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/System/Library/Frameworks/Foundation.framework/Headers',NEWLINE'-I/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include',NEWLINE'-I/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../lib/clang/7.0.0/include',NEWLINE'-I/Library/Developer/CommandLineTools/usr/include',NEWLINE'-ISUB./Example/Pods/Headers/Public',NEWLINE#custom definition, include subfoldersNEWLINE'-ISUB./Pods/Headers/Public',NEWLINE'-I./Pod/Classes',NEWLINE'-isysroot',NEWLINE'/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk',NEWLINE'-MMD',NEWLINE]NEWLINENEWLINESOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]NEWLINENEWLINE# Set this to the absolute path to the folder (NOT the file!) containing theNEWLINE# compile_commands.json file to use that instead of 'flags'. See here forNEWLINE# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.htmlNEWLINE#NEWLINE# You can get CMake to generate this file for you by adding:NEWLINE# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )NEWLINE# to your CMakeLists.txt file.NEWLINE#NEWLINE# Most projects will NOT need to set this to anything; you can just change theNEWLINE# 'flags' list of compilation flags. 
Notice that YCM itself uses that approach.NEWLINEcompilation_database_folder = ''NEWLINENEWLINEif os.path.exists( compilation_database_folder ):NEWLINE database = ycm_core.CompilationDatabase( compilation_database_folder )NEWLINEelse:NEWLINE database = NoneNEWLINENEWLINEdef DirectoryOfThisScript():NEWLINE return os.path.dirname( os.path.abspath( __file__ ) )NEWLINENEWLINENEWLINEdef Subdirectories(directory):NEWLINE res = []NEWLINE for path, subdirs, files in os.walk(directory):NEWLINE for name in subdirs:NEWLINE item = os.path.join(path, name)NEWLINE res.append(item)NEWLINE return resNEWLINENEWLINEdef IncludeFlagsOfSubdirectory( flags, working_directory ):NEWLINE if not working_directory:NEWLINE return list( flags )NEWLINE new_flags = []NEWLINE make_next_include_subdir = FalseNEWLINE path_flags = [ '-ISUB']NEWLINE for flag in flags:NEWLINE # include the directory of flag as wellNEWLINE new_flag = [flag.replace('-ISUB', '-I')]NEWLINENEWLINE if make_next_include_subdir:NEWLINE make_next_include_subdir = FalseNEWLINE for subdir in Subdirectories(os.path.join(working_directory, flag)):NEWLINE new_flag.append('-I')NEWLINE new_flag.append(subdir)NEWLINENEWLINE for path_flag in path_flags:NEWLINE if flag == path_flag:NEWLINE make_next_include_subdir = TrueNEWLINE breakNEWLINENEWLINE if flag.startswith( path_flag ):NEWLINE path = flag[ len( path_flag ): ]NEWLINE for subdir in Subdirectories(os.path.join(working_directory, path)):NEWLINE new_flag.append('-I' + subdir)NEWLINE breakNEWLINENEWLINE new_flags = new_flags + new_flagNEWLINE return new_flagsNEWLINENEWLINEdef MakeRelativePathsInFlagsAbsolute( flags, working_directory ):NEWLINE if not working_directory:NEWLINE return list( flags )NEWLINE # add include subfolders as wellNEWLINE flags = IncludeFlagsOfSubdirectory( flags, working_directory )NEWLINE new_flags = []NEWLINE make_next_absolute = FalseNEWLINE path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]NEWLINE for flag in flags:NEWLINE new_flag = flagNEWLINENEWLINE if make_next_absolute:NEWLINE make_next_absolute = FalseNEWLINE if not flag.startswith( '/' ):NEWLINE new_flag = os.path.join( working_directory, flag )NEWLINENEWLINE for path_flag in path_flags:NEWLINE if flag == path_flag:NEWLINE make_next_absolute = TrueNEWLINE breakNEWLINENEWLINE if flag.startswith( path_flag ):NEWLINE path = flag[ len( path_flag ): ]NEWLINE new_flag = path_flag + os.path.join( working_directory, path )NEWLINE breakNEWLINENEWLINE if new_flag:NEWLINE new_flags.append( new_flag )NEWLINE return new_flagsNEWLINENEWLINENEWLINEdef IsHeaderFile( filename ):NEWLINE extension = os.path.splitext( filename )[ 1 ]NEWLINE return extension in [ '.h', '.hxx', '.hpp', '.hh' ]NEWLINENEWLINENEWLINEdef GetCompilationInfoForFile( filename ):NEWLINE # The compile_commands.json file generated by CMake does not have entriesNEWLINE # for header files. So we do our best by asking the db for flags for aNEWLINE # corresponding source file, if any. 
If one exists, the flags for that fileNEWLINE # should be good enough.NEWLINE if IsHeaderFile( filename ):NEWLINE basename = os.path.splitext( filename )[ 0 ]NEWLINE for extension in SOURCE_EXTENSIONS:NEWLINE replacement_file = basename + extensionNEWLINE if os.path.exists( replacement_file ):NEWLINE compilation_info = database.GetCompilationInfoForFile(NEWLINE replacement_file )NEWLINE if compilation_info.compiler_flags_:NEWLINE return compilation_infoNEWLINE return NoneNEWLINE return database.GetCompilationInfoForFile( filename )NEWLINENEWLINENEWLINEdef FlagsForFile( filename, **kwargs ):NEWLINE if database:NEWLINE # Bear in mind that compilation_info.compiler_flags_ does NOT return aNEWLINE # python list, but a "list-like" StringVec objectNEWLINE compilation_info = GetCompilationInfoForFile( filename )NEWLINE if not compilation_info:NEWLINE return NoneNEWLINENEWLINE final_flags = MakeRelativePathsInFlagsAbsolute(NEWLINE compilation_info.compiler_flags_,NEWLINE compilation_info.compiler_working_dir_ )NEWLINENEWLINE # NOTE: This is just for YouCompleteMe; it's highly likely that your projectNEWLINE # does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOURNEWLINE # ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.NEWLINE # try:NEWLINE # final_flags.remove( '-stdlib=libc++' )NEWLINE # except ValueError:NEWLINE # passNEWLINE else:NEWLINE relative_to = DirectoryOfThisScript()NEWLINE final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )NEWLINENEWLINE return {NEWLINE 'flags': final_flags,NEWLINE 'do_cache': TrueNEWLINE }NEWLINENEWLINE# if __name__ == '__main__':NEWLINE # # res = subdirectory( DirectoryOfThisScript())NEWLINE # flags = [NEWLINE # '-D__IPHONE_OS_VERSION_MIN_REQUIRED=70000',NEWLINE # '-x',NEWLINE # 'objective-c',NEWLINE # '-F/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/Library/Frameworks',NEWLINE # '-ISUB./Pods/Headers/Public',NEWLINE # '-MMD',NEWLINE # ]NEWLINENEWLINE # print IncludeFlagsOfSubdirectory( flags, DirectoryOfThisScript() )NEWLINENEWLINENEWLINENEWLINE |
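# Hedged demo (added; in the spirit of the commented-out block above): with a toyNEWLINE# flag list containing an -ISUB entry, MakeRelativePathsInFlagsAbsolute firstNEWLINE# expands subdirectory includes and then absolutizes relative paths. The pathsNEWLINE# are hypothetical and must exist on disk to yield any subdirectory flags.NEWLINE#NEWLINE# demo_flags = ['-x', 'objective-c', '-ISUB./Pods/Headers/Public', '-I./Pod/Classes']NEWLINE# print(MakeRelativePathsInFlagsAbsolute(demo_flags, DirectoryOfThisScript()))NEWLINE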
# -*- coding: utf-8 -*-NEWLINE"""NEWLINEPreprocessors.NEWLINE"""NEWLINEimport reNEWLINENEWLINEimport numpy as npNEWLINEfrom allennlp.modules.elmo import Elmo, batch_to_idsNEWLINEfrom sklearn.base import BaseEstimator, TransformerMixinNEWLINEfrom sklearn.externals import joblibNEWLINEfrom keras.utils.np_utils import to_categoricalNEWLINEfrom keras.preprocessing.sequence import pad_sequencesNEWLINENEWLINEfrom anago.utils import VocabularyNEWLINENEWLINEoptions_file = 'https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x4096_512_2048cnn_2xhighway/elmo_2x4096_512_2048cnn_2xhighway_options.json'NEWLINEweight_file = 'https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x4096_512_2048cnn_2xhighway/elmo_2x4096_512_2048cnn_2xhighway_weights.hdf5'NEWLINENEWLINENEWLINEdef normalize_number(text):NEWLINE return re.sub(r'[0-90123456789]', r'0', text)NEWLINENEWLINENEWLINEclass IndexTransformer(BaseEstimator, TransformerMixin):NEWLINE """Convert a collection of raw documents to a document id matrix.NEWLINENEWLINE Attributes:NEWLINE _use_char: boolean. Whether to use char feature.NEWLINE _num_norm: boolean. Whether to normalize text.NEWLINE _word_vocab: dict. A mapping of words to feature indices.NEWLINE _char_vocab: dict. A mapping of chars to feature indices.NEWLINE _label_vocab: dict. A mapping of labels to feature indices.NEWLINE """NEWLINENEWLINE def __init__(self, lower=True, num_norm=True,NEWLINE use_char=True, initial_vocab=None):NEWLINE """Create a preprocessor object.NEWLINENEWLINE Args:NEWLINE lower: boolean. Whether to convert the texts to lowercase.NEWLINE use_char: boolean. Whether to use char feature.NEWLINE num_norm: boolean. Whether to normalize text.NEWLINE initial_vocab: Iterable. Initial vocabulary for expanding word_vocab.NEWLINE """NEWLINE self._num_norm = num_normNEWLINE self._use_char = use_charNEWLINE self._word_vocab = Vocabulary(lower=lower)NEWLINE self._char_vocab = Vocabulary(lower=False)NEWLINE self._label_vocab = Vocabulary(lower=False, unk_token=False)NEWLINENEWLINE if initial_vocab:NEWLINE self._word_vocab.add_documents([initial_vocab])NEWLINE self._char_vocab.add_documents(initial_vocab)NEWLINENEWLINE def fit(self, X, y):NEWLINE """Learn vocabulary from training set.NEWLINENEWLINE Args:NEWLINE X : iterable. 
An iterable which yields either str, unicode or file objects.NEWLINE y : iterable. Label strings.NEWLINENEWLINE Returns:NEWLINE self : IndexTransformer.NEWLINE """NEWLINE self._word_vocab.add_documents(X)NEWLINE self._label_vocab.add_documents(y)NEWLINE if self._use_char:NEWLINE for doc in X:NEWLINE self._char_vocab.add_documents(doc)NEWLINENEWLINE self._word_vocab.build()NEWLINE self._char_vocab.build()NEWLINE self._label_vocab.build()NEWLINENEWLINE return selfNEWLINENEWLINE def transform(self, X, y=None):NEWLINE """Transform documents to document ids.NEWLINENEWLINE Uses the vocabulary learned by fit.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINE y : iterable. Label strings.NEWLINENEWLINE Returns:NEWLINE features: document id matrix.NEWLINE y: label id matrix.NEWLINE """NEWLINE word_ids = [self._word_vocab.doc2id(doc) for doc in X]NEWLINE word_ids = pad_sequences(word_ids, padding='post')NEWLINENEWLINE if self._use_char:NEWLINE char_ids = [[self._char_vocab.doc2id(w) for w in doc] for doc in X]NEWLINE char_ids = pad_nested_sequences(char_ids)NEWLINE features = [word_ids, char_ids]NEWLINE else:NEWLINE features = word_idsNEWLINENEWLINE if y is not None:NEWLINE y = [self._label_vocab.doc2id(doc) for doc in y]NEWLINE y = pad_sequences(y, padding='post')NEWLINE y = to_categorical(y, self.label_size).astype(int)NEWLINE # As of 2018/06/01, to_categorical is a bit strange.NEWLINE # >>> to_categorical([[1,3]], num_classes=4).shapeNEWLINE # (1, 2, 4)NEWLINE # >>> to_categorical([[1]], num_classes=4).shapeNEWLINE # (1, 4)NEWLINE # So, I expand dimensions when len(y.shape) == 2.NEWLINE y = y if len(y.shape) == 3 else np.expand_dims(y, axis=0)NEWLINE return features, yNEWLINE else:NEWLINE return featuresNEWLINENEWLINE def fit_transform(self, X, y=None, **params):NEWLINE """Learn vocabulary and return document id matrix.NEWLINENEWLINE This is equivalent to fit followed by transform.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINENEWLINE Returns:NEWLINE list : document id matrix.NEWLINE list: label id matrix.NEWLINE """NEWLINE return self.fit(X, y).transform(X, y)NEWLINENEWLINE def inverse_transform(self, y, lengths=None):NEWLINE """Return label strings.NEWLINENEWLINE Args:NEWLINE y: label id matrix.NEWLINE lengths: sentence lengths.NEWLINENEWLINE Returns:NEWLINE list: list of list of strings.NEWLINE """NEWLINE y = np.argmax(y, -1)NEWLINE inverse_y = [self._label_vocab.id2doc(ids) for ids in y]NEWLINE if lengths is not None:NEWLINE inverse_y = [iy[:l] for iy, l in zip(inverse_y, lengths)]NEWLINENEWLINE return inverse_yNEWLINENEWLINE @propertyNEWLINE def word_vocab_size(self):NEWLINE return len(self._word_vocab)NEWLINENEWLINE @propertyNEWLINE def char_vocab_size(self):NEWLINE return len(self._char_vocab)NEWLINENEWLINE @propertyNEWLINE def label_size(self):NEWLINE return len(self._label_vocab)NEWLINENEWLINE def save(self, file_path):NEWLINE joblib.dump(self, file_path)NEWLINENEWLINE @classmethodNEWLINE def load(cls, file_path):NEWLINE p = joblib.load(file_path)NEWLINENEWLINE return pNEWLINENEWLINENEWLINEdef pad_nested_sequences(sequences, dtype='int32'):NEWLINE """Pads nested sequences to the same length.NEWLINENEWLINE This function transforms a list of lists of sequencesNEWLINE into a 3D Numpy array of shape `(num_samples, max_sent_len, max_word_len)`.NEWLINENEWLINE Args:NEWLINE sequences: List of lists of lists.NEWLINE dtype: Type of the output sequences.NEWLINENEWLINE # ReturnsNEWLINE x: Numpy array.NEWLINE 
"""NEWLINE max_sent_len = 0NEWLINE max_word_len = 0NEWLINE for sent in sequences:NEWLINE max_sent_len = max(len(sent), max_sent_len)NEWLINE for word in sent:NEWLINE max_word_len = max(len(word), max_word_len)NEWLINENEWLINE x = np.zeros((len(sequences), max_sent_len, max_word_len)).astype(dtype)NEWLINE for i, sent in enumerate(sequences):NEWLINE for j, word in enumerate(sent):NEWLINE x[i, j, :len(word)] = wordNEWLINENEWLINE return xNEWLINENEWLINENEWLINEclass ELMoTransformer(IndexTransformer):NEWLINENEWLINE def __init__(self, lower=True, num_norm=True,NEWLINE use_char=True, initial_vocab=None):NEWLINE super(ELMoTransformer, self).__init__(lower, num_norm, use_char, initial_vocab)NEWLINE self._elmo = Elmo(options_file, weight_file, 2, dropout=0)NEWLINENEWLINE def transform(self, X, y=None):NEWLINE """Transform documents to document ids.NEWLINENEWLINE Uses the vocabulary learned by fit.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINE y : iterabl, label strings.NEWLINENEWLINE Returns:NEWLINE features: document id matrix.NEWLINE y: label id matrix.NEWLINE """NEWLINE word_ids = [self._word_vocab.doc2id(doc) for doc in X]NEWLINE word_ids = pad_sequences(word_ids, padding='post')NEWLINENEWLINE char_ids = [[self._char_vocab.doc2id(w) for w in doc] for doc in X]NEWLINE char_ids = pad_nested_sequences(char_ids)NEWLINENEWLINE character_ids = batch_to_ids(X)NEWLINE elmo_embeddings = self._elmo(character_ids)['elmo_representations'][1]NEWLINE elmo_embeddings = elmo_embeddings.detach().numpy()NEWLINENEWLINE features = [word_ids, char_ids, elmo_embeddings]NEWLINENEWLINE if y is not None:NEWLINE y = [self._label_vocab.doc2id(doc) for doc in y]NEWLINE y = pad_sequences(y, padding='post')NEWLINE y = to_categorical(y, self.label_size).astype(int)NEWLINE # In 2018/06/01, to_categorical is a bit strange.NEWLINE # >>> to_categorical([[1,3]], num_classes=4).shapeNEWLINE # (1, 2, 4)NEWLINE # >>> to_categorical([[1]], num_classes=4).shapeNEWLINE # (1, 4)NEWLINE # So, I expand dimensions when len(y.shape) == 2.NEWLINE y = y if len(y.shape) == 3 else np.expand_dims(y, axis=0)NEWLINE return features, yNEWLINE else:NEWLINE return featuresNEWLINE |
# Copyright 2021 NVIDIA CorporationNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE#NEWLINENEWLINEimport legate.numpy as lgNEWLINENEWLINENEWLINEdef test():NEWLINE # test data type conversionNEWLINE x = lg.array([1, 2, 3])NEWLINE y = lg.array([1.0, 2.0, 3.0])NEWLINENEWLINE assert lg.max(x) == lg.max(y)NEWLINENEWLINE returnNEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE test()NEWLINE |
import clickNEWLINEfrom collections import CounterNEWLINEfrom typing import List, TupleNEWLINENEWLINEfrom .utils import assertCommitsNEWLINENEWLINENEWLINEclass EmailRedactParamType(click.ParamType):NEWLINE name = 'emailredact'NEWLINENEWLINE def convert(self, value, param, ctx) -> Tuple[str, str, str]:NEWLINE if ":" in value:NEWLINE try:NEWLINE old, new = value.split(":", maxsplit=1)NEWLINE name = ""NEWLINE if ":" in new:NEWLINE new, name = new.split(":")NEWLINE return (old, new, name)NEWLINE except ValueError:NEWLINE self.fail(NEWLINE f'{value} is not in the format 'NEWLINE 'old-email[:new-email[:new-name]]',NEWLINE param, ctx,NEWLINE )NEWLINE return (value, "", "")NEWLINENEWLINENEWLINEEMAIL_REDACT = EmailRedactParamType()NEWLINEGHNOREPLY = "{username}@users.noreply.github.com"NEWLINENEWLINENEWLINE@cli.command('redact-email')NEWLINE@click.argument('addresses', nargs=-1, type=EMAIL_REDACT)NEWLINE@click.option('-r', '--replacement', type=str,NEWLINE default="[email protected]",NEWLINE help="Email address used as replacement.")NEWLINE@click.option('-g', '--use-github-noreply', 'use_ghnoreply', is_flag=True,NEWLINE help="Interpret custom replacements as GitHub usernames"NEWLINE " and construct noreply addresses.")NEWLINE@click.pass_contextNEWLINEdef redact_email(ctx: click.Context,NEWLINE addresses: List[Tuple[str, str, str]],NEWLINE replacement: str,NEWLINE use_ghnoreply: bool) -> None:NEWLINE """Redact email addresses from existing commits."""NEWLINE if not addresses:NEWLINE return # nothing to doNEWLINENEWLINE assertCommits(ctx)NEWLINE repo = ctx.obj.repoNEWLINENEWLINE env_cmd = ""NEWLINE with click.progressbar(addresses,NEWLINE label="Redacting emails") as bar:NEWLINE for old, new, name in bar:NEWLINE if new and use_ghnoreply:NEWLINE new = GHNOREPLY.format(username=new)NEWLINE if not new:NEWLINE new = replacementNEWLINE env_cmd += get_env_cmd("COMMITTER", old, new, name)NEWLINE env_cmd += get_env_cmd("AUTHOR", old, new, name)NEWLINE filter_cmd = ["git", "filter-branch", "-f",NEWLINE "--env-filter", env_cmd,NEWLINE "--",NEWLINE "HEAD"]NEWLINE repo.git.execute(command=filter_cmd)NEWLINENEWLINENEWLINEdef get_env_cmd(role: str, old: str, new: str, name: str) -> str:NEWLINE name_env = f'GIT_{role}_NAME="{name}"'NEWLINE return (NEWLINE f'if test "$GIT_{role}_EMAIL" = "{old}"; then 'NEWLINE f'export GIT_{role}_EMAIL="{new}" 'NEWLINE f'{name_env if name else ""}; 'NEWLINE 'fi; 'NEWLINE )NEWLINENEWLINENEWLINE@cli.command('list-email')NEWLINE@click.option('-a', '--all', 'check_all', is_flag=True,NEWLINE help="Include all local references.")NEWLINE@click.option('-e', '--email-only', is_flag=True,NEWLINE help="Only consider actors' email address when counting contributions.")NEWLINE@click.pass_contextNEWLINEdef list_email(ctx: click.Context, check_all: bool, email_only: bool) -> None:NEWLINE """List all author and committer identities."""NEWLINE assertCommits(ctx)NEWLINE repo = ctx.obj.repoNEWLINE commits = repo.iter_commits("HEAD" if not check_all else "--all")NEWLINE authors: Counter[str] = Counter()NEWLINE committers: Counter[str] = Counter()NEWLINE if email_only:NEWLINE to_str = lambda a: a.emailNEWLINE else:NEWLINE to_str = _actor_to_strNEWLINE for commit in commits:NEWLINE authors[to_str(commit.author)] += 1NEWLINE committers[to_str(commit.committer)] += 1NEWLINE total = authors + committersNEWLINE for actor in sorted(total):NEWLINE print(f"{actor} (total: {total[actor]}, author: {authors[actor]}, committer: {committers[actor]})")NEWLINENEWLINEdef _actor_to_str(actor):NEWLINE return f"{actor.name} <{actor.email}>"NEWLINE
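# Hedged illustration (added): get_env_cmd above emits the shell fragment thatNEWLINE# git filter-branch evaluates for every commit; the addresses below areNEWLINE# hypothetical examples.NEWLINEif __name__ == '__main__':NEWLINE snippet = get_env_cmd('AUTHOR', 'old@example.com', 'new@example.com', 'Alice')NEWLINE print(snippet)NEWLINE # -> if test "$GIT_AUTHOR_EMAIL" = "old@example.com"; then export GIT_AUTHOR_EMAIL="new@example.com" GIT_AUTHOR_NAME="Alice"; fi;NEWLINE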
import randomNEWLINEimport streambarNEWLINEimport tempfileNEWLINEimport pandas as pdNEWLINEimport osNEWLINEimport torchNEWLINEimport bar_utils as buNEWLINEfrom torch.utils.data import DatasetNEWLINEimport numpy as npNEWLINEfrom collections import defaultdictNEWLINEfrom statsmodels.tsa.stattools import adfullerNEWLINENEWLINEimport reNEWLINENEWLINEimport loggingNEWLINENEWLINENEWLINEdef default_stock_loader(path, channels_first=True, as_df=False, cache="F:/cache"):NEWLINE cached = os.path.join(cache, re.sub("[^A-Za-z0-9]+", "-", path)) + "-time-60.csv"NEWLINE if not os.path.exists(cached):NEWLINE # parameter is the pathNEWLINE with tempfile.NamedTemporaryFile(delete=False) as f:NEWLINE # close file (otherwise it cannot be opened)NEWLINE f.close()NEWLINENEWLINE # convert to barsNEWLINE streambar.time(path, f.name, size=60)NEWLINENEWLINE # read back in as csvNEWLINE df = pd.read_csv(f.name)NEWLINENEWLINE # and unlink the temporary fileNEWLINE os.unlink(f.name)NEWLINENEWLINE # fill the gapsNEWLINE df = bu.fill_timebar_gaps(df, use_tqdm=False)NEWLINENEWLINE # write the cached versionNEWLINE df.to_csv(cached, index=False)NEWLINE else:NEWLINE # there is a cached version, load itNEWLINE df = pd.read_csv(cached)NEWLINENEWLINE # remove timestamps, skewness and kurtosisNEWLINE df.drop(columns=["first", "last", "skewness", "kurtosis"], inplace=True)NEWLINE df.dropna(inplace=True)NEWLINENEWLINE # return which columns are prices as wellNEWLINE isprice = np.array(NEWLINE [NEWLINE True,NEWLINE True,NEWLINE True,NEWLINE True,NEWLINE True,NEWLINE False,NEWLINE True,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE True,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE False,NEWLINE ]NEWLINE )NEWLINENEWLINE # return the dataframe values as tensor (channels, time)NEWLINE return (torch.FloatTensor(df.values.T) if not as_df else df), isprice, cachedNEWLINENEWLINENEWLINEdef iqr_mean(series, low=0.1, high=0.9):NEWLINE # filter the valuesNEWLINE vals = series[(series > series.quantile(low)) & (series < series.quantile(high))]NEWLINE return vals.mean()NEWLINENEWLINENEWLINEdef volume_stock_loader(path, channels_first=True, as_df=False, cache="F:/cache"):NEWLINE # load the time bars for this entryNEWLINE df_time, _, _ = default_stock_loader(NEWLINE path, channels_first=channels_first, as_df=True, cache=cacheNEWLINE )NEWLINENEWLINE # use the IQR mean of per-bar volume as the volume-bar sizeNEWLINE per_bin = int(iqr_mean(df_time["volume"]))NEWLINENEWLINE # get the cached valueNEWLINE cached = os.path.join(cache, re.sub("[^A-Za-z0-9]+", "-", path)) + (NEWLINE "-volume-%d.csv" % per_binNEWLINE )NEWLINENEWLINE # loggingNEWLINE logging.debug("Searching for cached file %s", cached)NEWLINENEWLINE # generate the bars if there is no cached version yetNEWLINE if not os.path.exists(cached):NEWLINE # cached file is not foundNEWLINE logging.debug(" -- not found, generating.")NEWLINENEWLINE # parameter is the pathNEWLINE with tempfile.NamedTemporaryFile(delete=False) as f:NEWLINE # close file (otherwise it cannot be opened)NEWLINE f.close()NEWLINENEWLINE # convert to barsNEWLINE streambar.volume(path, f.name, size=per_bin)NEWLINENEWLINE # read back in as csvNEWLINE df = pd.read_csv(f.name)NEWLINENEWLINE # and unlink the temporary fileNEWLINE os.unlink(f.name)NEWLINENEWLINE # write the cached versionNEWLINE df.to_csv(cached, index=False)NEWLINE else:NEWLINE # cached file was foundNEWLINE logging.debug(" -- found.")NEWLINENEWLINE # there is a cached version, load itNEWLINE df = pd.read_csv(cached)NEWLINENEWLINE # remove timestamps, skewness and 
NEWLINE    df.drop(columns=["first", "last", "skewness", "kurtosis"], inplace=True)NEWLINE    df.dropna(inplace=True)NEWLINENEWLINE    # return which columns are prices as wellNEWLINE    isprice = np.array(NEWLINE        [NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            False,NEWLINE            True,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            True,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE        ]NEWLINE    )NEWLINENEWLINE    # return the dataframe values as tensor (channels, time)NEWLINE    return (torch.FloatTensor(df.values.T) if not as_df else df), isprice, cachedNEWLINENEWLINENEWLINEdef dollars_stock_loader(path, channels_first=True, as_df=False, cache="F:/cache"):NEWLINE    # load the time bars for this entryNEWLINE    df_time, _, _ = default_stock_loader(NEWLINE        path, channels_first=channels_first, as_df=True, cache=cacheNEWLINE    )NEWLINENEWLINE    # size the dollar bars by the trimmed mean dollar volume of the time barsNEWLINE    per_bin = int(iqr_mean(df_time["dollars"]))NEWLINENEWLINE    # try to find cached fileNEWLINE    cached = os.path.join(cache, re.sub("[^A-Za-z0-9]+", "-", path)) + (NEWLINE        "-dollars-%d.csv" % per_binNEWLINE    )NEWLINENEWLINE    # loggingNEWLINE    logging.debug("Searching for cached file %s", cached)NEWLINENEWLINE    # if it does not exist, we have to make itNEWLINE    if not os.path.exists(cached):NEWLINE        # cached file is not foundNEWLINE        logging.debug(" -- not found, generating.")NEWLINENEWLINE        # create a temporary output file for the bar converterNEWLINE        with tempfile.NamedTemporaryFile(delete=False) as f:NEWLINE            # close file (otherwise it cannot be opened)NEWLINE            f.close()NEWLINENEWLINE        # convert to barsNEWLINE        streambar.dollar(path, f.name, size=per_bin)NEWLINENEWLINE        # read back in as csvNEWLINE        df = pd.read_csv(f.name)NEWLINENEWLINE        # and unlink the temporary fileNEWLINE        os.unlink(f.name)NEWLINENEWLINE        # write the cached versionNEWLINE        df.to_csv(cached, index=False)NEWLINE    else:NEWLINE        # cached file was foundNEWLINE        logging.debug(" -- found.")NEWLINENEWLINE        # there is a cached version, load itNEWLINE        df = pd.read_csv(cached)NEWLINENEWLINE    # remove timestamps, skewness and kurtosisNEWLINE    df.drop(columns=["first", "last", "skewness", "kurtosis"], inplace=True)NEWLINE    df.dropna(inplace=True)NEWLINENEWLINE    # return which columns are prices as wellNEWLINE    isprice = np.array(NEWLINE        [NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            True,NEWLINE            False,NEWLINE            True,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            True,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE            False,NEWLINE        ]NEWLINE    )NEWLINENEWLINE    # return the dataframe values as tensor (channels, time)NEWLINE    return (torch.FloatTensor(df.values.T) if not as_df else df), isprice, cachedNEWLINENEWLINENEWLINEclass StockDataset(Dataset):NEWLINE    def __init__(NEWLINE        self,NEWLINE        opt,  # optionsNEWLINE        flist,  # file listNEWLINE        length=120,  # sequence lengthNEWLINE        loader=default_stock_loader,NEWLINE        redo_bars=False,NEWLINE    ):NEWLINE        # store root directory and optionsNEWLINE        self.opt = optNEWLINENEWLINE        # store the file list and the 'prev' listNEWLINE        self.flist = flist["file"]NEWLINE        self.plist = flist["prev"]NEWLINENEWLINE        # store file loaderNEWLINE        self.loader = loaderNEWLINENEWLINE        # store lengthNEWLINE        self.length = lengthNEWLINENEWLINE        # how to cache the dataNEWLINE        self.cache = {}NEWLINENEWLINE        # a self-filling dict of dicts, noting the offsets in the fileNEWLINE        self.profits = defaultdict(dict)NEWLINENEWLINE        self.mean, self.std = {}, {}NEWLINENEWLINE        # full mapping
NEWLINE        self.mapping = []NEWLINE        self.mark = {}NEWLINE        self.mark_raw = {}NEWLINENEWLINE        self.counter = defaultdict(int)NEWLINENEWLINE        self.adf = []NEWLINENEWLINE        # preload all dataNEWLINE        logging.info("Preloading data...")NEWLINE        for i in range(len(flist)):NEWLINE            self.getidx(i)NEWLINE        logging.info("Done preloading.")NEWLINENEWLINE        # take all lengths for insight purposesNEWLINE        lens = [d[0].shape[1] for d in self.cache.values()]NEWLINENEWLINE        # debug statementNEWLINE        logging.info(NEWLINE            "Loaded Dataset Number of Bars (%s): Mean %.3f and Std %.3f",NEWLINE            self.opt.bar_type,NEWLINE            np.mean(lens),NEWLINE            np.std(lens),NEWLINE        )NEWLINENEWLINE        print(np.mean(self.adf), np.std(self.adf))NEWLINENEWLINE    def getidx(self, idx):NEWLINE        assert idx < len(self.flist), "index out of range"NEWLINENEWLINE        # if the index is already in cache, just return itNEWLINE        if idx in self.cache:NEWLINE            return self.cache[idx]NEWLINENEWLINE        # debugging infoNEWLINE        logging.debug("Going to load %s", self.flist[idx])NEWLINENEWLINE        # load the file and store it in the cacheNEWLINE        self.cache[idx] = self.loader(self.flist[idx])NEWLINENEWLINE        # get the dataNEWLINE        data, isprice, _ = self.cache[idx]NEWLINENEWLINE        # because prices are non-stationary we difference the log prices (log-returns sequence)NEWLINE        data[isprice, :] = torch.FloatTensor(NEWLINE            np.hstack(NEWLINE                [NEWLINE                    np.zeros_like(isprice[isprice]).reshape(-1, 1),NEWLINE                    np.diff(np.log(data[isprice, :])),NEWLINE                ]NEWLINE            )NEWLINE        )NEWLINENEWLINE        # calculate the profitability of getting in at time X, 4th channel is the close price!NEWLINE        closes = self.cache[idx][0][3]NEWLINENEWLINE        # markNEWLINE        mark = np.exp(NEWLINE            (NEWLINE                closes[self.opt.prediction_step + self.length :]NEWLINE                - closes[self.length : -self.opt.prediction_step]NEWLINE            ).numpy()NEWLINE        )NEWLINENEWLINE        # numpy range of everythingNEWLINE        indices = np.arange(len(mark))NEWLINENEWLINE        @np.vectorizeNEWLINE        def classify(mark):NEWLINE            if mark < 0.995:NEWLINE                return 0NEWLINE            if 0.995 <= mark <= 0.998:NEWLINE                return 1NEWLINE            if 0.998 < mark < 1.002:NEWLINE                return 2NEWLINE            if 1.002 <= mark <= 1.005:NEWLINE                return 3NEWLINE            return 4NEWLINENEWLINE        # store the markNEWLINE        self.mark[idx] = classify(mark)NEWLINE        self.mark_raw[idx] = markNEWLINENEWLINE        # subdivide the indices by class, count them, and drop empty classesNEWLINE        for cls in range(5):NEWLINE            self.profits[cls][idx] = indices[self.mark[idx] == cls]NEWLINE            self.counter[cls] += len(self.profits[cls][idx])NEWLINE            if len(self.profits[cls][idx]) == 0:NEWLINE                del self.profits[cls][idx]NEWLINENEWLINE        # get the dataNEWLINE        data, isprice, c = self.cache[idx]NEWLINENEWLINE        # mean for this dayNEWLINE        self.mean[idx] = torch.mean(data, axis=1).reshape(-1, 1)NEWLINE        self.std[idx] = torch.std(data, axis=1).reshape(-1, 1)NEWLINENEWLINE        # use previous day, except on the first dayNEWLINE        previdx = max(idx - 1, 0)NEWLINENEWLINE        
self.adf.append(adfuller(data[3, :])[0])NEWLINENEWLINE # normalize the data using the previous dayNEWLINE normalized = (data - self.mean[previdx]) / self.std[previdx]NEWLINENEWLINE # check and fix if neededNEWLINE if (torch.max(torch.abs(normalized), axis=1).values > 100).any():NEWLINE logging.warning("HIGH NORMALIZED DATAPOINT IN %s...", self.flist[idx])NEWLINE logging.debug("mean %s std %s", self.mean[previdx], self.std[previdx])NEWLINE logging.debug("normalized %s", normalized.abs().max(axis=1))NEWLINE logging.debug("original %s %s", data.min(axis=1), data.max(axis=1))NEWLINENEWLINE # we correct it during training...NEWLINE if not self.opt.nosubsampledata:NEWLINE logging.info(" -- corrected by removal")NEWLINE data = data[:, torch.max(torch.abs(normalized), axis=0).values < 100]NEWLINENEWLINE # correct the mean for this day (otherwise it bleeds to the next day)NEWLINE self.mean[idx] = torch.mean(data, axis=1).reshape(-1, 1)NEWLINE self.std[idx] = torch.std(data, axis=1).reshape(-1, 1)NEWLINENEWLINE # normalize the data using the previous day againNEWLINE normalized = (data - self.mean[previdx]) / self.std[previdx]NEWLINENEWLINE # standardize the data on load using previous dayNEWLINE self.cache[idx] = normalized, isprice, cNEWLINENEWLINE # add the mapping of all possible samplesNEWLINE self.mapping.extend([(idx, i) for i in range(0, data.shape[1] - self.length)])NEWLINENEWLINE # we can return it (if it wasn't there, it was made by the previous statement)NEWLINE return self.cache[idx]NEWLINENEWLINE def get_mark(self, idx, start_idx):NEWLINE if idx not in self.cache:NEWLINE self.getidx(idx)NEWLINE marks = self.mark[idx]NEWLINE return -1 if start_idx >= len(marks) else marks[start_idx]NEWLINENEWLINE def get_mark_raw(self, idx, start_idx):NEWLINE if idx not in self.cache:NEWLINE self.getidx(idx)NEWLINE marks = self.mark_raw[idx]NEWLINE return 0.0 if start_idx >= len(marks) else marks[start_idx]NEWLINENEWLINE def getprofits(self, idx, profit):NEWLINE if idx not in self.cache:NEWLINE self.getidx(idx)NEWLINE return self.profits[profit][idx]NEWLINENEWLINE def get_item_at_index(self, index, start_idx):NEWLINE # get the file at the indexNEWLINE data, _, fname = self.getidx(index)NEWLINENEWLINE # cut the sequenceNEWLINE data = data[:, start_idx : (start_idx + self.length)].clone()NEWLINENEWLINE # return the data and the indexNEWLINE return data, fname, self.get_mark_raw(index, start_idx), start_idxNEWLINENEWLINE def __getitem__(self, index):NEWLINE if not self.opt.nosubsampledata:NEWLINE # get the file at the indexNEWLINE data, _, _ = self.getidx(index)NEWLINENEWLINE # pick a random numberNEWLINE start_idx = random.randint(0, data.shape[1] - self.length - 1)NEWLINE else:NEWLINE # otherwise, we have a pre-selected mappingNEWLINE index, start_idx = self.mapping[index]NEWLINENEWLINE # get the item at the start indexNEWLINE return self.get_item_at_index(index, start_idx)NEWLINENEWLINE def __len__(self):NEWLINE if not self.opt.nosubsampledata:NEWLINE return len(self.flist)NEWLINE else:NEWLINE return len(self.mapping)NEWLINENEWLINE def sample_profit(self, profit):NEWLINE """get a single datapoint with future profits as in profit."""NEWLINE randidx = np.random.choice(np.array(list(self.profits[profit].keys())))NEWLINE return self.get_item_at_index(NEWLINE randidx, np.random.choice(self.profits[profit][randidx])NEWLINE )NEWLINENEWLINE def get_data_by_profit(self, profit, batch_size=20):NEWLINE """NEWLINE get data by future profit id, 0, 1, 2, 3, 4.NEWLINE 0 : < -2%NEWLINE 1 : -2% to -0.5%NEWLINE 2 : -0.5% 
to 0.5%NEWLINE        3 : 0.5% to 2%NEWLINE        4 : > 2%NEWLINENEWLINE        quite arbitrary classes (note: classify() above actually cuts atNEWLINE        0.995/0.998/1.002/1.005, i.e. roughly -0.5%/-0.2%/+0.2%/+0.5%, soNEWLINE        the percentages listed here are stale). @todo rebalance?NEWLINE        """NEWLINE        assert 0 <= profit <= 4, "Invalid profit enum value"NEWLINENEWLINE        # infer the channel count from the first sample instead of hardcoding itNEWLINE        batch = NoneNEWLINE        for idx in range(batch_size):NEWLINE            sample, _, _, _ = self.sample_profit(profit)NEWLINE            if batch is None:NEWLINE                batch = torch.zeros(batch_size, *sample.shape)NEWLINE            batch[idx] = sampleNEWLINENEWLINE        return batchNEWLINENEWLINENEWLINEdef get_dataloaders(opt):NEWLINE    """creates and returns the stock dataset and dataloaders, either withNEWLINE    train/val split, or train+val/test split.NEWLINENEWLINE    :param opt:NEWLINE    :return: train_loader, train_dataset,NEWLINE             test_loader, test_dataset - corresponds to validation or test set depending on opt.validateNEWLINE    """NEWLINE    num_workers = 1NEWLINENEWLINE    # read the file lists (they are kept in temporal order)NEWLINE    train = pd.read_csv(os.path.join(opt.data_input_dir, "train.csv"))NEWLINE    valid = pd.read_csv(os.path.join(opt.data_input_dir, "valid.csv"))NEWLINE    test = pd.read_csv(os.path.join(opt.data_input_dir, "test.csv"))NEWLINENEWLINE    if opt.bar_type == "time":NEWLINE        loader = default_stock_loaderNEWLINE    elif opt.bar_type == "volume":NEWLINE        loader = volume_stock_loaderNEWLINE    elif opt.bar_type == "dollars":NEWLINE        loader = dollars_stock_loaderNEWLINE    else:NEWLINE        raise ValueError("Incorrect Bar Type!")NEWLINENEWLINE    if opt.validate:NEWLINE        logging.info("Using Train / Val Split")NEWLINENEWLINE        train_dataset = StockDataset(opt, train, loader=loader)NEWLINE        test_dataset = StockDataset(opt, valid, loader=loader)NEWLINENEWLINE    else:NEWLINE        logging.info("Using Train+Val / Test Split")NEWLINE        train_dataset = StockDataset(NEWLINE            opt, pd.concat([train, valid], ignore_index=True), loader=loaderNEWLINE        )NEWLINE        test_dataset = StockDataset(opt, test, loader=loader)NEWLINENEWLINE    train_loader = torch.utils.data.DataLoader(NEWLINE        dataset=train_dataset,NEWLINE        batch_size=opt.batch_size_multiGPU,NEWLINE        shuffle=(not opt.noshuffle),NEWLINE        drop_last=True,NEWLINE        num_workers=num_workers,NEWLINE    )NEWLINENEWLINE    test_loader = torch.utils.data.DataLoader(NEWLINE        dataset=test_dataset,NEWLINE        batch_size=opt.batch_size_multiGPU,NEWLINE        shuffle=False,NEWLINE        drop_last=True,NEWLINE        num_workers=num_workers,NEWLINE    )NEWLINENEWLINE    return train_loader, train_dataset, test_loader, test_datasetNEWLINE |
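To make the label construction above concrete, here is a small, self-contained sketch of the classify step on synthetic price ratios, using the same cut points as StockDataset.getidx:

import numpy as np

@np.vectorize
def classify(mark):
    # identical cut points to the classify() defined inside getidx above
    if mark < 0.995:
        return 0
    if 0.995 <= mark <= 0.998:
        return 1
    if 0.998 < mark < 1.002:
        return 2
    if 1.002 <= mark <= 1.005:
        return 3
    return 4

ratios = np.array([0.990, 0.996, 1.000, 1.004, 1.010])
print(classify(ratios))  # -> [0 1 2 3 4]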
"""NEWLINEPRACTICE Exam 3.NEWLINENEWLINEThis problem provides practice at:NEWLINE *** FOR and WHILE loops. ***NEWLINENEWLINEAuthors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher,NEWLINE Mark Hays, Amanda Stouder, Aaron Wilkin, their colleagues,NEWLINE and Marcus Hughes-Oliver.NEWLINE""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.NEWLINENEWLINE###############################################################################NEWLINE# Students:NEWLINE#NEWLINE# These problems have DIFFICULTY and TIME ratings:NEWLINE# DIFFICULTY rating: 1 to 10, where:NEWLINE# 1 is very easyNEWLINE# 3 is an "easy" Test 2 question.NEWLINE# 5 is a "typical" Test 2 question.NEWLINE# 7 is a "hard" Test 2 question.NEWLINE# 10 is an EXTREMELY hard problem (too hard for a Test 2 question)NEWLINE#NEWLINE# TIME ratings: A ROUGH estimate of the number of minutes that weNEWLINE# would expect a well-prepared student to take on the problem.NEWLINE#NEWLINE# IMPORTANT: For ALL the problems in this module,NEWLINE# if you reach the time estimate and are NOT close to a solution,NEWLINE# STOP working on that problem and ASK YOUR INSTRUCTOR FOR HELPNEWLINE# on it, in class or via Piazza.NEWLINE###############################################################################NEWLINENEWLINEimport simple_testing as stNEWLINEimport mathNEWLINENEWLINENEWLINEdef main():NEWLINE """ Calls the TEST functions in this module. """NEWLINE run_test_practice_problem3()NEWLINENEWLINE# -----------------------------------------------------------------------------NEWLINE# Students: Some of the testing code below uses SimpleTestCase objects,NEWLINE# from the imported simple_testing (st) module.NEWLINE# -----------------------------------------------------------------------------NEWLINENEWLINENEWLINEdef run_test_practice_problem3():NEWLINE """ Tests the practice_problem3 function. """NEWLINE ###########################################################################NEWLINE # DONE: 2. 
Implement this TEST function.NEWLINE # It TESTS the practice_problem3 function defined below.NEWLINE # Include at least ** 2 ** ADDITIONAL tests beyond those we wrote.NEWLINE #NEWLINE # Try to choose tests that might expose errors in your code!NEWLINE #NEWLINE # As usual, include both EXPECTED and ACTUAL results in your testsNEWLINE # and compute the latter BY HAND (not by running your program).NEWLINE ###########################################################################NEWLINE # DIFFICULTY AND TIME RATINGS (see top of this file for explanation)NEWLINE # DIFFICULTY: 3NEWLINE # TIME ESTIMATE: 10 minutes.NEWLINE ###########################################################################NEWLINENEWLINE # -------------------------------------------------------------------------NEWLINE # 13 tests, plus a 14th after these.NEWLINE # They use the imported simple_testing (st) module.NEWLINE # Each test is a SimpleTestCase with 3 arguments:NEWLINE # -- the function to test,NEWLINE # -- a list containing the argument(s) to send to the function,NEWLINE # -- the correct returned value.NEWLINE # For example, the first test below will callNEWLINE # practice_problem3(-2, 2, 1.3)NEWLINE # and compare the returned value against [1, 7] (the correct answer).NEWLINE # -------------------------------------------------------------------------NEWLINE tests = [st.SimpleTestCase(practice_problem3,NEWLINE [-2, 2, 1.3],NEWLINE [1, 7]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 3, 0.25],NEWLINE [-5, 0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 4, 0.25],NEWLINE [-5, 0, 1, 2]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 5, 0.25],NEWLINE [-5, 0, 1, 2, 6]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 6, 0.25],NEWLINE [-5, 0, 1, 2, 6, 7]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-5, 7, 0.25],NEWLINE [-5, 0, 1, 2, 6, 7, 8]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 3, -1.0],NEWLINE [-1, 0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 4, -1.0],NEWLINE [-1, 0, 1, 2]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 5, -1.0],NEWLINE [-1, 0, 1, 2, 3]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-3, 6, -1.0],NEWLINE [-1, 0, 1, 2, 3, 5]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [30, 0, -1000],NEWLINE []),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [100, 5, 1.414],NEWLINE [139, 183, 516, 560, 849]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [0, 1, 1.414213562373],NEWLINE [286602]),NEWLINE ]NEWLINE # 14th test:NEWLINE big_list = []NEWLINE for k in range(888, 1888):NEWLINE big_list.append(k)NEWLINE tests.append(st.SimpleTestCase(practice_problem3,NEWLINE [888, 1000,NEWLINE - math.sqrt(2) - 0.00000000001],NEWLINE big_list))NEWLINENEWLINE # -------------------------------------------------------------------------NEWLINE # Run the 14 tests in the tests list constructed above.NEWLINE # -------------------------------------------------------------------------NEWLINE st.SimpleTestCase.run_tests('practice_problem3', tests)NEWLINENEWLINE ###########################################################################NEWLINE # TO DO 2 continued: More tests:NEWLINE # YOU add at least ** 2 ** additional tests here.NEWLINE #NEWLINE # You can use the SimpleTestCase class as above, or useNEWLINE # the ordinary expected/actual way, your choice.NEWLINE #NEWLINE # SUGGESTION: Ask an assistant to CHECK your tests to confirmNEWLINE # that they are adequate tests!NEWLINE 
###########################################################################NEWLINE test1 = [st.SimpleTestCase(practice_problem3,NEWLINE [0, 2, .99],NEWLINE [0, 1]),NEWLINE st.SimpleTestCase(practice_problem3,NEWLINE [-10, 6, 0.2],NEWLINE [-6, -5, 0, 1, 2, 6])]NEWLINE st.SimpleTestCase.run_tests('practice_problem3', test1)NEWLINENEWLINENEWLINEdef practice_problem3(start, n, threshold):NEWLINE """NEWLINE What comes in:NEWLINE -- An integer: startNEWLINE -- An nonnegative integer: nNEWLINE -- A number: thresholdNEWLINE What goes out: Returns a list of the first n integers,NEWLINE starting at start, for which the sum of the integer'sNEWLINE sine and cosine is bigger than the given threshold.NEWLINE Side effects: None.NEWLINE Examples:NEWLINE practice_problem3(-2, 2, 1.3) returns [1, 7]NEWLINE as you can see if you work through this example usingNEWLINE the numbers presented below. (Do so!)NEWLINENEWLINE For these examples, the following (and more) numbersNEWLINE (each is rounded to 2 decimal places for the sake of brevity)NEWLINE are relevant:NEWLINE -5: sin = 0.96, cos = 0.28, sum = 1.24NEWLINE -4: sin = 0.76, cos = -0.65, sum = 0.10NEWLINE -3: sin = -0.14, cos = -0.99, sum = -1.13NEWLINE -2: sin = -0.91, cos = -0.42, sum = -1.33NEWLINE -1: sin = -0.84, cos = 0.54, sum = -0.30NEWLINE 0: sin = 0.00, cos = 1.00, sum = 1.00NEWLINE 1: sin = 0.84, cos = 0.54, sum = 1.38NEWLINE 2: sin = 0.91, cos = -0.42, sum = 0.49NEWLINE 3: sin = 0.14, cos = -0.99, sum = -0.85NEWLINE 4: sin = -0.76, cos = -0.65, sum = -1.41NEWLINE 5: sin = -0.96, cos = 0.28, sum = -0.68NEWLINE 6: sin = -0.28, cos = 0.96, sum = 0.68NEWLINE 7: sin = 0.66, cos = 0.75, sum = 1.41NEWLINE 8: sin = 0.99, cos = -0.15, sum = 0.84NEWLINE 9: sin = 0.41, cos = -0.91, sum = -0.50NEWLINE 10: sin = -0.54, cos = -0.84, sum = -1.38NEWLINE 11: sin = -1.00, cos = 0.00, sum = -1.00NEWLINE 12: sin = -0.54, cos = 0.84, sum = 0.31NEWLINE 13: sin = 0.42, cos = 0.91, sum = 1.33NEWLINENEWLINE So if start is -5 and threshold is 0.25 and:NEWLINE -- n is 3, then this function returns [-5, 0, 1]NEWLINE because sin(-5) + cos(-5) IS > 0.25 andNEWLINE sin(-4) + cos(-4) is NOT > 0.25 andNEWLINE sin(-3) + cos(-3) is NOT > 0.25 andNEWLINE sin(-2) + cos(-2) is NOT > 0.25 andNEWLINE sin(-1) + cos(-1) is NOT > 0.25 andNEWLINE sin(0) + cos(0) IS > 0.25 andNEWLINE sin(1) + cos(1) IS > 0.25 andNEWLINE and that makes the required 3 such numbers.NEWLINE -- n is 4, then this function returns [-5, 0, 1, 2]NEWLINE -- n is 5, then this function returns [-5, 0, 1, 2, 6]NEWLINE -- n is 6, then this function returns [-5, 0, 1, 2, 6, 7]NEWLINE -- n is 7, then this function returns [-5, 0, 1, 2, 6, 7, 8]NEWLINENEWLINE while if start is -3 and the threshold is -1.0 and:NEWLINE -- n is 3, then this function returns [-1, 0, 1]NEWLINE -- n is 4, then this function returns [-1, 0, 1, 2]NEWLINE -- n is 5, then this function returns [-1, 0, 1, 2, 3]NEWLINE -- n is 6, then this function returns [-1, 0, 1, 2, 3, 5]NEWLINENEWLINE and if n is 0 (regardless of what start is),NEWLINE this function returns []NEWLINENEWLINE and if threshold is more than the square root of 2,NEWLINE this function returns (regardless of what start and n are):NEWLINE [start, start + 1, start + 2, ... start + n - 1].NEWLINENEWLINE Type hints:NEWLINE :type start: intNEWLINE :type n: intNEWLINE :type threshold: floatNEWLINE """NEWLINE ###########################################################################NEWLINE # DONE: 3. 
Implement and test this function.NEWLINE # Some tests are already written for you (above),NEWLINE # but you are required to write ADDITIONAL tests (above).NEWLINE ###########################################################################NEWLINE # DIFFICULTY AND TIME RATINGS (see top of this file for explanation)NEWLINE # DIFFICULTY: 5NEWLINE # TIME ESTIMATE: < 15 minutes.NEWLINE ###########################################################################NEWLINE listy = []NEWLINE x = 0NEWLINE while x < n:NEWLINE if math.sin(start) + math.cos(start) > threshold:NEWLINE listy = listy + [start]NEWLINE x = x + 1NEWLINE start = start + 1NEWLINENEWLINE return listyNEWLINENEWLINENEWLINE# -----------------------------------------------------------------------------NEWLINE# Calls main to start the ball rolling.NEWLINE# -----------------------------------------------------------------------------NEWLINEmain()NEWLINE |
#!/usr/bin/env pythonNEWLINENEWLINE"""NEWLINEFill the "PlayerAttributes" table with info from this and past seasons' FPL.NEWLINE"""NEWLINEimport osNEWLINENEWLINEimport jsonNEWLINENEWLINEfrom airsenal.framework.mappings import positionsNEWLINEfrom airsenal.framework.schema import PlayerAttributes, session_scope, sessionNEWLINENEWLINEfrom airsenal.framework.utils import (NEWLINE    get_next_gameweek,NEWLINE    get_player,NEWLINE    get_player_from_api_id,NEWLINE    get_team_name,NEWLINE    get_past_seasons,NEWLINE    CURRENT_SEASON,NEWLINE    get_player_attributes,NEWLINE    get_player_team_from_fixture,NEWLINE    get_return_gameweek_from_news,NEWLINE)NEWLINENEWLINEfrom airsenal.framework.data_fetcher import FPLDataFetcherNEWLINENEWLINENEWLINEdef fill_attributes_table_from_file(detail_data, season, dbsession=session):NEWLINE    """Fill player attributes table for previous season using data fromNEWLINE    player detail JSON files.NEWLINE    """NEWLINENEWLINE    for player_name in detail_data.keys():NEWLINE        # find the player id in the player table. If they're notNEWLINE        # there, then we don't care (probably not a current player).NEWLINE        player = get_player(player_name, dbsession=dbsession)NEWLINE        if not player:NEWLINE            print("Couldn't find player {}".format(player_name))NEWLINE            continueNEWLINENEWLINE        print("ATTRIBUTES {} {}".format(season, player))NEWLINE        # now loop through all the fixtures that player played inNEWLINE        # Only one attributes row per gameweek - create list of gameweeksNEWLINE        # encountered so can ignore duplicates (e.g. from double gameweeks).NEWLINE        previous_gameweeks = []NEWLINE        for fixture_data in detail_data[player_name]:NEWLINE            gameweek = int(fixture_data["gameweek"])NEWLINE            if gameweek in previous_gameweeks:NEWLINE                # already done this gameweekNEWLINE                continueNEWLINE            else:NEWLINE                previous_gameweeks.append(gameweek)NEWLINENEWLINE            pa = PlayerAttributes()NEWLINE            pa.player = playerNEWLINE            pa.player_id = player.player_idNEWLINE            pa.season = seasonNEWLINE            pa.gameweek = gameweekNEWLINE            pa.price = int(fixture_data["value"])NEWLINE            pa.team = fixture_data["played_for"]NEWLINE            pa.position = fixture_data["position"]NEWLINE            pa.transfers_balance = int(fixture_data["transfers_balance"])NEWLINE            pa.selected = int(fixture_data["selected"])NEWLINE            pa.transfers_in = int(fixture_data["transfers_in"])NEWLINE            pa.transfers_out = int(fixture_data["transfers_out"])NEWLINE            dbsession.add(pa)NEWLINENEWLINENEWLINEdef fill_attributes_table_from_api(season, gw_start=1, dbsession=session):NEWLINE    """NEWLINE    use the FPL API to get player attributes info for the current seasonNEWLINE    """NEWLINE    fetcher = FPLDataFetcher()NEWLINE    next_gw = get_next_gameweek(season=season, dbsession=dbsession)NEWLINENEWLINE    # needed for selected by calculation from percentage belowNEWLINE    n_players = fetcher.get_current_summary_data()["total_players"]NEWLINENEWLINE    input_data = fetcher.get_player_summary_data()NEWLINENEWLINE    for player_api_id in input_data.keys():NEWLINE        # find the player in the player tableNEWLINE        player = get_player_from_api_id(player_api_id, dbsession=dbsession)NEWLINE        if not player:NEWLINE            print(NEWLINE                "ATTRIBUTES {} No player found with id {}".format(season, player_api_id)NEWLINE            )NEWLINE            continueNEWLINENEWLINE        print("ATTRIBUTES {} {}".format(season, player.name))NEWLINENEWLINE        # First update the current gameweek using the summary dataNEWLINE        p_summary = input_data[player_api_id]NEWLINE        position = positions[p_summary["element_type"]]NEWLINENEWLINE        pa = get_player_attributes(NEWLINE            player.player_id, season=season, gameweek=next_gw, 
dbsession=dbsessionNEWLINE )NEWLINE if pa:NEWLINE # found pre-existing attributes for this gameweekNEWLINE update = TrueNEWLINE else:NEWLINE # no attributes for this gameweek for this player yetNEWLINE pa = PlayerAttributes()NEWLINE update = FalseNEWLINENEWLINE pa.player = playerNEWLINE pa.player_id = player.player_idNEWLINE pa.season = seasonNEWLINE pa.gameweek = next_gwNEWLINE pa.price = int(p_summary["now_cost"])NEWLINE pa.team = get_team_name(p_summary["team"], season=season, dbsession=dbsession)NEWLINE pa.position = positions[p_summary["element_type"]]NEWLINE pa.selected = int(float(p_summary["selected_by_percent"]) * n_players / 100)NEWLINE pa.transfers_in = int(p_summary["transfers_in_event"])NEWLINE pa.transfers_out = int(p_summary["transfers_out_event"])NEWLINE pa.transfers_balance = pa.transfers_in - pa.transfers_outNEWLINE pa.chance_of_playing_next_round = p_summary["chance_of_playing_next_round"]NEWLINE pa.news = p_summary["news"]NEWLINE if (NEWLINE pa.chance_of_playing_next_round is not NoneNEWLINE and pa.chance_of_playing_next_round <= 50NEWLINE ):NEWLINE pa.return_gameweek = get_return_gameweek_from_news(NEWLINE p_summary["news"],NEWLINE season=season,NEWLINE dbsession=dbsession,NEWLINE )NEWLINENEWLINE if not update:NEWLINE # only need to add to the dbsession for new entries, if we're doingNEWLINE # an update the final dbsession.commit() is enoughNEWLINE dbsession.add(pa)NEWLINENEWLINE # now get data for previous gameweeksNEWLINE player_data = fetcher.get_gameweek_data_for_player(player_api_id)NEWLINE if not player_data:NEWLINE print("Failed to get data for", player.name)NEWLINE continueNEWLINE for gameweek, data in player_data.items():NEWLINE if gameweek < gw_start:NEWLINE continueNEWLINENEWLINE for result in data:NEWLINE # check whether there are pre-existing attributes to updateNEWLINE pa = get_player_attributes(NEWLINE player.player_id,NEWLINE season=season,NEWLINE gameweek=gameweek,NEWLINE dbsession=dbsession,NEWLINE )NEWLINE if pa:NEWLINE update = TrueNEWLINE else:NEWLINE pa = PlayerAttributes()NEWLINE update = FalseNEWLINENEWLINE # determine the team the player played for in this fixtureNEWLINE opponent_id = result["opponent_team"]NEWLINE was_home = result["was_home"]NEWLINE kickoff_time = result["kickoff_time"]NEWLINE team = get_player_team_from_fixture(NEWLINE gameweek,NEWLINE opponent_id,NEWLINE was_home,NEWLINE kickoff_time,NEWLINE season=season,NEWLINE dbsession=dbsession,NEWLINE )NEWLINENEWLINE pa.player = playerNEWLINE pa.player_id = player.player_idNEWLINE pa.season = seasonNEWLINE pa.gameweek = gameweekNEWLINE pa.price = int(result["value"])NEWLINE pa.team = teamNEWLINE pa.position = position # does not change during seasonNEWLINE pa.transfers_balance = int(result["transfers_balance"])NEWLINE pa.selected = int(result["selected"])NEWLINE pa.transfers_in = int(result["transfers_in"])NEWLINE pa.transfers_out = int(result["transfers_out"])NEWLINENEWLINE if not update:NEWLINE # don't need to add to dbsession if updating pre-existing rowNEWLINE dbsession.add(pa)NEWLINENEWLINE break # done this gameweek nowNEWLINENEWLINENEWLINEdef make_attributes_table(seasons=[], dbsession=session):NEWLINE """Create the player attributes table using the previous 3 seasons (fromNEWLINE player details JSON files) and the current season (from API)NEWLINE """NEWLINE if not seasons:NEWLINE seasons = get_past_seasons(3)NEWLINE seasons.append(CURRENT_SEASON)NEWLINENEWLINE for season in seasons:NEWLINE if season == CURRENT_SEASON:NEWLINE continueNEWLINE input_path = os.path.join(NEWLINE 
os.path.dirname(__file__), "../data/player_details_{}.json".format(season)NEWLINE )NEWLINE with open(input_path, "r") as f:NEWLINE input_data = json.load(f)NEWLINENEWLINE fill_attributes_table_from_file(NEWLINE detail_data=input_data, season=season, dbsession=dbsessionNEWLINE )NEWLINENEWLINE # this season's data from the APINEWLINE if CURRENT_SEASON in seasons:NEWLINE fill_attributes_table_from_api(season=CURRENT_SEASON, dbsession=dbsession)NEWLINENEWLINE dbsession.commit()NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE with session_scope() as dbsession:NEWLINE make_attributes_table(dbsession=dbsession)NEWLINE |
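As a sanity check on the `selected` computation above (the API reports a selected-by percentage, which is converted back to an absolute manager count), with made-up numbers:

# hypothetical values: 7.5% selected-by, 8 million registered players
selected_by_percent = "7.5"   # the API reports this field as a string
total_players = 8_000_000
selected = int(float(selected_by_percent) * total_players / 100)
print(selected)  # -> 600000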
#!/usr/bin/env python3NEWLINENEWLINE# Copyright 2019 Nina Marie Wahl and Charlotte Heggem.NEWLINE# Copyright 2019 Norwegian University of Science and Technology.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE#     http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEimport sysNEWLINEimport argparseNEWLINENEWLINEimport rclpyNEWLINEfrom rclpy.node import NodeNEWLINEfrom rclpy.utilities import remove_ros_argsNEWLINEfrom lbr_msgs.msg import LbrStatusdataNEWLINEfrom script.tcpSocket import TCPSocketNEWLINEfrom script.udpSocket import UDPSocketNEWLINENEWLINENEWLINEdef cl_red(msge): return '\033[31m' + msge + '\033[0m'NEWLINENEWLINENEWLINEclass LbrStatusNode(Node):NEWLINE    def __init__(self, connection_type, robot):NEWLINE        super().__init__('lbr_statusdata_node')NEWLINE        self.name = 'lbr_statusdata_node'NEWLINE        self.last_status_timestamp = 0NEWLINE        self.path_finished = FalseNEWLINE        self.declare_parameter('port')NEWLINE        port = int(self.get_parameter('port').value)NEWLINE        if robot == 'LBR':NEWLINE            self.declare_parameter('LBR/ip')NEWLINE            ip = str(self.get_parameter('LBR/ip').value)NEWLINE        else:NEWLINE            ip = NoneNEWLINENEWLINE        if connection_type == 'TCP':NEWLINE            self.soc = TCPSocket(ip, port, self.name)NEWLINE        elif connection_type == 'UDP':NEWLINE            self.soc = UDPSocket(ip, port, self.name)NEWLINE        else:NEWLINE            self.soc = NoneNEWLINENEWLINE        # Make Publisher for statusdataNEWLINE        self.pub_lbr_statusdata = self.create_publisher(LbrStatusdata, 'lbr_statusdata', 10)NEWLINENEWLINE        # busy-wait until the socket reports a connectionNEWLINE        while not self.soc.isconnected:NEWLINE            passNEWLINE        self.get_logger().info('Node is ready')NEWLINENEWLINE        while rclpy.ok() and self.soc.isconnected:NEWLINE            self.status_callback(self.pub_lbr_statusdata, self.soc.lbr_statusdata)NEWLINENEWLINE    def status_callback(self, status_publisher, data):NEWLINE        if data is not None:NEWLINE            msg = LbrStatusdata()NEWLINE            msg.header.stamp = self.get_clock().now().to_msg()NEWLINE            status_elements = data[1].split(",")NEWLINE            if status_elements[1] != self.last_status_timestamp:NEWLINE                self.last_status_timestamp = status_elements[1]NEWLINE                # fields after the timestamp are "key:true/false" flagsNEWLINE                for element in status_elements[2:]:NEWLINE                    key, _, value = element.partition(":")NEWLINE                    flag = value == "true"NEWLINE                    if key == "ReadyToMove":NEWLINE                        msg.ready_to_move = flagNEWLINE                    elif key == "isLBRmoving":NEWLINE                        msg.is_lbr_moving = flagNEWLINE                        self.soc.is_lbr_moving = flagNEWLINE                    elif key == "PathFinished":NEWLINE                        msg.path_finished = flagNEWLINE                    elif key == "LBRsafetyStop":NEWLINE                        msg.lbr_safetystop = flagNEWLINE                status_publisher.publish(msg)NEWLINENEWLINENEWLINEdef main(argv=sys.argv[1:]):NEWLINE    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)NEWLINE    parser.add_argument('-c', '--connection')NEWLINE    parser.add_argument('-ro', '--robot')NEWLINE    args = parser.parse_args(remove_ros_args(args=argv))NEWLINENEWLINE    rclpy.init(args=argv)NEWLINE    lbr_statusdata_node = LbrStatusNode(args.connection, args.robot)NEWLINENEWLINE    rclpy.spin(lbr_statusdata_node)NEWLINENEWLINE    try:NEWLINE        lbr_statusdata_node.destroy_node()NEWLINE        rclpy.shutdown()NEWLINE    except Exception:NEWLINE        print(cl_red('Error: ') + "rclpy shutdown failed")NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    main()NEWLINE |
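The status string handled by status_callback is not documented in the source, but from the parser it is a comma-separated list whose second field is a timestamp, followed by key:value flags. A stand-alone sketch with an invented payload (the field names match the parser above, the values are made up):

# invented example of what data[1] might look like:
payload = "status,1594129845,ReadyToMove:true,isLBRmoving:false,PathFinished:false,LBRsafetyStop:false"
fields = payload.split(",")
timestamp = fields[1]
flags = {k: (v == "true") for k, v in (f.split(":") for f in fields[2:])}
print(timestamp, flags)
# -> 1594129845 {'ReadyToMove': True, 'isLBRmoving': False, 'PathFinished': False, 'LBRsafetyStop': False}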
from __future__ import absolute_importNEWLINEimport os.path as ospNEWLINEimport torchNEWLINENEWLINEfrom PIL import ImageNEWLINENEWLINENEWLINEclass SeqPreprocessor(object):NEWLINE    def __init__(self, seqset, dataset, transform=None):NEWLINE        super(SeqPreprocessor, self).__init__()NEWLINE        self.seqset = seqsetNEWLINE        self.identities = dataset.identitiesNEWLINE        self.transform = transformNEWLINE        self.root = [dataset.images_dir]NEWLINE        if dataset.other_dir is not None:NEWLINE            self.root.append(dataset.other_dir)NEWLINENEWLINE    def __len__(self):NEWLINE        return len(self.seqset)NEWLINENEWLINE    def __getitem__(self, indices):NEWLINE        if isinstance(indices, (tuple, list)):NEWLINE            return [self._get_single_item(index) for index in indices]NEWLINE        return self._get_single_item(indices)NEWLINENEWLINE    def _get_single_item(self, index):NEWLINE        start_ind, end_ind, pid, label, camid = self.seqset[index]NEWLINENEWLINE        if len(self.root) == 1:NEWLINE            # RGB frames only: accumulate the whole sequence, then wrap it onceNEWLINE            # (previously the list was re-created inside the loop, so only theNEWLINE            # last frame survived)NEWLINE            imgseq = []NEWLINE            for ind in range(start_ind, end_ind):NEWLINE                fname = self.identities[pid][camid][ind]NEWLINE                fpath_img = osp.join(self.root[0], fname)NEWLINE                imgrgb = Image.open(fpath_img).convert('RGB')NEWLINE                imgseq.append(imgrgb)NEWLINE            seq = [imgseq]NEWLINENEWLINE        elif len(self.root) == 2:NEWLINE            # RGB frames plus optical-flow framesNEWLINE            imgseq = []NEWLINE            flowseq = []NEWLINE            for ind in range(start_ind, end_ind):NEWLINE                fname = self.identities[pid][camid][ind]NEWLINE                fpath_img = osp.join(self.root[0], fname)NEWLINE                imgrgb = Image.open(fpath_img).convert('RGB')NEWLINE                fpath_flow = osp.join(self.root[1], fname)NEWLINE                flowrgb = Image.open(fpath_flow).convert('RGB')NEWLINE                imgseq.append(imgrgb)NEWLINE                flowseq.append(flowrgb)NEWLINE            seq = [imgseq, flowseq]NEWLINENEWLINE        else:NEWLINE            raise RuntimeError("The root is not valid")NEWLINENEWLINE        if self.transform is not None:NEWLINE            seq = self.transform(seq)NEWLINENEWLINE        img_tensor = torch.stack(seq[0], 0)NEWLINENEWLINE        if len(self.root) == 2:NEWLINE            flow_tensor = torch.stack(seq[1], 0)NEWLINE        else:NEWLINE            flow_tensor = NoneNEWLINENEWLINE        return img_tensor, flow_tensor, label, camid, start_ind, end_indNEWLINE |
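SeqPreprocessor.__getitem__ accepts either one index or a batch of indices. A tiny runnable demonstration of that int-or-list dispatch pattern (the Indexable class is a stand-in, not from the source):

class Indexable:
    # same dispatch pattern as SeqPreprocessor.__getitem__ above
    def _get_single_item(self, index):
        return index * 10

    def __getitem__(self, indices):
        if isinstance(indices, (tuple, list)):
            return [self._get_single_item(i) for i in indices]
        return self._get_single_item(indices)

d = Indexable()
print(d[3])        # -> 30
print(d[[1, 2]])   # -> [10, 20]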
from nose.tools import set_traceNEWLINENEWLINEfrom api.opds import CirculationManagerAnnotatorNEWLINEfrom core.lane import Facets, PaginationNEWLINEfrom core.model import (NEWLINE BaseMaterializedWork,NEWLINE LicensePool,NEWLINE Session,NEWLINE)NEWLINEfrom core.opds import AcquisitionFeedNEWLINENEWLINEclass AdminAnnotator(CirculationManagerAnnotator):NEWLINENEWLINE def __init__(self, circulation, library, test_mode=False):NEWLINE super(AdminAnnotator, self).__init__(circulation, None, library, test_mode=test_mode)NEWLINE self.opds_cache_field = NoneNEWLINENEWLINE def annotate_work_entry(self, work, active_license_pool, edition, identifier, feed, entry):NEWLINENEWLINE super(AdminAnnotator, self).annotate_work_entry(work, active_license_pool, edition, identifier, feed, entry)NEWLINENEWLINE if isinstance(work, BaseMaterializedWork):NEWLINE data_source_name = work.nameNEWLINE else:NEWLINE data_source_name = active_license_pool.data_source.nameNEWLINENEWLINE feed.add_link_to_entry(NEWLINE entry,NEWLINE rel="http://librarysimplified.org/terms/rel/refresh",NEWLINE href=self.url_for(NEWLINE "refresh", data_source=data_source_name,NEWLINE identifier_type=identifier.type,NEWLINE identifier=identifier.identifier, _external=True)NEWLINE )NEWLINENEWLINE if active_license_pool.suppressed:NEWLINE feed.add_link_to_entry(NEWLINE entry,NEWLINE rel="http://librarysimplified.org/terms/rel/restore",NEWLINE href=self.url_for(NEWLINE "unsuppress", data_source=data_source_name,NEWLINE identifier_type=identifier.type,NEWLINE identifier=identifier.identifier, _external=True)NEWLINE )NEWLINE else:NEWLINE feed.add_link_to_entry(NEWLINE entry,NEWLINE rel="http://librarysimplified.org/terms/rel/hide",NEWLINE href=self.url_for(NEWLINE "suppress", data_source=data_source_name,NEWLINE identifier_type=identifier.type,NEWLINE identifier=identifier.identifier, _external=True)NEWLINE )NEWLINENEWLINE feed.add_link_to_entry(NEWLINE entry,NEWLINE rel="edit",NEWLINE href=self.url_for(NEWLINE "edit", data_source=data_source_name,NEWLINE identifier_type=identifier.type,NEWLINE identifier=identifier.identifier, _external=True)NEWLINE )NEWLINE NEWLINE def complaints_url(self, facets, pagination):NEWLINE kwargs = dict(facets.items())NEWLINE kwargs.update(dict(pagination.items()))NEWLINE return self.url_for("complaints", _external=True, **kwargs)NEWLINENEWLINE def suppressed_url(self, pagination):NEWLINE kwargs = dict(pagination.items())NEWLINE return self.url_for("suppressed", _external=True, **kwargs)NEWLINENEWLINE def annotate_feed(self, feed):NEWLINE # Add a 'search' link.NEWLINE search_url = self.url_for(NEWLINE 'lane_search', languages=None,NEWLINE _external=TrueNEWLINE )NEWLINE search_link = dict(NEWLINE rel="search",NEWLINE type="application/opensearchdescription+xml",NEWLINE href=search_urlNEWLINE )NEWLINE feed.add_link_to_feed(feed.feed, **search_link)NEWLINENEWLINENEWLINEclass AdminFeed(AcquisitionFeed):NEWLINENEWLINE @classmethodNEWLINE def complaints(cls, library, title, url, annotator, pagination=None):NEWLINE _db = Session.object_session(library)NEWLINE facets = Facets.default(library)NEWLINE pagination = pagination or Pagination.default()NEWLINENEWLINE q = LicensePool.with_complaint(library)NEWLINE results = pagination.apply(q).all()NEWLINENEWLINE if len(results) > 0:NEWLINE (pools, counts) = zip(*results)NEWLINE else:NEWLINE pools = ()NEWLINENEWLINE works = [pool.work for pool in pools]NEWLINE feed = cls(_db, title, url, works, annotator)NEWLINENEWLINE # Render a 'start' linkNEWLINE top_level_title = 
annotator.top_level_title()NEWLINE start_uri = annotator.groups_url(None)NEWLINE AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="start", title=top_level_title)NEWLINENEWLINE # Render an 'up' link, same as the 'start' link to indicate top-level feedNEWLINE AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="up", title=top_level_title)NEWLINENEWLINE if len(works) > 0:NEWLINE # There are works in this list. Add a 'next' link.NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="next", href=annotator.complaints_url(facets, pagination.next_page))NEWLINENEWLINE if pagination.offset > 0:NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="first", href=annotator.complaints_url(facets, pagination.first_page))NEWLINENEWLINE previous_page = pagination.previous_pageNEWLINE if previous_page:NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="previous", href=annotator.complaints_url(facets, previous_page))NEWLINENEWLINE annotator.annotate_feed(feed)NEWLINE return unicode(feed)NEWLINENEWLINE @classmethodNEWLINE def suppressed(cls, _db, title, url, annotator, pagination=None):NEWLINE pagination = pagination or Pagination.default()NEWLINENEWLINE q = _db.query(LicensePool).filter(NEWLINE LicensePool.suppressed == True).order_by(NEWLINE LicensePool.idNEWLINE )NEWLINE pools = pagination.apply(q).all()NEWLINENEWLINE works = [pool.work for pool in pools]NEWLINE feed = cls(_db, title, url, works, annotator)NEWLINENEWLINE # Render a 'start' linkNEWLINE top_level_title = annotator.top_level_title()NEWLINE start_uri = annotator.groups_url(None)NEWLINE AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="start", title=top_level_title)NEWLINENEWLINE # Render an 'up' link, same as the 'start' link to indicate top-level feedNEWLINE AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="up", title=top_level_title)NEWLINENEWLINE if len(works) > 0:NEWLINE # There are works in this list. Add a 'next' link.NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="next", href=annotator.suppressed_url(pagination.next_page))NEWLINENEWLINE if pagination.offset > 0:NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="first", href=annotator.suppressed_url(pagination.first_page))NEWLINENEWLINE previous_page = pagination.previous_pageNEWLINE if previous_page:NEWLINE AdminFeed.add_link_to_feed(feed.feed, rel="previous", href=annotator.suppressed_url(previous_page))NEWLINENEWLINE annotator.annotate_feed(feed)NEWLINE return unicode(feed)NEWLINE NEWLINE |
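The complaints and suppressed feeds follow the same pagination-link recipe: 'start' and 'up' always, 'next' while the page has results, and 'first' plus 'previous' once past the first page. A schematic, framework-free sketch of that recipe (assuming core.lane.Pagination.previous_page is None only on the first page):

def pagination_rels(num_results, offset):
    # mirrors the link logic of AdminFeed.complaints/suppressed above
    rels = ["start", "up"]
    if num_results > 0:
        rels.append("next")
    if offset > 0:
        rels.extend(["first", "previous"])
    return rels

print(pagination_rels(num_results=10, offset=0))   # -> ['start', 'up', 'next']
print(pagination_rels(num_results=10, offset=50))  # -> ['start', 'up', 'next', 'first', 'previous']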
# -*- coding: utf-8 -*-NEWLINE"""NEWLINECreated on Mon Jun 08 10:17:32 2015NEWLINENEWLINE@author: PacoNEWLINE"""NEWLINENEWLINEfrom api import APINEWLINENEWLINEclass Soundcloud(API):NEWLINENEWLINE    _class_name = 'Soundcloud'NEWLINE    _category = 'Music'NEWLINE    _help_url = 'https://developers.soundcloud.com/docs/api/guide'NEWLINE    _api_url = 'http://api.soundcloud.com/'NEWLINENEWLINE    def __init__(self,apikey):NEWLINE        self._api_key = apikeyNEWLINENEWLINE    def _parsing_data(self,data):NEWLINE        res = {'title':list(),'downloads':list(),'favorites':list(),'comments':list(),'genre':list(),'duration':list(),'tags':list(),'description':list(),'url':list()}NEWLINE        for d in data:NEWLINE            res['title'].append(self._tools.key_test('title',d))NEWLINE            res['downloads'].append(self._tools.key_test('download_count',d,'int'))NEWLINE            res['favorites'].append(self._tools.key_test('favoritings_count',d,'int'))NEWLINE            res['comments'].append(self._tools.key_test('comment_count',d,'int'))NEWLINE            res['genre'].append(self._tools.key_test('genre',d))NEWLINE            res['duration'].append(self._tools.key_test('duration',d))NEWLINE            res['tags'].append(self._tools.key_test('tag_list',d,'list'))NEWLINE            res['description'].append(self._tools.key_test('description',d))NEWLINE            res['url'].append(self._tools.key_test('permalink_url',d))NEWLINE        return resNEWLINENEWLINE    def _parsing_data2(self,data):NEWLINE        res = {'username':list(),'country':list(),'name':list(),'description':list(),'city':list(),'website':list(),'tracks':list(),'followers':list()}NEWLINE        for d in data:NEWLINE            res['username'].append(self._tools.key_test('username',d))NEWLINE            res['country'].append(self._tools.key_test('country',d))NEWLINE            res['name'].append(self._tools.key_test('full_name',d))NEWLINE            res['description'].append(self._tools.key_test('description',d))NEWLINE            res['city'].append(self._tools.key_test('city',d))NEWLINE            res['website'].append(self._tools.key_test('website',d))NEWLINE            res['tracks'].append(self._tools.key_test('track_count',d,'int'))NEWLINE            res['followers'].append(self._tools.key_test('followers_count',d,'int'))NEWLINE        return resNEWLINENEWLINE    def _parsing_data3(self,data):NEWLINE        res = {'name':list(),'tracks':list(),'members':list(),'contributors':list(),'url':list(),'description':list()}NEWLINE        for d in data:NEWLINE            res['name'].append(self._tools.key_test('name',d))NEWLINE            res['tracks'].append(self._tools.key_test('track_count',d,'int'))NEWLINE            res['members'].append(self._tools.key_test('members_count',d,'int'))NEWLINE            res['contributors'].append(self._tools.key_test('contributors_count',d,'int'))NEWLINE            res['url'].append(self._tools.key_test('permalink_url',d))NEWLINE            res['description'].append(self._tools.key_test('description',d))NEWLINE        return resNEWLINENEWLINE    def _parsing_data4(self,data):NEWLINE        res = {'id':list(),'text':list()}NEWLINE        for d in data:NEWLINE            res['id'].append(self._tools.key_test('track_id',d,'int'))NEWLINE            res['text'].append(self._tools.key_test('body',d))NEWLINE        return resNEWLINENEWLINE    def search_tracks(self,text='',limit=10):NEWLINE        text = text.replace(' ','+')NEWLINE        url = self._api_url+'tracks.json?client_id='+self._api_key+'&q='+text+'&limit='+str(limit)NEWLINE        data = self._tools.data_from_url(url)NEWLINE        self._increment_nb_call()NEWLINE        return self._parsing_data(data)NEWLINENEWLINE    def get_infos_track(self,idd=182242225):NEWLINE        url = self._api_url+'tracks/'+str(idd)+'?client_id='+self._api_keyNEWLINE        data = self._tools.data_from_url(url)NEWLINE        self._increment_nb_call()NEWLINE        # this endpoint returns a single track object rather than a list,NEWLINE        # so wrap it before handing it to the list-oriented parserNEWLINE        return self._parsing_data([data])NEWLINENEWLINE    def 
get_latest_tracks(self,limit=10):NEWLINE url = self._api_url+'tracks.json?client_id='+self._api_key+'&limit='+str(limit)NEWLINE data = self._tools.data_from_url(url)NEWLINE self._increment_nb_call()NEWLINE return self._parsing_data(data)NEWLINENEWLINE def search_users(self,text='',limit=10):NEWLINE text = text.replace(' ','+')NEWLINE url = self._api_url+'users?client_id='+self._api_key+'&q='+text+'&limit='+str(limit)NEWLINE data = self._tools.data_from_url(url)NEWLINE self._increment_nb_call()NEWLINE return self._parsing_data2(data)NEWLINENEWLINE def search_groups(self,text='',limit=10):NEWLINE text = text.replace(' ','+')NEWLINE url = self._api_url+'groups?client_id='+self._api_key+'&q='+text+'&limit='+str(limit)NEWLINE data = self._tools.data_from_url(url)NEWLINE self._increment_nb_call()NEWLINE return self._parsing_data3(data)NEWLINENEWLINE def get_latest_comments(self,limit=10):NEWLINE url = self._api_url+'comments?client_id='+self._api_key+'&limit='+str(limit)NEWLINE data = self._tools.data_from_url(url)NEWLINE self._increment_nb_call()NEWLINE return self._parsing_data4(data)NEWLINE |
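A short usage sketch of the wrapper above (the API key is a placeholder; each call returns a dict of parallel lists as built by the _parsing_data helpers):

sc = Soundcloud("YOUR_CLIENT_ID")  # placeholder key
tracks = sc.search_tracks(text="ambient drone", limit=5)
for title, downloads in zip(tracks["title"], tracks["downloads"]):
    print(title, downloads)
users = sc.search_users(text="boards of canada", limit=3)
print(users["username"], users["followers"])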
"""NEWLINE# https://code.google.com/p/promisedata/source/browse/#svn%2Ftrunk%2Feffort%2FalbrechtNEWLINENEWLINEStandard header:NEWLINENEWLINE"""NEWLINEfrom __future__ import division,print_functionNEWLINEimport sysNEWLINEsys.dont_write_bytecode = TrueNEWLINEfrom lib import *NEWLINENEWLINE"""NEWLINE@attribute Language numericNEWLINE@attribute Hardware numericNEWLINE@attribute Duration numericNEWLINE@attribute KSLOC numericNEWLINE@attribute AdjFP numericNEWLINE@attribute RAWFP numericNEWLINE@attribute EffortMM numericNEWLINE"""NEWLINENEWLINEdef kemerer(weighFeature = None, NEWLINE split = "variance"):NEWLINE vl=1;l=2;n=3;h=4;vh=5;xh=6;_=0NEWLINE return data(indep= [ NEWLINE # 0..5NEWLINE 'Language','Hardware','Duration','KSLOC','AdjFP','RAWFP'],NEWLINE less = ['Effort'],NEWLINE _rows=[NEWLINE [1,1,17,253.6,1217.1,1010,287],NEWLINE [1,2,7,40.5,507.3,457,82.5],NEWLINE [1,3,15,450,2306.8,2284,1107.31],NEWLINE [1,1,18,214.4,788.5,881,86.9],NEWLINE [1,2,13,449.9,1337.6,1583,336.3],NEWLINE [1,4,5,50,421.3,411,84],NEWLINE [2,4,5,43,99.9,97,23.2],NEWLINE [1,2,11,200,993,998,130.3],NEWLINE [1,1,14,289,1592.9,1554,116],NEWLINE [1,1,5,39,240,250,72],NEWLINE [1,1,13,254.2,1611,1603,258.7],NEWLINE [1,5,31,128.6,789,724,230.7],NEWLINE [1,6,20,161.4,690.9,705,157],NEWLINE [1,1,26,164.8,1347.5,1375,246.9],NEWLINE [3,1,14,60.2,1044.3,976,69.9]NEWLINE ],NEWLINE _tunings =[[NEWLINE # vlow low nom high vhigh xhighNEWLINE #scale factors:NEWLINE 'Prec', 6.20, 4.96, 3.72, 2.48, 1.24, _ ],[NEWLINE 'Flex', 5.07, 4.05, 3.04, 2.03, 1.01, _ ],[NEWLINE 'Resl', 7.07, 5.65, 4.24, 2.83, 1.41, _ ],[NEWLINE 'Pmat', 7.80, 6.24, 4.68, 3.12, 1.56, _ ],[NEWLINE 'Team', 5.48, 4.38, 3.29, 2.19, 1.01, _ ]],NEWLINE weighFeature = weighFeature,NEWLINE _split = split,NEWLINE _isCocomo = FalseNEWLINE )NEWLINENEWLINEdef _kemerer(): print(kemerer()) |
from pomagma.reducer.syntax import ABS, APP, BOT, IVAR, JOIN, NVAR, TOPNEWLINEfrom pomagma.reducer.systems import (System, try_compute_step,NEWLINE try_decide_equal, unfold)NEWLINEfrom pomagma.util.testing import for_eachNEWLINENEWLINEx = NVAR('x')NEWLINEy = NVAR('y')NEWLINEz = NVAR('z')NEWLINEi0 = IVAR(0)NEWLINENEWLINENEWLINE@for_each([NEWLINE (System(x=y, y=y), x, y),NEWLINE (System(x=y, y=y), APP(x, x), APP(y, x)),NEWLINE (System(x=y, y=y), JOIN(x, y), y),NEWLINE (System(x=y, y=y, z=z), JOIN(x, JOIN(y, z)), JOIN(y, z)),NEWLINE])NEWLINEdef test_unfold(system, body, expected):NEWLINE assert unfold(system, body) is expectedNEWLINENEWLINENEWLINE@for_each([NEWLINE System(),NEWLINE System(x=ABS(i0)),NEWLINE System(x=ABS(APP(i0, TOP))),NEWLINE System(x=ABS(APP(i0, BOT))),NEWLINE System(x=ABS(i0), y=ABS(i0)),NEWLINE])NEWLINEdef test_try_compute_step_normal(system):NEWLINE system = system.copy()NEWLINE assert not try_compute_step(system), systemNEWLINENEWLINENEWLINE@for_each([NEWLINE (System(x=x), System(x=x)),NEWLINE (System(x=y, y=x), System(x=x, y=x)),NEWLINE (System(x=x, y=x), System(x=x, y=x)),NEWLINE (System(x=ABS(i0), y=APP(x, x)), System(x=ABS(i0), y=x)),NEWLINE (System(x=ABS(i0), y=x), System(x=ABS(i0), y=ABS(i0))),NEWLINE])NEWLINEdef test_try_compute_step_nonnormal(system, expected):NEWLINE actual = system.copy()NEWLINE assert try_compute_step(actual)NEWLINE assert actual == expectedNEWLINENEWLINENEWLINE@for_each([NEWLINE (System(x=ABS(i0), y=ABS(i0)), x, y, True),NEWLINE (System(x=ABS(i0), y=ABS(APP(i0, TOP))), x, y, False),NEWLINE])NEWLINEdef test_try_decide_equal(system, lhs, rhs, expected):NEWLINE assert try_decide_equal(system, lhs, rhs) is expectedNEWLINE |
import loggingNEWLINEimport mathNEWLINEimport osNEWLINEimport pickleNEWLINEimport sysNEWLINEimport timeNEWLINENEWLINEimport psutilNEWLINENEWLINEfrom .catboost_utils import construct_custom_catboost_metricNEWLINEfrom .hyperparameters.parameters import get_param_baselineNEWLINEfrom .hyperparameters.searchspaces import get_default_searchspaceNEWLINEfrom ..abstract.abstract_model import AbstractModelNEWLINEfrom ...constants import PROBLEM_TYPES_CLASSIFICATION, MULTICLASSNEWLINEfrom ....utils.exceptions import NotEnoughMemoryError, TimeLimitExceededNEWLINEfrom .....try_import import try_import_catboostNEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINENEWLINE# TODO: Catboost crashes on multiclass problems where only two classes have significant member count.NEWLINE# Question: Do we turn these into binary classification and then convert to multiclass output in Learner? This would make the most sense.NEWLINE# TODO: Consider having Catboost variant that converts all categoricals to numerical as done in RFModel, was showing improved results in some problems.NEWLINEclass CatboostModel(AbstractModel):NEWLINE def __init__(self, path: str, name: str, problem_type: str, objective_func, stopping_metric=None, num_classes=None, hyperparameters=None, features=None, debug=0, **kwargs):NEWLINE super().__init__(path=path, name=name, problem_type=problem_type, objective_func=objective_func, stopping_metric=stopping_metric, num_classes=num_classes, hyperparameters=hyperparameters, features=features, debug=debug, **kwargs)NEWLINE try_import_catboost()NEWLINE from catboost import CatBoostClassifier, CatBoostRegressorNEWLINE self.model_type = CatBoostClassifier if problem_type in PROBLEM_TYPES_CLASSIFICATION else CatBoostRegressorNEWLINE if isinstance(self.params['eval_metric'], str):NEWLINE self.metric_name = self.params['eval_metric']NEWLINE else:NEWLINE self.metric_name = type(self.params['eval_metric']).__name__NEWLINENEWLINE def _set_default_params(self):NEWLINE default_params = get_param_baseline(problem_type=self.problem_type)NEWLINE for param, val in default_params.items():NEWLINE self._set_default_param_value(param, val)NEWLINE self._set_default_param_value('random_seed', 0) # Remove randomness for reproducibilityNEWLINE self._set_default_param_value('eval_metric', construct_custom_catboost_metric(self.stopping_metric, True, not self.stopping_metric_needs_y_pred, self.problem_type))NEWLINE # Set 'allow_writing_files' to True in order to keep log files created by catboost during training (these will be saved in the directory where AutoGluon stores this model)NEWLINE self._set_default_param_value('allow_writing_files', False) # Disables creation of catboost logging files during training by defaultNEWLINENEWLINE def _get_default_searchspace(self):NEWLINE return get_default_searchspace(self.problem_type, num_classes=self.num_classes)NEWLINENEWLINE def preprocess(self, X):NEWLINE X = super().preprocess(X)NEWLINE categoricals = list(X.select_dtypes(include='category').columns)NEWLINE if categoricals:NEWLINE X = X.copy()NEWLINE for category in categoricals:NEWLINE current_categories = X[category].cat.categoriesNEWLINE if '__NaN__' in current_categories:NEWLINE X[category] = X[category].fillna('__NaN__')NEWLINE else:NEWLINE X[category] = X[category].cat.add_categories('__NaN__').fillna('__NaN__')NEWLINE return XNEWLINENEWLINE # TODO: Use Pool in preprocess, optimize bagging to do Pool.split() to avoid re-computing pool for each fold! 
Requires stateful + yNEWLINE # Pool is much more memory efficient, avoids copying data twice in memoryNEWLINE def fit(self, X_train, Y_train, X_test=None, Y_test=None, time_limit=None, **kwargs):NEWLINE from catboost import PoolNEWLINE num_rows_train = len(X_train)NEWLINE num_cols_train = len(X_train.columns)NEWLINE if self.problem_type == MULTICLASS:NEWLINE if self.num_classes is not None:NEWLINE num_classes = self.num_classesNEWLINE else:NEWLINE num_classes = 10 # Guess if not given, can do better by looking at y_trainNEWLINE else:NEWLINE num_classes = 1NEWLINENEWLINE # TODO: Add ignore_memory_limits param to disable NotEnoughMemoryError ExceptionsNEWLINE max_memory_usage_ratio = self.params_aux['max_memory_usage_ratio']NEWLINE approx_mem_size_req = num_rows_train * num_cols_train * num_classes / 2 # TODO: Extremely crude approximation, can be vastly improvedNEWLINE if approx_mem_size_req > 1e9: # > 1 GBNEWLINE available_mem = psutil.virtual_memory().availableNEWLINE ratio = approx_mem_size_req / available_memNEWLINE if ratio > (1 * max_memory_usage_ratio):NEWLINE logger.warning('\tWarning: Not enough memory to safely train CatBoost model, roughly requires: %s GB, but only %s GB is available...' % (round(approx_mem_size_req / 1e9, 3), round(available_mem / 1e9, 3)))NEWLINE raise NotEnoughMemoryErrorNEWLINE elif ratio > (0.2 * max_memory_usage_ratio):NEWLINE logger.warning('\tWarning: Potentially not enough memory to safely train CatBoost model, roughly requires: %s GB, but only %s GB is available...' % (round(approx_mem_size_req / 1e9, 3), round(available_mem / 1e9, 3)))NEWLINENEWLINE start_time = time.time()NEWLINE X_train = self.preprocess(X_train)NEWLINE cat_features = list(X_train.select_dtypes(include='category').columns)NEWLINE X_train = Pool(data=X_train, label=Y_train, cat_features=cat_features)NEWLINENEWLINE if X_test is not None:NEWLINE X_test = self.preprocess(X_test)NEWLINE X_test = Pool(data=X_test, label=Y_test, cat_features=cat_features)NEWLINE eval_set = X_testNEWLINE if num_rows_train <= 10000:NEWLINE modifier = 1NEWLINE else:NEWLINE modifier = 10000/num_rows_trainNEWLINE early_stopping_rounds = max(round(modifier*150), 10)NEWLINE num_sample_iter_max = max(round(modifier*50), 2)NEWLINE else:NEWLINE eval_set = NoneNEWLINE early_stopping_rounds = NoneNEWLINE num_sample_iter_max = 50NEWLINENEWLINE invalid_params = ['num_threads', 'num_gpus']NEWLINE for invalid in invalid_params:NEWLINE if invalid in self.params:NEWLINE self.params.pop(invalid)NEWLINE train_dir = NoneNEWLINE if 'allow_writing_files' in self.params and self.params['allow_writing_files']:NEWLINE if 'train_dir' not in self.params:NEWLINE try:NEWLINE # TODO: What if path is in S3?NEWLINE os.makedirs(os.path.dirname(self.path), exist_ok=True)NEWLINE except:NEWLINE passNEWLINE else:NEWLINE train_dir = self.path + 'catboost_info'NEWLINE logger.log(15, f'\tCatboost model hyperparameters: {self.params}')NEWLINENEWLINE # TODO: Add more control over these params (specifically early_stopping_rounds)NEWLINE verbosity = kwargs.get('verbosity', 2)NEWLINE if verbosity <= 1:NEWLINE verbose = FalseNEWLINE elif verbosity == 2:NEWLINE verbose = FalseNEWLINE elif verbosity == 3:NEWLINE verbose = 20NEWLINE else:NEWLINE verbose = TrueNEWLINENEWLINE init_model = NoneNEWLINE init_model_tree_count = NoneNEWLINE init_model_best_iteration = NoneNEWLINE init_model_best_score = NoneNEWLINENEWLINE params = self.params.copy()NEWLINE num_features = len(self.features)NEWLINE if self.problem_type == MULTICLASS and 'rsm' not in params and 
'colsample_bylevel' not in params and num_features > 1000:NEWLINE if time_limit:NEWLINE # Reduce sample iterations to avoid taking unreasonable amounts of timeNEWLINE num_sample_iter_max = max(round(num_sample_iter_max/2), 2)NEWLINE # Subsample columns to speed up trainingNEWLINE params['colsample_bylevel'] = max(min(1.0, 1000 / num_features), 0.05)NEWLINE logger.log(30, f'\tMany features detected ({num_features}), dynamically setting \'colsample_bylevel\' to {params["colsample_bylevel"]} to speed up training (Default = 1).')NEWLINE logger.log(30, f'\tTo disable this functionality, explicitly specify \'colsample_bylevel\' in the model hyperparameters.')NEWLINENEWLINE if time_limit:NEWLINE time_left_start = time_limit - (time.time() - start_time)NEWLINE if time_left_start <= time_limit * 0.4: # if 60% of time was spent preprocessing, likely not enough time to train modelNEWLINE raise TimeLimitExceededNEWLINE params_init = params.copy()NEWLINE num_sample_iter = min(num_sample_iter_max, params_init['iterations'])NEWLINE params_init['iterations'] = num_sample_iterNEWLINE if train_dir is not None:NEWLINE params_init['train_dir'] = train_dirNEWLINE self.model = self.model_type(NEWLINE **params_init,NEWLINE )NEWLINE self.model.fit(NEWLINE X_train,NEWLINE eval_set=eval_set,NEWLINE use_best_model=True,NEWLINE verbose=verbose,NEWLINE # early_stopping_rounds=early_stopping_rounds,NEWLINE )NEWLINENEWLINE init_model_tree_count = self.model.tree_count_NEWLINE init_model_best_iteration = self.model.get_best_iteration()NEWLINE init_model_best_score = self.model.get_best_score()['validation'][self.metric_name]NEWLINENEWLINE time_left_end = time_limit - (time.time() - start_time)NEWLINE time_taken_per_iter = (time_left_start - time_left_end) / num_sample_iterNEWLINE estimated_iters_in_time = round(time_left_end / time_taken_per_iter)NEWLINE init_model = self.modelNEWLINENEWLINE params_final = params.copy()NEWLINENEWLINE # TODO: This only handles memory with time_limits specified, but not with time_limits=None, handle when time_limits=NoneNEWLINE available_mem = psutil.virtual_memory().availableNEWLINE model_size_bytes = sys.getsizeof(pickle.dumps(self.model))NEWLINENEWLINE max_memory_proportion = 0.3 * max_memory_usage_ratioNEWLINE mem_usage_per_iter = model_size_bytes / num_sample_iterNEWLINE max_memory_iters = math.floor(available_mem * max_memory_proportion / mem_usage_per_iter)NEWLINENEWLINE params_final['iterations'] = min(params['iterations'] - num_sample_iter, estimated_iters_in_time)NEWLINE if params_final['iterations'] > max_memory_iters - num_sample_iter:NEWLINE if max_memory_iters - num_sample_iter <= 500:NEWLINE logger.warning('\tWarning: CatBoost will be early stopped due to lack of memory, increase memory to enable full quality models, max training iterations changed to %s from %s' % (max_memory_iters - num_sample_iter, params_final['iterations']))NEWLINE params_final['iterations'] = max_memory_iters - num_sample_iterNEWLINE else:NEWLINE params_final = params.copy()NEWLINENEWLINE if train_dir is not None:NEWLINE params_final['train_dir'] = train_dirNEWLINE if params_final['iterations'] > 0:NEWLINE self.model = self.model_type(NEWLINE **params_final,NEWLINE )NEWLINENEWLINE # TODO: Strangely, this performs different if clone init_model is sent in than if trained for same total number of iterations. 
May be able to optimize catboost models further with thisNEWLINE self.model.fit(NEWLINE X_train,NEWLINE eval_set=eval_set,NEWLINE verbose=verbose,NEWLINE early_stopping_rounds=early_stopping_rounds,NEWLINE # use_best_model=True,NEWLINE init_model=init_model,NEWLINE )NEWLINENEWLINE if init_model is not None:NEWLINE final_model_best_score = self.model.get_best_score()['validation'][self.metric_name]NEWLINE if self.stopping_metric._optimum > final_model_best_score:NEWLINE if final_model_best_score > init_model_best_score:NEWLINE best_iteration = init_model_tree_count + self.model.get_best_iteration()NEWLINE else:NEWLINE best_iteration = init_model_best_iterationNEWLINE else:NEWLINE if final_model_best_score < init_model_best_score:NEWLINE best_iteration = init_model_tree_count + self.model.get_best_iteration()NEWLINE else:NEWLINE best_iteration = init_model_best_iterationNEWLINENEWLINE self.model.shrink(ntree_start=0, ntree_end=best_iteration+1)NEWLINENEWLINE self.params_trained['iterations'] = self.model.tree_count_NEWLINENEWLINE def get_model_feature_importance(self):NEWLINE importance_df = self.model.get_feature_importance(prettified=True)NEWLINE importance_df['Importances'] = importance_df['Importances'] / 100NEWLINE importance_series = importance_df.set_index('Feature Id')['Importances']NEWLINE importance_dict = importance_series.to_dict()NEWLINE return importance_dictNEWLINE |
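# Illustrative sketch (not AutoGluon API): the fit above first trains a short sample of iterations to measure per-iteration cost, then budgets the remaining iterations against the time and memory left. A condensed, standalone version of that arithmetic, with hypothetical inputs:NEWLINEimport mathNEWLINENEWLINENEWLINEdef budget_final_iterations(time_limit, time_spent, num_sample_iter, total_iterations, available_mem, model_size_bytes, max_memory_usage_ratio=1.0):NEWLINE time_left = time_limit - time_spent # seconds remaining after the sample fitNEWLINE time_per_iter = time_spent / num_sample_iter # approximate cost of one boosting roundNEWLINE iters_in_time = round(time_left / time_per_iter)NEWLINE mem_per_iter = model_size_bytes / num_sample_iter # approximate model growth per roundNEWLINE max_memory_iters = math.floor(available_mem * 0.3 * max_memory_usage_ratio / mem_per_iter) # cap the model at ~30% of free memoryNEWLINE iters = min(total_iterations - num_sample_iter, iters_in_time)NEWLINE return max(min(iters, max_memory_iters - num_sample_iter), 0)NEWLINENEWLINENEWLINE# e.g. a 600s budget with 30s spent on 50 sample rounds, 8 GB free and a 40 MB sample model:NEWLINE# budget_final_iterations(600, 30, 50, 10000, 8e9, 40e6)NEWLINE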
"""Objects, functions and constants relating to OCP bounds.NEWLINENEWLINEAttributesNEWLINE----------NEWLINEDEFAULT_ASSUME_INF_BOUNDS : boolNEWLINE Default as to whether Pycollo should treat unspecified bounds as beingNEWLINE numerically infinite.NEWLINEDEFAULT_INF_VALUE : floatNEWLINE Default numerical value for when Pycollo needs to use a finite numericalNEWLINE approximation for infinity.NEWLINENEWLINE"""NEWLINENEWLINENEWLINE__all__ = ["EndpointBounds", "PhaseBounds"]NEWLINENEWLINENEWLINEfrom abc import (ABC, abstractmethod)NEWLINEfrom collections import namedtupleNEWLINEfrom numbers import NumberNEWLINEfrom typing import (Iterable, Optional, Union)NEWLINENEWLINEimport numpy as npNEWLINEimport scipy.optimize as optimizeNEWLINEimport sympy as symNEWLINENEWLINEfrom .node import NodeNEWLINEfrom .typing import OptionalBoundsTypeNEWLINEfrom .utils import (fast_sympify,NEWLINE format_for_output,NEWLINE SUPPORTED_ITER_TYPES,NEWLINE symbol_primitives,NEWLINE )NEWLINENEWLINENEWLINE# Default values for settingsNEWLINEDEFAULT_ASSUME_INF_BOUNDS = TrueNEWLINEDEFAULT_BOUND_CLASH_ABSOLUTE_TOLERANCE = 1e-6NEWLINEDEFAULT_BOUND_CLASH_RELATIVE_TOLERANCE = 1e-6NEWLINEDEFAULT_NUMERICAL_INF = 10e19NEWLINEDEFAULT_OVERRIDE_ENDPOINTS = TrueNEWLINEDEFAULT_REMOVE_CONSTANT_VARIABLES = TrueNEWLINENEWLINENEWLINE# Data structuresNEWLINEphase_info_fields = ("name", "index", "backend")NEWLINEPhaseInfo = namedtuple("PhaseInfo", phase_info_fields)NEWLINE"""Data structure for information about OCP phases.NEWLINENEWLINEThese are mostly used to format descriptive error messages for the user.NEWLINENEWLINEFieldsNEWLINE------NEWLINEname : strNEWLINE The name associated with the phaseNEWLINEindex : intNEWLINE The index of the phase.NEWLINEbackend : :py:class:`PycolloPhaseData`NEWLINE The phase backend associated with the specified OCP phase.NEWLINENEWLINE"""NEWLINENEWLINEbounds_info_fields = ("user_bnds", "user_syms", "bnds_type", "num",NEWLINE "is_variable", "none_default_allowed")NEWLINEBoundsInfo = namedtuple("BoundsInfo",NEWLINE bounds_info_fields,NEWLINE defaults=[True, True])NEWLINE"""Data structure for storing information about user-supplied bounds.NEWLINENEWLINEFieldsNEWLINE------NEWLINEuser_bnds : objNEWLINE The bounds that the user has supplied.NEWLINEuser_syms : Iterable[sym.Symbols]NEWLINE An iterable of symbols relating to the user-supplied bounds (if available).NEWLINEbnds_type : strNEWLINE String indentifying the aspect of the OCP that the bounds relate to. MostlyNEWLINE used for formatting descriptive error messages for the user.NEWLINEnum : intNEWLINE The number of variables/constraints that should be expected for the type ofNEWLINE bounds in question.NEWLINEis_variable : boolNEWLINE `True` if the bound type in question is a variable, `False` if it is aNEWLINE constraint.NEWLINEnone_default_allowed : boolNEWLINE `True` if Pycollo should automatically handle the situation where no boundsNEWLINE have been supplied. 
`False` if an error should be raised.NEWLINENEWLINE"""NEWLINENEWLINENEWLINEclass BoundsABC(ABC):NEWLINENEWLINE @abstractmethodNEWLINE def optimal_control_problem(self):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def _process_and_check_user_values(self):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def _required_variable_bounds(self):NEWLINE passNEWLINENEWLINENEWLINEclass EndpointBounds(BoundsABC):NEWLINENEWLINE def __init__(self,NEWLINE optimal_control_problem,NEWLINE *,NEWLINE parameter_variables: OptionalBoundsType = None,NEWLINE endpoint_constraints: OptionalBoundsType = None,NEWLINE ):NEWLINENEWLINE self.ocp = optimal_control_problemNEWLINE self.parameter_variables = parameter_variablesNEWLINE self.endpoint_constraints = endpoint_constraintsNEWLINENEWLINE @propertyNEWLINE def optimal_control_problem(self):NEWLINE return self.ocpNEWLINENEWLINE def _process_and_check_user_values(self):NEWLINE self._backend = self.optimal_control_problem._backendNEWLINE self._INF = self.optimal_control_problem.settings.numerical_infNEWLINE self._process_parameter_vars()NEWLINE self._process_endpoint_cons()NEWLINENEWLINE def _process_parameter_vars(self):NEWLINE user_bnds = self.parameter_variablesNEWLINE user_syms = self._backend.s_var_userNEWLINE bnds_type = "parameter variable"NEWLINE num_expected = self._backend.num_s_var_fullNEWLINE bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expected)NEWLINE self._s_bnd, self._s_needed = process_single_type_of_values(self,NEWLINE bnds_info)NEWLINENEWLINE def _process_endpoint_cons(self):NEWLINE num_b_con = self.optimal_control_problem.number_endpoint_constraintsNEWLINE user_bnds = self.endpoint_constraintsNEWLINE user_syms = [None] * num_b_conNEWLINE bnds_type = "endpoint constraints"NEWLINE num_expect = num_b_conNEWLINE bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expect,NEWLINE False)NEWLINE self._b_con_bnd, needed = process_single_type_of_values(self,NEWLINE bnds_info)NEWLINENEWLINE def _required_variable_bounds(self):NEWLINE x_bnd = self._s_bnd[self._s_needed]NEWLINE return x_bndNEWLINENEWLINENEWLINEclass PhaseBounds(BoundsABC):NEWLINE """Bounds on variables and constraints associated with a phase.NEWLINENEWLINE This class currently behaves like a data class, however additionalNEWLINE functionality will be added in the future to support robust checking of theNEWLINE user-supplied values for the bounds.NEWLINENEWLINE Intended behaviour will be::NEWLINENEWLINE * None values will be treated as no bounds, i.e. 
['-inf', 'inf'].NEWLINE * Single values will be treated as equal lower and upper bounds.NEWLINE * Mappings will be accepted for `state_variables`, `control_variables`,NEWLINE `initial_state_constraints` and `final_state_constraints`.NEWLINE * Keys in the mappings should be the strings of the correspondingNEWLINE `state_variables` or `control_variables` for the phase.NEWLINE * 'inf' values will be replaced by a large floating point value so thatNEWLINE scaling can be done automatically.NEWLINE * The 'inf' replacement value can be changed inNEWLINE `OptimalControlProblem.settings.numerical_inf`, the default is 10e19.NEWLINE * If a :obj:`np.ndarray` with size = (2, 2) is passed as a value thenNEWLINE the first dimension will be treated as corresponding to theNEWLINE variable or constraint to be bounded.NEWLINE * If iterables are passed then they may contain a combination of None,NEWLINE single numerical values, and pairs of numerical values.NEWLINE * Symbolic expressions should also be allowed if they can be convertedNEWLINE into numerical values when processed alongside auxiliary data.NEWLINENEWLINE NotesNEWLINE -----NEWLINE * 'inf' values should be avoided where possible in order to give betterNEWLINE automatic scaling.NEWLINENEWLINE AttributesNEWLINE ----------NEWLINE phaseNEWLINE The phase with which these bounds will be associated. Default value isNEWLINE `None`.NEWLINE initial_timeNEWLINE Bounds on when the phase starts. Default value is `None`.NEWLINE final_timeNEWLINE Bounds on when the phase ends. Default value is `None`.NEWLINE state_variablesNEWLINE Bounds on the phase's state variables. Default value is `None`.NEWLINE control_variablesNEWLINE Bounds on the phase's control variables. Default value is `None`.NEWLINE integral_variablesNEWLINE Bounds on the phase's integral variables. Default value is `None`.NEWLINE path_constraintsNEWLINE Bounds on the phase's path constraints. Default value is `None`.NEWLINE initial_state_constraintsNEWLINE Bounds on the phase's state variables at the initial time. DefaultNEWLINE value is `None`.NEWLINE final_state_constraintsNEWLINE Bounds on the phase's state variables at the final time. Default valueNEWLINE is `None`.NEWLINE """NEWLINENEWLINE def __init__(self,NEWLINE phase: "Phase",NEWLINE *,NEWLINE initial_time: Optional[float] = None,NEWLINE final_time: Optional[float] = None,NEWLINE state_variables: OptionalBoundsType = None,NEWLINE control_variables: OptionalBoundsType = None,NEWLINE integral_variables: OptionalBoundsType = None,NEWLINE path_constraints: OptionalBoundsType = None,NEWLINE initial_state_constraints: OptionalBoundsType = None,NEWLINE final_state_constraints: OptionalBoundsType = None,NEWLINE ):NEWLINE """Bounds on variables and constraints associated with a phase.NEWLINENEWLINE ArgsNEWLINE ----NEWLINE phaseNEWLINE The phase with which these bounds will be associated.NEWLINE initial_timeNEWLINE Bounds on when the phase starts. Default value is `None`.NEWLINE final_timeNEWLINE Bounds on when the phase ends. Default value is `None`.NEWLINE state_variablesNEWLINE Bounds on the phase's state variables. Default value is `None`.NEWLINE control_variablesNEWLINE Bounds on the phase's control variables. Default value is `None`.NEWLINE integral_variablesNEWLINE Bounds on the phase's integral variables. Default value is `None`.NEWLINE path_constraintsNEWLINE Bounds on the phase's path constraints. Default value is `None`.NEWLINE initial_state_constraintsNEWLINE Bounds on the phase's state variables at the initial time. 
DefaultNEWLINE value is `None`.NEWLINE final_state_constraintsNEWLINE Bounds on the phase's state variables at the final time. DefaultNEWLINE value is `None`.NEWLINE """NEWLINE self.ocp = phase.optimal_control_problemNEWLINE self.phase = phaseNEWLINE self.initial_time = initial_timeNEWLINE self.final_time = final_timeNEWLINE self.state_variables = state_variablesNEWLINE self.control_variables = control_variablesNEWLINE self.integral_variables = integral_variablesNEWLINE self.path_constraints = path_constraintsNEWLINE self.initial_state_constraints = initial_state_constraintsNEWLINE self.final_state_constraints = final_state_constraintsNEWLINENEWLINE @propertyNEWLINE def optimal_control_problem(self):NEWLINE return self.phase.optimal_control_problemNEWLINENEWLINE def _process_and_check_user_values(self, phase_backend):NEWLINE self._backend = phase_backendNEWLINE self._INF = self.optimal_control_problem.settings.numerical_infNEWLINE p_info = self._get_phase_info(phase_backend)NEWLINE self._process_state_vars(p_info)NEWLINE self._process_control_vars(p_info)NEWLINE self._process_integral_vars(p_info)NEWLINE self._process_time_vars(p_info)NEWLINE self._process_path_cons(p_info)NEWLINE self._process_initial_state_cons(p_info)NEWLINE self._process_final_state_cons(p_info)NEWLINENEWLINE def _get_phase_info(self, phase_backend):NEWLINE phase_name = phase_backend.ocp_phase.nameNEWLINE phase_index = phase_backend.ocp_phase.phase_numberNEWLINE phase_info = PhaseInfo(phase_name, phase_index, phase_backend)NEWLINE return phase_infoNEWLINENEWLINE def _process_state_vars(self, p_info):NEWLINE user_bnds = self.state_variablesNEWLINE user_syms = p_info.backend.y_var_userNEWLINE bnds_type = "state variable"NEWLINE num_expect = p_info.backend.num_y_var_fullNEWLINE bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expect)NEWLINE self._y_bnd, self._y_needed = process_single_type_of_values(self,NEWLINE bnds_info,NEWLINE p_info)NEWLINENEWLINE def _process_control_vars(self, p_info):NEWLINE user_bnd = self.control_variablesNEWLINE user_sym = p_info.backend.u_var_userNEWLINE bnd_type = "control variable"NEWLINE num_expect = p_info.backend.num_u_var_fullNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)NEWLINE self._u_bnd, self._u_needed = process_single_type_of_values(self,NEWLINE bnd_info,NEWLINE p_info)NEWLINENEWLINE def _process_integral_vars(self, p_info):NEWLINE user_bnd = self.integral_variablesNEWLINE user_sym = p_info.backend.q_var_userNEWLINE bnd_type = "integral variable"NEWLINE num_expect = p_info.backend.num_q_var_fullNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)NEWLINE self._q_bnd, self._q_needed = process_single_type_of_values(self,NEWLINE bnd_info,NEWLINE p_info)NEWLINENEWLINE def _process_path_cons(self, p_info):NEWLINE user_bnd = self.path_constraintsNEWLINE user_sym = [None] * p_info.backend.num_p_conNEWLINE bnd_type = "path constraints"NEWLINE num_expect = p_info.backend.num_p_conNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)NEWLINE self._p_con_bnd, needed = process_single_type_of_values(self,NEWLINE bnd_info,NEWLINE p_info)NEWLINENEWLINE def _process_time_vars(self, p_info):NEWLINE user_bnd = [self.initial_time, self.final_time]NEWLINE user_sym = p_info.backend.t_var_userNEWLINE bnd_type = "time variable"NEWLINE num_expect = p_info.backend.num_t_var_fullNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)NEWLINE self._t_bnd, self._t_needed = process_single_type_of_values(self,NEWLINE 
bnd_info,NEWLINE p_info)NEWLINE self._check_time_bounds_error((0, 0), (1, 0), p_info)NEWLINE self._check_time_bounds_error((0, 1), (1, 1), p_info)NEWLINENEWLINE def _check_time_bounds_error(self, i_1, i_2, p_info):NEWLINE arg_1 = self._t_bnd[i_1]NEWLINE arg_2 = self._t_bnd[i_2]NEWLINE if arg_1 > arg_2:NEWLINE self._raise_time_bounds_error(i_1, i_2, arg_1, arg_2, p_info)NEWLINENEWLINE def _raise_time_bounds_error(self, i_1, i_2, bnd_1, bnd_2, p_info):NEWLINE bnd_1_t0_or_tF = "initial" if i_1[0] == 0 else "final"NEWLINE bnd_1_lower_or_upper = "lower" if i_1[1] == 0 else "upper"NEWLINE bnd_2_t0_or_tF = "initial" if i_2[0] == 0 else "final"NEWLINE bnd_2_lower_or_upper = "lower" if i_2[1] == 0 else "upper"NEWLINE msg = (f"The {bnd_2_lower_or_upper} bound for the {bnd_2_t0_or_tF} "NEWLINE f"time ('{bnd_2}') must be greater than or equal to the "NEWLINE f"{bnd_1_lower_or_upper} bound for the {bnd_1_t0_or_tF} time "NEWLINE f"('{bnd_1}') in phase {p_info.name} (index #{p_info.index}).")NEWLINE raise ValueError(msg)NEWLINENEWLINE def _process_initial_state_cons(self, p_info):NEWLINE user_bnd = self.initial_state_constraintsNEWLINE user_sym = p_info.backend.y_var_userNEWLINE bnd_type = "initial state constraint"NEWLINE num_expect = p_info.backend.num_y_var_fullNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)NEWLINE y_t0_bnd, self._y_t0_needed = process_single_type_of_values(self,NEWLINE bnd_info,NEWLINE p_info)NEWLINE if self.ocp.settings.override_endpoint_bounds:NEWLINE y_t0_bnd = self._override_endpoint_bounds(y_t0_bnd)NEWLINE self._y_t0_bnd = y_t0_bndNEWLINENEWLINE def _process_final_state_cons(self, p_info):NEWLINE user_bnd = self.final_state_constraintsNEWLINE user_sym = p_info.backend.y_var_userNEWLINE bnd_type = "final state constraint"NEWLINE num_expect = p_info.backend.num_y_var_fullNEWLINE bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)NEWLINE y_tF_bnd, self._y_tF_needed = process_single_type_of_values(self,NEWLINE bnd_info,NEWLINE p_info)NEWLINE if self.ocp.settings.override_endpoint_bounds:NEWLINE y_tF_bnd = self._override_endpoint_bounds(y_tF_bnd)NEWLINE self._y_tF_bnd = y_tF_bndNEWLINENEWLINE def _override_endpoint_bounds(self, y_con_bnd):NEWLINE settings = self.ocp.settingsNEWLINE override = settings.override_endpoint_boundsNEWLINE lower_is_less = y_con_bnd[:, 0] < self._y_bnd[:, 0]NEWLINE if not override and np.any(lower_is_less):NEWLINE msg = ("An endpoint state constraint lower bound is less than the "NEWLINE "corresponding state variable lower bound and cannot be "NEWLINE "overridden because 'override_endpoint_bounds' is disabled.")NEWLINE raise ValueError(msg)NEWLINE y_con_bnd[lower_is_less, 0] = self._y_bnd[lower_is_less, 0]NEWLINE upper_is_more = y_con_bnd[:, 1] > self._y_bnd[:, 1]NEWLINE if not override and np.any(upper_is_more):NEWLINE msg = ("An endpoint state constraint upper bound is greater than the "NEWLINE "corresponding state variable upper bound and cannot be "NEWLINE "overridden because 'override_endpoint_bounds' is disabled.")NEWLINE raise ValueError(msg)NEWLINE y_con_bnd[upper_is_more, 1] = self._y_bnd[upper_is_more, 1]NEWLINE return y_con_bndNEWLINENEWLINE # def _process_potential_dual_value_to_single_value(self, bnd_info, p_info):NEWLINE # bnd = bnd_info.user_bndNEWLINE # msg = (f"Single bounds in this form ('{bnd}') are not supported.")NEWLINE # is_list = isinstance(bnd, SUPPORTED_ITER_TYPES)NEWLINE # if not is_list:NEWLINE # raise TypeError(msg)NEWLINE # is_len_2 = len(bnd) == 2NEWLINE # if not is_len_2:NEWLINE # raise ValueError(msg)NEWLINE # is_pair_same = bnd[0] == bnd[1]NEWLINE # if not is_pair_same:NEWLINE # raise ValueError(msg)NEWLINE # bnd = bnd[0]NEWLINE # bnd_info = bnd_info._replace(user_bnds=bnd)NEWLINE # return bnd_infoNEWLINENEWLINE def _required_variable_bounds(self):NEWLINE y_bnd = self._y_bnd[self._y_needed]NEWLINE u_bnd = self._u_bnd[self._u_needed]NEWLINE q_bnd = 
self._q_bnd[self._q_needed]NEWLINE t_bnd = self._t_bnd[self._t_needed]NEWLINE x_bnd = np.vstack([y_bnd, u_bnd, q_bnd, t_bnd])NEWLINE return x_bndNEWLINENEWLINENEWLINEclass Bounds:NEWLINENEWLINE def __init__(self, ocp_backend):NEWLINE self.ocp_backend = ocp_backendNEWLINE self.process_and_check_user_values()NEWLINE self.collect_required_variable_bounds()NEWLINE self.collect_required_state_variable_endpoint_bounds()NEWLINE self.collect_constraint_bounds()NEWLINE self.add_unrequired_variables_to_auxiliary_data()NEWLINENEWLINE def process_and_check_user_values(self):NEWLINE for p in self.ocp_backend.p:NEWLINE p.ocp_phase.bounds._process_and_check_user_values(p)NEWLINE self.ocp_backend.ocp.bounds._process_and_check_user_values()NEWLINENEWLINE def collect_required_variable_bounds(self):NEWLINE x_bnd = []NEWLINE for p in self.ocp_backend.p:NEWLINE p_bnds = p.ocp_phase.boundsNEWLINE x_bnd.append(p_bnds._required_variable_bounds())NEWLINE x_bnd.append(self.ocp_backend.ocp.bounds._required_variable_bounds())NEWLINE self.x_bnd = np.vstack(x_bnd)NEWLINENEWLINE def collect_required_state_variable_endpoint_bounds(self):NEWLINE y_t0_bnd = []NEWLINE y_tF_bnd = []NEWLINE for p in self.ocp_backend.p:NEWLINE p_bnd = p.ocp_phase.boundsNEWLINE y_t0_bnd.append(p_bnd._y_t0_bnd[p_bnd._y_needed])NEWLINE y_tF_bnd.append(p_bnd._y_tF_bnd[p_bnd._y_needed])NEWLINE self.y_t0_bnd = np.vstack(y_t0_bnd)NEWLINE self.y_tF_bnd = np.vstack(y_tF_bnd)NEWLINENEWLINE @propertyNEWLINE def x_bnd_lower(self):NEWLINE return self.x_bnd[:, 0]NEWLINENEWLINE @propertyNEWLINE def x_bnd_upper(self):NEWLINE return self.x_bnd[:, 1]NEWLINENEWLINE def collect_constraint_bounds(self):NEWLINE passNEWLINENEWLINE def add_unrequired_variables_to_auxiliary_data(self):NEWLINE self.aux_data = {}NEWLINE for p in self.ocp_backend.p:NEWLINE p_bnd = p.ocp_phase.boundsNEWLINE self.aux_data.update({y: np.mean(value) NEWLINE for y, y_needed, value in zip(NEWLINE p.y_var_full, p_bnd._y_needed, p_bnd._y_bnd) NEWLINE if not y_needed})NEWLINE self.aux_data.update({u: np.mean(value) NEWLINE for u, u_needed, value in zip(NEWLINE p.u_var_full, p_bnd._u_needed, p_bnd._u_bnd) NEWLINE if not u_needed})NEWLINE self.aux_data.update({q: np.mean(value) NEWLINE for q, q_needed, value in zip(NEWLINE p.q_var_full, p_bnd._q_needed, p_bnd._q_bnd) NEWLINE if not q_needed})NEWLINE self.aux_data.update({t: np.mean(value) NEWLINE for t, t_needed, value in zip(NEWLINE p.t_var_full, p_bnd._t_needed, p_bnd._t_bnd) NEWLINE if not t_needed})NEWLINE prob_bnd = self.ocp_backend.ocp.boundsNEWLINE self.aux_data.update({s: np.mean(value) NEWLINE for s, s_needed, value in zip(NEWLINE self.ocp_backend.s_var_full, prob_bnd._s_needed, prob_bnd._s_bnd) NEWLINE if not s_needed})NEWLINENEWLINENEWLINE"""NEWLINENEWLINENEWLINENEWLINENEWLINENEWLINENEWLINENEWLINENEWLINENEWLINE"""NEWLINENEWLINENEWLINEdef process_single_type_of_values(bnds_obj, bnds_info, p_info=None):NEWLINE """Given a `BoundsInfo` object, process and determine if needed.NEWLINENEWLINE Bounds can either be passed by the user as:NEWLINE * a dictionary with the keys as the OCP symbols and the values as theNEWLINE bounds;NEWLINE * no bounds via the use of `None`; orNEWLINE * an iterable of supported type (e.g. 
tuple, list, np.ndarray) providedNEWLINE that the first dimension is the number of variables/constraints ofNEWLINE that type and the second dimension is either 1 or 2 (depending onNEWLINE the circumstance).NEWLINENEWLINE Note that some forms of bounds are not supported for specific types ofNEWLINE bounds.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE `tuple`NEWLINE Of length 2 with the first item being a :py:class:`ndarray <numpy>` withNEWLINE the correctly formatted bounds and the second item being anotherNEWLINE :py:class:`ndarray <numpy>` of type `bool` stating whether the boundsNEWLINE are needed (i.e. have they been determined to be equal in upper andNEWLINE lower bound so that Pycollo can remove them from the OCP and insteadNEWLINE treat them as constants).NEWLINENEWLINE RaisesNEWLINE ------NEWLINE TypeErrorNEWLINE If the bounds supplied by the user are of a type that cannot be handledNEWLINE by Pycollo.NEWLINENEWLINE """NEWLINE if isinstance(bnds_info.user_bnds, dict):NEWLINE bnds = process_mapping_bounds_instance(bnds_obj, bnds_info, p_info)NEWLINE elif bnds_info.user_bnds is None:NEWLINE bnds = process_none_bounds_instance(bnds_obj, bnds_info, p_info)NEWLINE elif isinstance(bnds_info.user_bnds, SUPPORTED_ITER_TYPES):NEWLINE bnds = process_iterable_bounds_instance(bnds_obj, bnds_info, p_info)NEWLINE else:NEWLINE formatted_valid_types = format_for_output(SUPPORTED_ITER_TYPES)NEWLINE msg = (f"Bounds for {bnds_info.bnds_type} cannot be supplied as a "NEWLINE f"{type(bnds_info.user_bnds)}, use one of: "NEWLINE f"{formatted_valid_types}")NEWLINE raise TypeError(msg)NEWLINE bnds, needed = check_lower_against_upper(bnds_obj, bnds, bnds_info, p_info)NEWLINE return bnds, neededNEWLINENEWLINENEWLINEdef process_mapping_bounds_instance(bnds_obj, bnds_info, p_info):NEWLINE """Used to process bounds supplied by the user as a `dict`.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE listNEWLINE A list of lists with the outer length equal to the number of expectedNEWLINE bounds and the inner lengths all equal to 2.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE TypeErrorNEWLINE If the bounds type is not supported for use of a dictionary because thereNEWLINE aren't symbols associated with every variable/constraint of that type.NEWLINENEWLINE """NEWLINE if any(user_sym is None for user_sym in bnds_info.user_syms):NEWLINE msg = f"Can't use mapping for {bnds_info.bnds_type} bounds."NEWLINE raise TypeError(msg)NEWLINE bnds = []NEWLINE for bnd_i, user_sym in enumerate(bnds_info.user_syms):NEWLINE bnd = bnds_info.user_bnds.get(user_sym)NEWLINE bnd_info = BoundsInfo(bnd, user_sym, bnds_info.bnds_type, bnd_i)NEWLINE check_user_bound_missing(bnds_obj, bnd_info, p_info)NEWLINE bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)NEWLINE bnds.append(bnd)NEWLINE return bndsNEWLINENEWLINENEWLINEdef check_user_bound_missing(bnds_obj, bnds_info, p_info):NEWLINE """Check if any user-supplied bounds for a specific type are missing.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If there are bounds that need to be supplied but aren't.NEWLINENEWLINE """NEWLINE is_bnd_none = bnds_info.user_bnds is NoneNEWLINE is_inf_assumed = bnds_obj.ocp.settings.assume_inf_boundsNEWLINE if is_bnd_none and not is_inf_assumed:NEWLINE msg = (f"No bounds have been supplied for the {bnds_info.bnds_type} "NEWLINE f"'{bnds_info.user_syms}' (index #{bnds_info.num}).")NEWLINE raise ValueError(msg)NEWLINENEWLINENEWLINEdef process_iterable_bounds_instance(bnds_obj, bnds_info, p_info):NEWLINE """Used to process bounds supplied by the user as an iterable.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE listNEWLINE A list of lists with the outer length equal to the number of expectedNEWLINE bounds and the inner lengths all equal to 2.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If a required bound is missing and `assume_inf_bounds` is disabledNEWLINE (raised via `check_user_bound_missing`).NEWLINENEWLINE """NEWLINE supported_iter = isinstance(bnds_info.user_bnds[0], SUPPORTED_ITER_TYPES)NEWLINE if bnds_info.num == 1 and not supported_iter:NEWLINE bnds_info = bnds_info._replace(user_bnds=[bnds_info.user_bnds])NEWLINE bnds = []NEWLINE for bnd_i, bnd in enumerate(bnds_info.user_bnds):NEWLINE bnd_info = BoundsInfo(bnd, None, bnds_info.bnds_type, bnd_i)NEWLINE check_user_bound_missing(bnds_obj, bnd_info, p_info)NEWLINE bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)NEWLINE bnds.append(bnd)NEWLINE return bndsNEWLINENEWLINENEWLINEdef process_none_bounds_instance(bnds_obj, bnds_info, p_info):NEWLINE """Used to process bounds supplied by the user as `None`.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE listNEWLINE A list of lists with the outer length equal to the number of expectedNEWLINE bounds and the inner lengths all equal to 2.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If a required bound is missing and `assume_inf_bounds` is disabledNEWLINE (raised via `check_user_bound_missing`).NEWLINENEWLINE """NEWLINE bnds = []NEWLINE for bnd_i, user_sym in enumerate(bnds_info.user_syms):NEWLINE bnd = NoneNEWLINE bnd_info = BoundsInfo(bnd, user_sym, bnds_info.bnds_type, bnd_i)NEWLINE check_user_bound_missing(bnds_obj, bnd_info, p_info)NEWLINE bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)NEWLINE bnds.append(bnd)NEWLINE return bndsNEWLINENEWLINENEWLINEdef as_lower_upper_pair(bnds_obj, bnds_info, p_info):NEWLINE """Get the user-supplied bounds as a lower-upper pair of numeric values.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE `list`NEWLINE Pair of bounds as a lower bound (first) and an upper bound (second) inNEWLINE a `list`.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If the flattened user-supplied bounds are not either shape (1, ) orNEWLINE (2, ).NEWLINENEWLINE """NEWLINE bnds = np.array(bnds_info.user_bnds).flatten()NEWLINE if bnds.shape == (1, ):NEWLINE both = "lower and upper bounds"NEWLINE both_info = bnds_info._replace(user_bnds=bnds[0])NEWLINE lower_bnd = get_bound_as_number(bnds_obj, both_info, both, p_info)NEWLINE upper_bnd = lower_bndNEWLINE elif bnds.shape == (2, ):NEWLINE lower = "lower bound"NEWLINE upper = "upper bound"NEWLINE lower_info = bnds_info._replace(user_bnds=bnds[0])NEWLINE upper_info = bnds_info._replace(user_bnds=bnds[1])NEWLINE lower_bnd = get_bound_as_number(bnds_obj, lower_info, lower, p_info)NEWLINE upper_bnd = get_bound_as_number(bnds_obj, upper_info, upper, p_info)NEWLINE else:NEWLINE msg = (f"Bounds for the {bnds_info.bnds_type} (index #{bnds_info.num}) "NEWLINE f"must be supplied as a single value or a lower-upper pair, "NEWLINE f"got '{bnds_info.user_bnds}'.")NEWLINE raise ValueError(msg)NEWLINE lower_bnd = -bnds_obj._INF if lower_bnd is None else lower_bndNEWLINE upper_bnd = bnds_obj._INF if upper_bnd is None else upper_bndNEWLINE return [lower_bnd, upper_bnd]NEWLINENEWLINENEWLINEdef get_bound_as_number(bnds_obj, bnds_info, lower_upper, p_info):NEWLINE """Format user-supplied bounds to be a number.NEWLINENEWLINE Users can potentially supply bounds as strings (such as "inf" etc.),NEWLINE numerical values from non-core Python (e.g. :py:type:`float64 <numpy>`,NEWLINE :py:type:`DM <casadi>`), or as symbols (e.g. :py:type:`Symbol <sympy>`,NEWLINE :py:type:`SX <casadi>`) provided that they can be resolved as constants dueNEWLINE to auxiliary data supplied by the user.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE floatNEWLINE The bound as a numerical value.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If the user-supplied bound is symbolic and contains a symbol primitiveNEWLINE that cannot be resolved down to a numerical value.NEWLINE NotImplementedErrorNEWLINE If the user supplies a string bound that is unsupported, e.g. 'nan'.NEWLINENEWLINE """NEWLINE bnds = bnds_info.user_bndsNEWLINE if bnds is None:NEWLINE return bndsNEWLINE elif isinstance(bnds, str):NEWLINE if bnds == "inf":NEWLINE return bnds_obj._INFNEWLINE elif bnds == "-inf":NEWLINE return -bnds_obj._INFNEWLINE try:NEWLINE bnds = float(bnds)NEWLINE except (TypeError, ValueError):NEWLINE msg = (f"A bound value of {bnds} is not supported.")NEWLINE raise NotImplementedError(msg)NEWLINE if isinstance(bnds, (np.float64, np.int64, float, int)):NEWLINE return float(bnds)NEWLINE bnds = bnds_obj.ocp._backend.substitute_pycollo_sym(bnds)NEWLINE if symbol_primitives(bnds):NEWLINE msg = (f"The user-supplied {lower_upper} for the "NEWLINE f"{bnds_info.bnds_type} '{bnds_info.user_syms}' "NEWLINE f"(index #{bnds_info.num}) of '{bnds}' "NEWLINE f"cannot be precomputed.")NEWLINE raise ValueError(msg)NEWLINE return float(bnds)NEWLINENEWLINENEWLINEdef check_lower_against_upper(bnds_obj, bnds, bnds_info, p_info):NEWLINE """Abstraction layer for checking lower bound against upper bound in pair.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds : `list`NEWLINE The pre-processed bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE `tuple`NEWLINE The first item is an :py:type:`ndarray <numpy>` of shape (n, 2) withNEWLINE the numerical lower and upper bounds for each bound pair and theNEWLINE second item is a `bool` array stating whether each bound pair is neededNEWLINE in the OCP (`True`) or if it can be treated as a constant (`False`).NEWLINENEWLINE """NEWLINE if not bnds:NEWLINE bnds = np.empty(shape=(0, 2), dtype=float)NEWLINE needed = np.empty(shape=0, dtype=bool)NEWLINE return bnds, neededNEWLINE bnds = np.array(bnds, dtype=float)NEWLINE bnds, needed = check_lower_same_as_upper_to_tol(bnds_obj, bnds, bnds_info,NEWLINE p_info)NEWLINE bnds = check_lower_less_than_upper(bnds_obj, bnds, bnds_info, p_info)NEWLINE return bnds, neededNEWLINENEWLINENEWLINEdef check_lower_same_as_upper_to_tol(bnds_obj, bnds, bnds_info, p_info):NEWLINE """Handle case where bounds are equal to floating precision.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds : `list`NEWLINE The pre-processed bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE `tuple`NEWLINE The first item is an :py:type:`ndarray <numpy>` of shape (n, 2) withNEWLINE the numerical lower and upper bounds for each bound pair and theNEWLINE second item is a `bool` array stating whether each bound pair is neededNEWLINE in the OCP (`True`) or if it can be treated as a constant (`False`).NEWLINENEWLINE """NEWLINE lower_bnds = bnds[:, 0]NEWLINE upper_bnds = bnds[:, 1]NEWLINE atol = bnds_obj.ocp.settings.bound_clash_absolute_toleranceNEWLINE rtol = bnds_obj.ocp.settings.bound_clash_relative_toleranceNEWLINE are_same = np.isclose(lower_bnds, upper_bnds, rtol=rtol, atol=atol)NEWLINE needed = extract_variables_to_constants(bnds_obj, bnds, are_same)NEWLINE mean_bnds = (lower_bnds + upper_bnds) / 2NEWLINE bnds[are_same, 0] = mean_bnds[are_same]NEWLINE bnds[are_same, 1] = mean_bnds[are_same]NEWLINE return bnds, neededNEWLINENEWLINENEWLINEdef check_lower_less_than_upper(bnds_obj, bnds, bnds_info, p_info):NEWLINE """Ensure each lower bound is not greater than its upper bound.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds : `list`NEWLINE The pre-processed bounds.NEWLINE bnds_info : `BoundsInfo`NEWLINE The bounds info that is being processed.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE :py:type:`ndarray <numpy>`NEWLINE The validated lower-upper bound pairs with shape (n, 2).NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ValueErrorNEWLINE If any lower bounds are greater than their upper bound.NEWLINENEWLINE """NEWLINE lower_bnds = bnds[:, 0]NEWLINE upper_bnds = bnds[:, 1]NEWLINE lower_less_than_upper = lower_bnds <= upper_bndsNEWLINE all_less_than = np.all(lower_less_than_upper)NEWLINE if not all_less_than:NEWLINE error_indices = np.flatnonzero(~lower_less_than_upper)NEWLINE error_syms = np.array(bnds_info.user_syms)[error_indices]NEWLINE plural_needed = len(error_indices) > 1NEWLINE bound_plural = "bounds" if plural_needed else "bound"NEWLINE index_plural = "indices" if plural_needed else "index"NEWLINE bnds_type_plural = (f"{bnds_info.bnds_type}"NEWLINE f"{'s' if plural_needed else ''}")NEWLINE user_syms_formatted = format_for_output(error_syms)NEWLINE user_indices_formatted = format_for_output(NEWLINE error_indices, wrapping_char="", prefix_char="#")NEWLINE lower_bnds_formatted = format_for_output(lower_bnds[error_indices])NEWLINE upper_bnds_formatted = format_for_output(upper_bnds[error_indices])NEWLINE msg = (f"The user-supplied upper {bound_plural} for the "NEWLINE f"{bnds_type_plural} {user_syms_formatted} ({index_plural} "NEWLINE f"{user_indices_formatted}) of {upper_bnds_formatted} "NEWLINE f"cannot be less than the user-supplied lower "NEWLINE f"{bound_plural} of {lower_bnds_formatted}.")NEWLINE raise ValueError(msg)NEWLINE return bndsNEWLINENEWLINENEWLINEdef extract_variables_to_constants(bnds_obj, bnds, are_same):NEWLINE """Determine which bound pairs are still needed as OCP variables.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]NEWLINE The parent bounds-related object for which this function is processingNEWLINE bounds.NEWLINE bnds : `list`NEWLINE The pre-processed bounds.NEWLINE are_same : :py:type:`ndarray <numpy>`NEWLINE Boolean mask of which bound pairs have equal lower and upper bounds.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE boolNEWLINE `True` if the bounds pair is needed, `False` if not.NEWLINENEWLINE """NEWLINE if not bnds_obj.ocp.settings.remove_constant_variables:NEWLINE needed = np.full(bnds.shape[0], True)NEWLINE return neededNEWLINE needed = ~are_sameNEWLINE return neededNEWLINE
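# Illustrative sketch (not part of the Pycollo API above): the pipeline above normalises every user-supplied bounds form (`None`, a single value, or a lower-upper pair) into an (n, 2) float array plus a boolean mask of the pairs still needed as OCP variables. A minimal standalone version of that idea, assuming a plain list input:NEWLINEimport numpy as npNEWLINENEWLINENEWLINEdef normalise_bounds(user_bnds, num, inf=10e19, atol=1e-6):NEWLINE user_bnds = [None] * num if user_bnds is None else list(user_bnds)NEWLINE rows = []NEWLINE for bnd in user_bnds:NEWLINE if bnd is None: # unbounded: substitute numerical infinityNEWLINE rows.append([-inf, inf])NEWLINE elif np.ndim(bnd) == 0: # single value: equal lower and upper boundNEWLINE rows.append([float(bnd), float(bnd)])NEWLINE else: # pair: None entries fall back to +/- infNEWLINE lower, upper = bndNEWLINE rows.append([-inf if lower is None else float(lower), inf if upper is None else float(upper)])NEWLINE bnds = np.array(rows, dtype=float)NEWLINE are_same = np.isclose(bnds[:, 0], bnds[:, 1], atol=atol)NEWLINE bnds[are_same] = bnds[are_same].mean(axis=1, keepdims=True) # collapse clashes to the midpointNEWLINE return bnds, ~are_same # ~are_same: pairs that remain variablesNEWLINENEWLINENEWLINE# e.g. normalise_bounds([None, 2.0, (0, 1)], 3) -> (3, 2) array and mask [True, False, True]NEWLINE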
# Copyright (c) 2015 OpenStack FoundationNEWLINE# All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License"); you mayNEWLINE# not use this file except in compliance with the License. You may obtainNEWLINE# a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS, WITHOUTNEWLINE# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See theNEWLINE# License for the specific language governing permissions and limitationsNEWLINE# under the License.NEWLINENEWLINE"""Policy Engine For magnum."""NEWLINENEWLINEimport decoratorNEWLINEfrom oslo_config import cfgNEWLINEfrom oslo_policy import policyNEWLINEfrom oslo_utils import importutilsNEWLINEimport pecanNEWLINENEWLINEfrom magnum.common import clientsNEWLINEfrom magnum.common import exceptionNEWLINEfrom magnum.common import policiesNEWLINENEWLINENEWLINE_ENFORCER = NoneNEWLINECONF = cfg.CONFNEWLINENEWLINENEWLINE# we can get a policy enforcer by this init.NEWLINE# oslo policy support change policy rule dynamically.NEWLINE# at present, policy.enforce will reload the policy rules when it checksNEWLINE# the policy files have been touched.NEWLINEdef init(policy_file=None, rules=None,NEWLINE default_rule=None, use_conf=True, overwrite=True):NEWLINE """Init an Enforcer class.NEWLINENEWLINE :param policy_file: Custom policy file to use, if none isNEWLINE specified, ``conf.policy_file`` will beNEWLINE used.NEWLINE :param rules: Default dictionary / Rules to use. It will beNEWLINE considered just in the first instantiation. IfNEWLINE :meth:`load_rules` with ``force_reload=True``,NEWLINE :meth:`clear` or :meth:`set_rules` withNEWLINE ``overwrite=True`` is called this will be overwritten.NEWLINE :param default_rule: Default rule to use, conf.default_rule willNEWLINE be used if none is specified.NEWLINE :param use_conf: Whether to load rules from cache or config file.NEWLINE :param overwrite: Whether to overwrite existing rules when reload rulesNEWLINE from config file.NEWLINE """NEWLINE global _ENFORCERNEWLINE if not _ENFORCER:NEWLINE # http://docs.openstack.org/developer/oslo.policy/usage.htmlNEWLINE _ENFORCER = policy.Enforcer(CONF,NEWLINE policy_file=policy_file,NEWLINE rules=rules,NEWLINE default_rule=default_rule,NEWLINE use_conf=use_conf,NEWLINE overwrite=overwrite)NEWLINE _ENFORCER.register_defaults(policies.list_rules())NEWLINENEWLINE return _ENFORCERNEWLINENEWLINENEWLINEdef enforce(context, rule=None, target=None,NEWLINE do_raise=True, exc=None, *args, **kwargs):NEWLINENEWLINE """Checks authorization of a rule against the target and credentials.NEWLINENEWLINE :param dict context: As much information about the user performing theNEWLINE action as possible.NEWLINE :param rule: The rule to evaluate.NEWLINE :param dict target: As much information about the object being operatedNEWLINE on as possible.NEWLINE :param do_raise: Whether to raise an exception or not if checkNEWLINE fails.NEWLINE :param exc: Class of the exception to raise if the check fails.NEWLINE Any remaining arguments passed to :meth:`enforce` (bothNEWLINE positional and keyword arguments) will be passed toNEWLINE the exception class. 
If not specified,NEWLINE :class:`PolicyNotAuthorized` will be used.NEWLINENEWLINE :return: ``False`` if the policy does not allow the action and `exc` isNEWLINE not provided; otherwise, returns a value that evaluates toNEWLINE ``True``. Note: for rules using the "case" expression, thisNEWLINE ``True`` value will be the specified string from theNEWLINE expression.NEWLINE """NEWLINE enforcer = init()NEWLINE credentials = context.to_dict()NEWLINE if not exc:NEWLINE exc = exception.PolicyNotAuthorizedNEWLINE if target is None:NEWLINE target = {'project_id': context.project_id,NEWLINE 'user_id': context.user_id}NEWLINE add_policy_attributes(target)NEWLINE return enforcer.enforce(rule, target, credentials,NEWLINE do_raise=do_raise, exc=exc, *args, **kwargs)NEWLINENEWLINENEWLINEdef add_policy_attributes(target):NEWLINE """Adds extra information for policy enforcement to raw target object"""NEWLINE context = importutils.import_module('magnum.common.context')NEWLINE admin_context = context.make_admin_context()NEWLINE admin_osc = clients.OpenStackClients(admin_context)NEWLINE trustee_domain_id = admin_osc.keystone().trustee_domain_idNEWLINE target['trustee_domain_id'] = trustee_domain_idNEWLINE return targetNEWLINENEWLINENEWLINEdef check_is_admin(context):NEWLINE """Whether or not user is admin according to policy setting.NEWLINENEWLINE """NEWLINE init()NEWLINE target = {}NEWLINE credentials = context.to_dict()NEWLINE return _ENFORCER.enforce('context_is_admin', target, credentials)NEWLINENEWLINENEWLINEdef enforce_wsgi(api_name, act=None):NEWLINE """This is a decorator to simplify wsgi action policy rule check.NEWLINENEWLINE :param api_name: The collection name to be evaluate.NEWLINE :param act: The function name of wsgi action.NEWLINENEWLINE example:NEWLINE from magnum.common import policyNEWLINE class ClustersController(rest.RestController):NEWLINE ....NEWLINE @policy.enforce_wsgi("cluster", "delete")NEWLINE @wsme_pecan.wsexpose(None, types.uuid_or_name, status_code=204)NEWLINE def delete(self, cluster_ident):NEWLINE ...NEWLINE """NEWLINE @decorator.decoratorNEWLINE def wrapper(fn, *args, **kwargs):NEWLINE action = "%s:%s" % (api_name, (act or fn.__name__))NEWLINE enforce(pecan.request.context, action,NEWLINE exc=exception.PolicyNotAuthorized, action=action)NEWLINE return fn(*args, **kwargs)NEWLINE return wrapperNEWLINE |
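# Illustrative usage sketch (standalone oslo.policy calls mirroring init()/enforce() above; the rule name and credentials are hypothetical):NEWLINE# from oslo_config import cfgNEWLINE# from oslo_policy import policy as oslo_policyNEWLINE#NEWLINE# enforcer = oslo_policy.Enforcer(cfg.CONF)NEWLINE# enforcer.register_default(oslo_policy.RuleDefault("cluster:get", "role:admin"))NEWLINE# creds = {"project_id": "p1", "roles": ["admin"]}NEWLINE# allowed = enforcer.enforce("cluster:get", {"project_id": "p1"}, creds, do_raise=False)NEWLINE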
from django.core.management.base import BaseCommandNEWLINEfrom mainapp.models import Category, ProductsNEWLINEfrom authapp.models import UserNEWLINENEWLINEimport jsonNEWLINEimport osNEWLINENEWLINEJSON_PATH = 'mainapp/json'NEWLINENEWLINEdef load_from_json(file_name):NEWLINE with open(os.path.join(JSON_PATH, file_name + '.json'), 'r') as infile:NEWLINE return json.load(infile)NEWLINENEWLINEclass Command(BaseCommand):NEWLINE def handle(self, *args, **options):NEWLINE categories = load_from_json('categories')NEWLINENEWLINE Category.objects.all().delete()NEWLINE for category in categories:NEWLINE new_category = Category(**category)NEWLINE new_category.save()NEWLINENEWLINE products = load_from_json('products')NEWLINENEWLINE Products.objects.all().delete()NEWLINE for product in products:NEWLINE category_name = product["product_type"]NEWLINE # Get the category by its nameNEWLINE _category = Category.objects.get(name=category_name)NEWLINE # Replace the category name with the Category objectNEWLINE product['product_type'] = _categoryNEWLINE new_product = Products(**product)NEWLINE new_product.save()NEWLINENEWLINE # Create a superuser using the model managerNEWLINE User.objects.create_superuser('admin', 'admin@localhost', '123', age=29)NEWLINE
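# Illustrative data sketch (hypothetical field names; the real fixtures ship with the project):NEWLINE# mainapp/json/categories.json -> [{"name": "chairs", "description": "..."}]NEWLINE# mainapp/json/products.json -> [{"name": "stool", "product_type": "chairs", "price": 1000}]NEWLINE# 'product_type' carries a Category name and is swapped for the Category instance before Products(**product) is saved.NEWLINE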
"""Logging helper functions."""NEWLINEfrom __future__ import absolute_importNEWLINENEWLINEimport loggingNEWLINENEWLINENEWLINEclass Logger:NEWLINE """Helper class for logging."""NEWLINENEWLINE def __init__(self, level=None):NEWLINE """Set up logging instance and set log level."""NEWLINE self.logger = logging.getLogger("cloudstats")NEWLINE if level:NEWLINE self.setLevel(level)NEWLINENEWLINE if not len(self.logger.handlers):NEWLINE console = logging.StreamHandler()NEWLINE console.setFormatter(NEWLINE logging.Formatter("%(asctime)s %(levelname)s - %(message)s")NEWLINE )NEWLINE self.logger.addHandler(console)NEWLINENEWLINE def set_level(self, level="info"):NEWLINE """Set the level to the provided level."""NEWLINENEWLINE if level:NEWLINE level = level.lower()NEWLINENEWLINE if level == "debug":NEWLINE self.logger.setLevel(logging.DEBUG)NEWLINE elif level == "warn":NEWLINE self.logger.setLevel(logging.WARN)NEWLINE elif level == "error":NEWLINE self.logger.setLevel(logging.ERROR)NEWLINE else:NEWLINE self.logger.setLevel(logging.INFO)NEWLINENEWLINE def debug(self, message):NEWLINE """Log a message with debug loglevel."""NEWLINE self.logger.debug(message)NEWLINENEWLINE def info(self, message):NEWLINE """Log a message with info loglevel."""NEWLINE self.logger.info(message)NEWLINENEWLINE def warn(self, message):NEWLINE """Log a message with warn loglevel."""NEWLINE self.logger.warn(message)NEWLINENEWLINE def error(self, message):NEWLINE """Log a message with warn loglevel."""NEWLINE self.logger.error(message)NEWLINE |
# Copyright 2019 Huawei Technologies Co., LtdNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE# ============================================================================NEWLINEimport numpy as npNEWLINENEWLINEimport mindspore as msNEWLINEimport mindspore.nn as nnNEWLINEfrom mindspore.common.api import _executorNEWLINEfrom mindspore.ops import operations as PNEWLINEfrom mindspore.ops import composite as CNEWLINEfrom mindspore import Tensor, contextNEWLINEfrom tests.ut.python.ops.test_math_ops import VirtualLossNEWLINENEWLINENEWLINEgrad_all = C.GradOperation(get_all=True)NEWLINENEWLINENEWLINEclass GradWrap(nn.Cell):NEWLINE def __init__(self, network):NEWLINE super(GradWrap, self).__init__()NEWLINE self.network = networkNEWLINENEWLINE def construct(self, x, y):NEWLINE return grad_all(self.network)(x, y)NEWLINENEWLINEclass NetWithLoss(nn.Cell):NEWLINE def __init__(self, network):NEWLINE super(NetWithLoss, self).__init__()NEWLINE self.loss = VirtualLoss()NEWLINE self.network = networkNEWLINENEWLINE def construct(self, x, y):NEWLINE predict = self.network(x, y)NEWLINE return self.loss(predict)NEWLINENEWLINEclass Net(nn.Cell):NEWLINE def __init__(self, shape, offset, strategy1=None, strategy2=None, target="Device"):NEWLINE super().__init__()NEWLINE self.index = Tensor(np.ones(shape), dtype=ms.int32)NEWLINE self.offset = offsetNEWLINE self.elu = P.EmbeddingLookup().shard(strategy1).add_prim_attr("primitive_target", target)NEWLINE self.mm = P.BatchMatMul().shard(strategy2)NEWLINENEWLINE def construct(self, x, y):NEWLINE out = self.elu(x, self.index, self.offset)NEWLINE out = self.mm(out, y)NEWLINE return outNEWLINENEWLINENEWLINEdef test_embeddinglookup_reducescatter_false():NEWLINE shape = [8, 8]NEWLINE offset = 8NEWLINE net = NetWithLoss(Net(shape, offset))NEWLINE net.set_auto_parallel()NEWLINENEWLINE x = Tensor(np.ones([64, 32]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINENEWLINENEWLINEdef test_embeddinglookup_reducescatter_true():NEWLINE shape = [8, 8]NEWLINE offset = 8NEWLINE net = NetWithLoss(Net(shape, offset))NEWLINE net.set_auto_parallel()NEWLINENEWLINE x = Tensor(np.ones([64, 32]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINENEWLINENEWLINEdef test_embeddinglookup_reducescatter_false_grad():NEWLINE shape = [8, 8]NEWLINE offset = 8NEWLINE net = GradWrap(NetWithLoss(Net(shape, offset)))NEWLINE net.set_auto_parallel()NEWLINENEWLINE x = Tensor(np.ones([64, 32]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINENEWLINENEWLINEdef test_embeddinglookup_reducescatter_true_grad():NEWLINE context.set_context(save_graphs=False)NEWLINE shape = [8, 8]NEWLINE offset = 8NEWLINE net = GradWrap(NetWithLoss(Net(shape, offset)))NEWLINE 
net.set_auto_parallel()NEWLINENEWLINE x = Tensor(np.ones([64, 32]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINENEWLINENEWLINEdef test_embeddinglookup_semi_auto1():NEWLINE context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")NEWLINE shape = [64, 32]NEWLINE offset = 0NEWLINE strategy1 = ((8, 1), (1, 1))NEWLINE strategy2 = ((4, 1, 2), (4, 2, 1))NEWLINE net = GradWrap(NetWithLoss(Net(shape, offset, strategy1, strategy2, "CPU")))NEWLINENEWLINE net.set_auto_parallel()NEWLINE x = Tensor(np.ones([64, 64]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINENEWLINENEWLINEdef test_embeddinglookup_semi_auto2():NEWLINE context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")NEWLINE shape = [64, 32]NEWLINE offset = 0NEWLINE strategy1 = ((1, 8), (1, 1))NEWLINE strategy2 = ((4, 1, 2), (4, 2, 1))NEWLINE net = GradWrap(NetWithLoss(Net(shape, offset, strategy1, strategy2, "CPU")))NEWLINENEWLINE net.set_auto_parallel()NEWLINE x = Tensor(np.ones([64, 64]), dtype=ms.float32)NEWLINE y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)NEWLINE net.set_train()NEWLINE _executor.compile(net, x, y)NEWLINE |
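# Illustrative note (one reading of the shard strategies above; not MindSpore test code):NEWLINE# a shard strategy gives one tuple per operator input, holding the number of slicesNEWLINE# along each of that input's dimensions; the slice counts of each tuple must multiplyNEWLINE# to at most the device count. For the 8-device tests above:NEWLINE# strategy1 = ((8, 1), (1, 1)) # EmbeddingLookup: split the parameter table's rows 8-way, replicate the indicesNEWLINE# strategy1 = ((1, 8), (1, 1)) # variant: split the table's columns insteadNEWLINE# strategy2 = ((4, 1, 2), (4, 2, 1)) # BatchMatMul: 4-way batch split with a 2-way split of the contracted dimensionNEWLINE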
#NEWLINE# Copyright 2019 The FATE Authors. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE#NEWLINEimport ioNEWLINEimport osNEWLINEfrom typing import IterableNEWLINENEWLINEfrom pyarrow import fsNEWLINENEWLINEfrom fate_arch.common import hdfs_utilsNEWLINEfrom fate_arch.common.log import getLoggerNEWLINEfrom fate_arch.storage import StorageEngine, HDFSStorageTypeNEWLINEfrom fate_arch.storage import StorageTableBaseNEWLINENEWLINELOGGER = getLogger()NEWLINENEWLINENEWLINEclass StorageTable(StorageTableBase):NEWLINE def __init__(self,NEWLINE address=None,NEWLINE name: str = None,NEWLINE namespace: str = None,NEWLINE partitions: int = None,NEWLINE storage_type: HDFSStorageType = None,NEWLINE options=None):NEWLINE super(StorageTable, self).__init__(name=name, namespace=namespace)NEWLINE self._address = addressNEWLINE self._name = nameNEWLINE self._namespace = namespaceNEWLINE self._partitions = partitions if partitions else 1NEWLINE self._type = storage_type if storage_type else HDFSStorageType.DISKNEWLINE self._options = options if options else {}NEWLINE self._engine = StorageEngine.HDFSNEWLINENEWLINE # tricky way to load libhdfsNEWLINE try:NEWLINE from pyarrow import HadoopFileSystemNEWLINE HadoopFileSystem(self._path)NEWLINE except Exception as e:NEWLINE LOGGER.warning(f"load libhdfs failed: {e}")NEWLINE self._hdfs_client = fs.HadoopFileSystem.from_uri(self._path)NEWLINENEWLINE def get_name(self):NEWLINE return self._nameNEWLINENEWLINE def get_namespace(self):NEWLINE return self._namespaceNEWLINENEWLINE def get_address(self):NEWLINE return self._addressNEWLINENEWLINE def get_engine(self):NEWLINE return self._engineNEWLINENEWLINE def get_type(self):NEWLINE return self._typeNEWLINENEWLINE def get_partitions(self):NEWLINE return self._partitionsNEWLINENEWLINE def get_options(self):NEWLINE return self._optionsNEWLINENEWLINE def put_all(self, kv_list: Iterable, append=True, assume_file_exist=False, **kwargs):NEWLINE LOGGER.info(f"put in hdfs file: {self._path}")NEWLINE if append and (assume_file_exist or self._exist()):NEWLINE stream = self._hdfs_client.open_append_stream(path=self._path, compression=None)NEWLINE else:NEWLINE stream = self._hdfs_client.open_output_stream(path=self._path, compression=None)NEWLINENEWLINE counter = 0NEWLINE with io.TextIOWrapper(stream) as writer:NEWLINE for k, v in kv_list:NEWLINE writer.write(hdfs_utils.serialize(k, v))NEWLINE writer.write(hdfs_utils.NEWLINE)NEWLINE counter = counter + 1NEWLINE self._meta.update_metas(count=counter)NEWLINENEWLINE def collect(self, **kwargs) -> list:NEWLINE for line in self._as_generator():NEWLINE yield hdfs_utils.deserialize(line.rstrip())NEWLINENEWLINE def read(self) -> list:NEWLINE for line in self._as_generator():NEWLINE yield lineNEWLINENEWLINE def destroy(self):NEWLINE super().destroy()NEWLINE self._hdfs_client.delete_file(self._path)NEWLINENEWLINE def count(self):NEWLINE count = 0NEWLINE for _ in self._as_generator():NEWLINE count += 1NEWLINE 
self.get_meta().update_metas(count=count)NEWLINE return countNEWLINENEWLINE def save_as(self, address, partitions=None, name=None, namespace=None, schema=None, **kwargs):NEWLINE super().save_as(name, namespace, partitions=partitions, schema=schema)NEWLINE self._hdfs_client.copy_file(src=self._path, dst=address.path)NEWLINE return StorageTable(address=address, partitions=partitions, name=name, namespace=namespace, **kwargs)NEWLINENEWLINE def close(self):NEWLINE passNEWLINENEWLINE @propertyNEWLINE def _path(self) -> str:NEWLINE return f"{self._address.name_node}/{self._address.path}"NEWLINENEWLINE def _exist(self):NEWLINE info = self._hdfs_client.get_file_info([self._path])[0]NEWLINE return info.type != fs.FileType.NotFoundNEWLINENEWLINE def _as_generator(self):NEWLINE info = self._hdfs_client.get_file_info([self._path])[0]NEWLINE if info.type == fs.FileType.NotFound:NEWLINE raise FileNotFoundError(f"file {self._path} not found")NEWLINENEWLINE elif info.type == fs.FileType.File:NEWLINE with io.TextIOWrapper(buffer=self._hdfs_client.open_input_stream(self._path),NEWLINE encoding="utf-8") as reader:NEWLINE for line in reader:NEWLINE yield lineNEWLINENEWLINE else:NEWLINE selector = fs.FileSelector(os.path.join("/", self._address.path))NEWLINE file_infos = self._hdfs_client.get_file_info(selector)NEWLINE for file_info in file_infos:NEWLINE if file_info.base_name == "_SUCCESS":NEWLINE continueNEWLINE assert file_info.is_file, f"directory {self._path} contains a subdirectory: {file_info.path}"NEWLINE with io.TextIOWrapper(NEWLINE buffer=self._hdfs_client.open_input_stream(f"{self._address.name_node}/{file_info.path}"),NEWLINE encoding="utf-8") as reader:NEWLINE for line in reader:NEWLINE yield lineNEWLINE
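# Illustrative usage sketch (hypothetical HDFS address object; not FATE test code):NEWLINE# addr must expose .name_node and .path, e.g. "hdfs://namenode:9000" and "/data/ns/t1".NEWLINE# table = StorageTable(address=addr, name="t1", namespace="ns", partitions=1)NEWLINE# table.put_all([("k1", [1, 2]), ("k2", [3, 4])]) # one serialised k/v pair per lineNEWLINE# rows = list(table.collect()) # -> [("k1", [1, 2]), ("k2", [3, 4])], via hdfs_utils.deserializeNEWLINE# n = table.count() # also refreshes the stored count metadataNEWLINE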
# Copyright 2019 kubeflow.org.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom http import HTTPStatusNEWLINENEWLINEimport tornado.ioloopNEWLINEimport tornado.webNEWLINEimport tornado.httpserverNEWLINEimport argparseNEWLINEimport osNEWLINEimport loggingNEWLINEimport jsonNEWLINEfrom enum import EnumNEWLINEfrom kfserving.model import KFModelNEWLINEfrom typing import List, Dict, Optional, AnyNEWLINEfrom kfserving.protocols.request_handler import RequestHandlerNEWLINEfrom kfserving.protocols.tensorflow_http import TensorflowRequestHandlerNEWLINEfrom kfserving.protocols.seldon_http import SeldonRequestHandlerNEWLINENEWLINEDEFAULT_HTTP_PORT = 8080NEWLINEDEFAULT_GRPC_PORT = 8081NEWLINENEWLINENEWLINEclass Protocol(Enum):NEWLINE tensorflow_http = "tensorflow.http"NEWLINE seldon_http = "seldon.http"NEWLINENEWLINE def __str__(self):NEWLINE return self.valueNEWLINENEWLINENEWLINEparser = argparse.ArgumentParser(add_help=False)NEWLINEparser.add_argument('--http_port', default=DEFAULT_HTTP_PORT, type=int,NEWLINE help='The HTTP Port listened to by the model server.')NEWLINEparser.add_argument('--grpc_port', default=DEFAULT_GRPC_PORT, type=int,NEWLINE help='The GRPC Port listened to by the model server.')NEWLINEparser.add_argument('--protocol', type=Protocol, choices=list(Protocol),NEWLINE default="tensorflow.http",NEWLINE help='The protocol served by the model server')NEWLINEargs, _ = parser.parse_known_args()NEWLINENEWLINEKFSERVER_LOGLEVEL = os.environ.get('KFSERVER_LOGLEVEL', 'INFO').upper()NEWLINElogging.basicConfig(level=KFSERVER_LOGLEVEL)NEWLINENEWLINEPREDICTOR_URL_FORMAT = "http://{0}/v1/models/{1}:predict"NEWLINENEWLINENEWLINEclass KFServer(object):NEWLINE def __init__(self, protocol: Protocol = args.protocol, http_port: int = args.http_port,NEWLINE grpc_port: int = args.grpc_port):NEWLINE self.registered_models: Dict[str, KFModel] = {}NEWLINE self.http_port = http_portNEWLINE self.grpc_port = grpc_portNEWLINE self.protocol = protocolNEWLINE self._http_server: Optional[tornado.httpserver.HTTPServer] = NoneNEWLINENEWLINE def create_application(self):NEWLINE return tornado.web.Application([NEWLINE # Server Liveness API returns 200 if server is alive.NEWLINE (r"/", LivenessHandler),NEWLINE # Protocol Discovery API that returns the serving protocol supported by this server.NEWLINE (r"/protocol", ProtocolHandler, dict(protocol=self.protocol)),NEWLINE # Prometheus Metrics API that returns metrics for model serversNEWLINE (r"/v1/metrics", MetricsHandler, dict(models=self.registered_models)),NEWLINE # Model Health API returns 200 if model is ready to serve. Tornado dispatchesNEWLINE # to the first route whose pattern matches, so the bare model path is registeredNEWLINE # exactly once; prediction is served on the :predict verb below.NEWLINE (r"/v1/models/([a-zA-Z0-9_-]+)",NEWLINE ModelHealthHandler, dict(models=self.registered_models)),NEWLINE # Predict API executes predict on input tensors, via the Custom PredictNEWLINE # Verb for Tensorflow compatibilityNEWLINE 
(r"/v1/models/([a-zA-Z0-9_-]+):predict",NEWLINE ModelPredictHandler, dict(protocol=self.protocol, models=self.registered_models)),NEWLINE (r"/v1/models/([a-zA-Z0-9_-]+):explain",NEWLINE ModelExplainHandler, dict(protocol=self.protocol, models=self.registered_models)),NEWLINE ])NEWLINENEWLINE def start(self, models: List[KFModel] = []):NEWLINE # TODO add a GRPC serverNEWLINE for model in models:NEWLINE self.register_model(model)NEWLINENEWLINE self._http_server = tornado.httpserver.HTTPServer(self.create_application())NEWLINENEWLINE logging.info("Listening on port %s" % self.http_port)NEWLINE self._http_server.bind(self.http_port)NEWLINE self._http_server.start(0) # Forks workers equal to host's coresNEWLINE tornado.ioloop.IOLoop.current().start()NEWLINENEWLINE def register_model(self, model: KFModel):NEWLINE if not model.name:NEWLINE raise Exception("Failed to register model, model.name must be provided.")NEWLINE self.registered_models[model.name] = modelNEWLINE logging.info("Registering model:" + model.name)NEWLINENEWLINENEWLINEdef get_request_handler(protocol, request: Dict) -> RequestHandler:NEWLINE if protocol == Protocol.tensorflow_http:NEWLINE return TensorflowRequestHandler(request)NEWLINE else:NEWLINE return SeldonRequestHandler(request)NEWLINENEWLINENEWLINEclass ModelExplainHandler(tornado.web.RequestHandler):NEWLINENEWLINE def initialize(self, protocol: str, models: Dict[str, KFModel]):NEWLINE self.protocol = protocolNEWLINE self.models = modelsNEWLINENEWLINE def post(self, name: str):NEWLINENEWLINE # TODO Add metricsNEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.NOT_FOUND,NEWLINE reason="Model with name %s does not exist." % nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE if not model.ready:NEWLINE model.load()NEWLINENEWLINE try:NEWLINE body = json.loads(self.request.body)NEWLINE except json.decoder.JSONDecodeError as e:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.BAD_REQUEST,NEWLINE reason="Unrecognized request format: %s" % eNEWLINE )NEWLINENEWLINE request_handler: RequestHandler = get_request_handler(self.protocol, body)NEWLINE request_handler.validate()NEWLINE request = request_handler.extract_request()NEWLINE explanation = model.explain(request)NEWLINENEWLINE self.write(explanation)NEWLINENEWLINENEWLINEclass ModelPredictHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, protocol: str, models: Dict[str, KFModel]):NEWLINE self.protocol = protocolNEWLINE self.models = modelsNEWLINENEWLINE def post(self, name: str):NEWLINE # TODO Add metricsNEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.NOT_FOUND,NEWLINE reason="Model with name %s does not exist." 
% nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE if not model.ready:NEWLINE model.load()NEWLINENEWLINE try:NEWLINE body = json.loads(self.request.body)NEWLINE except json.decoder.JSONDecodeError as e:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=HTTPStatus.BAD_REQUEST,NEWLINE reason="Unrecognized request format: %s" % eNEWLINE )NEWLINENEWLINE # for predictor this is noopNEWLINE # for transformer the preprocess step transforms the body to request that is conforming to data plane protocolNEWLINE request = model.preprocess(body)NEWLINE # validate if the request to predictor is conforming to data plane protocolNEWLINE request_handler: RequestHandler = get_request_handler(self.protocol, request)NEWLINE request_handler.validate()NEWLINE inputs = request_handler.extract_request()NEWLINE # for predictor this does in-place predictionNEWLINE # for transformer it calls out to predictorNEWLINE results = model.predict(inputs)NEWLINE # for predictor this is noopNEWLINE # for transformer the postprocess step transforms the result to what user expectsNEWLINE outputs = model.postprocess(results)NEWLINE response = request_handler.wrap_response(outputs)NEWLINENEWLINE self.write(response)NEWLINENEWLINENEWLINEclass LivenessHandler(tornado.web.RequestHandler):NEWLINE def get(self):NEWLINE self.write("Alive")NEWLINENEWLINENEWLINEclass ProtocolHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, protocol: Protocol):NEWLINE self.protocol = protocolNEWLINENEWLINE def get(self):NEWLINE self.write(str(self.protocol.value))NEWLINENEWLINENEWLINEclass MetricsHandler(tornado.web.RequestHandler):NEWLINE def get(self):NEWLINE self.write("Not Implemented")NEWLINENEWLINENEWLINEclass ModelHealthHandler(tornado.web.RequestHandler):NEWLINE def initialize(self, models: Dict[str, KFModel]):NEWLINE self.models = modelsNEWLINENEWLINE def get(self, name: str):NEWLINE if name not in self.models:NEWLINE raise tornado.web.HTTPError(NEWLINE status_code=404,NEWLINE reason="Model with name %s does not exist." % nameNEWLINE )NEWLINENEWLINE model = self.models[name]NEWLINE self.write(json.dumps({NEWLINE "name": model.name,NEWLINE "ready": model.readyNEWLINE }))NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE s = KFServer()NEWLINE s.start()NEWLINE |
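# --- Illustrative usage sketch (not part of the kfserving source above) ---NEWLINE# A minimal model that plugs into the KFServer defined above. The KFModel interfaceNEWLINE# assumed here (name/ready attributes, load()/predict() methods, inherited identityNEWLINE# preprocess/postprocess) is inferred from how the handlers above use registeredNEWLINE# models, not from the library's documented contract.NEWLINEclass EchoModel(KFModel):NEWLINE def __init__(self, name: str):NEWLINE self.name = nameNEWLINE self.ready = FalseNEWLINENEWLINE def load(self):NEWLINE self.ready = True # a real model would load weights hereNEWLINENEWLINE def predict(self, request):NEWLINE return {"predictions": request} # echo back the extracted inputsNEWLINENEWLINEif __name__ == "__main__":NEWLINE KFServer().start(models=[EchoModel("echo")]) # serves POST /v1/models/echo:predictNEWLINE |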
# -*- coding: utf-8 -*-NEWLINE"""Writer for MacOS script files."""NEWLINENEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEimport osNEWLINENEWLINEfrom l2tdevtools.dependency_writers import interfaceNEWLINENEWLINENEWLINEclass MacOSInstallScriptWriter(interface.DependencyFileWriter):NEWLINE """MacOS installation script file writer."""NEWLINENEWLINE _TEMPLATE_FILE = os.path.join('data', 'templates', 'macos_install.sh')NEWLINENEWLINE PATH = os.path.join('config', 'macos', 'install.sh')NEWLINENEWLINE def Write(self):NEWLINE """Writes an install.sh file."""NEWLINE dependencies = self._dependency_helper.GetL2TBinaries(python_version=2)NEWLINENEWLINE template_mappings = {NEWLINE 'dependencies': ' '.join(dependencies),NEWLINE 'project_name': self._project_definition.name,NEWLINE }NEWLINENEWLINE template_file = os.path.join(self._l2tdevtools_path, self._TEMPLATE_FILE)NEWLINE file_content = self._GenerateFromTemplate(template_file, template_mappings)NEWLINENEWLINE file_content = file_content.encode('utf-8')NEWLINENEWLINE with open(self.PATH, 'wb') as file_object:NEWLINE file_object.write(file_content)NEWLINENEWLINENEWLINEclass MacOSMakeDistScriptWriter(interface.DependencyFileWriter):NEWLINE """MacOS make distribution script file writer."""NEWLINENEWLINE _TEMPLATE_FILE = os.path.join('data', 'templates', 'macos_make_dist.sh')NEWLINENEWLINE PATH = os.path.join('config', 'macos', 'make_dist.sh')NEWLINENEWLINE def Write(self):NEWLINE """Writes a make_dist.sh file."""NEWLINE dependencies = self._dependency_helper.GetL2TBinaries(python_version=2)NEWLINENEWLINE template_mappings = {NEWLINE 'dependencies': ' '.join(dependencies),NEWLINE 'project_name': self._project_definition.name,NEWLINE }NEWLINENEWLINE template_file = os.path.join(self._l2tdevtools_path, self._TEMPLATE_FILE)NEWLINE file_content = self._GenerateFromTemplate(template_file, template_mappings)NEWLINENEWLINE file_content = file_content.encode('utf-8')NEWLINENEWLINE with open(self.PATH, 'wb') as file_object:NEWLINE file_object.write(file_content)NEWLINENEWLINENEWLINEclass MacOSUninstallScriptWriter(interface.DependencyFileWriter):NEWLINE """MacOS uninstallation script file writer."""NEWLINENEWLINE _TEMPLATE_FILE = os.path.join('data', 'templates', 'macos_uninstall.sh')NEWLINENEWLINE PATH = os.path.join('config', 'macos', 'uninstall.sh')NEWLINENEWLINE def Write(self):NEWLINE """Writes an uninstall.sh file."""NEWLINE dependencies = self._dependency_helper.GetL2TBinaries(python_version=2)NEWLINENEWLINE template_mappings = {NEWLINE 'dependencies': ' '.join(dependencies),NEWLINE 'project_name': self._project_definition.name,NEWLINE }NEWLINENEWLINE template_file = os.path.join(self._l2tdevtools_path, self._TEMPLATE_FILE)NEWLINE file_content = self._GenerateFromTemplate(template_file, template_mappings)NEWLINENEWLINE file_content = file_content.encode('utf-8')NEWLINENEWLINE with open(self.PATH, 'wb') as file_object:NEWLINE file_object.write(file_content)NEWLINE |
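# --- Illustrative usage sketch (not part of the l2tdevtools source above) ---NEWLINE# All three writers above share the same Write() flow. Driving one might look likeNEWLINE# this; the DependencyFileWriter constructor arguments are assumptions based on theNEWLINE# attributes the methods use (_l2tdevtools_path, _project_definition, _dependency_helper).NEWLINEwriter = MacOSInstallScriptWriter(NEWLINE l2tdevtools_path, project_definition, dependency_helper) # arguments hypotheticalNEWLINEwriter.Write() # renders data/templates/macos_install.sh into config/macos/install.shNEWLINE |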
# This trainer is based on the Trainer forked from transformers at commit hash 935e346959e81aa96a772c11d05734ea652932d1NEWLINE# and changes the training process to include discrete adversarial trainingNEWLINE# TODO - perhaps should copy the trainer code here as well?NEWLINENEWLINEimport collectionsNEWLINEfrom datetime import datetimeNEWLINEimport mathNEWLINEimport osNEWLINEfrom time import timeNEWLINEfrom typing import Optional, Union, Dict, Any, TupleNEWLINEimport pandas as pdNEWLINENEWLINEimport torchNEWLINEfrom torch.utils.data import DataLoader, DistributedSamplerNEWLINEfrom torch import nnNEWLINEfrom transformers import Trainer, TrainerState, is_apex_available, PreTrainedModel, is_torch_tpu_available, WEIGHTS_NAMENEWLINEfrom packaging import versionNEWLINEfrom transformers.integrations import hp_paramsNEWLINEfrom transformers.trainer_utils import TrainOutputNEWLINEfrom transformers.utils import loggingNEWLINENEWLINEfrom attacks.analyze_robustness import RunningCounterNEWLINEfrom hf_transformers.adv_training_args import AdversarialTrainingArgumentsNEWLINEfrom hf_transformers.adv_utils import get_adversarial_inputs, split_batch, get_attackerNEWLINEfrom common.utils import set_seed_everywhereNEWLINENEWLINE_use_apex = FalseNEWLINE_use_native_amp = False # initialize both flags up front so the fp16 checks below cannot hit a NameError on torch < 1.6NEWLINENEWLINE# Check if Pytorch version >= 1.6 to switch between Native AMP and ApexNEWLINEif version.parse(torch.__version__) < version.parse("1.6"):NEWLINENEWLINE if is_apex_available():NEWLINE from apex import ampNEWLINE _use_apex = TrueNEWLINEelse:NEWLINE _use_native_amp = TrueNEWLINE from torch.cuda.amp import autocastNEWLINENEWLINEif is_torch_tpu_available():NEWLINE import torch_xla.core.xla_model as xmNEWLINE import torch_xla.debug.metrics as metNEWLINE import torch_xla.distributed.parallel_loader as plNEWLINENEWLINEif version.parse(torch.__version__) < version.parse("1.2"):NEWLINE _use_ddp_no_sync = FalseNEWLINEelse:NEWLINE _use_ddp_no_sync = TrueNEWLINENEWLINENEWLINElogger = logging.get_logger(__name__)NEWLINENEWLINENEWLINEclass AdversarialTrainer(Trainer):NEWLINE """NEWLINE Trainer is a simple but feature-complete training and eval loop for PyTorch, optimized for 🤗 Transformers.NEWLINENEWLINE Args:NEWLINE model (:class:`~transformers.PreTrainedModel` or :obj:`torch.nn.Module`, `optional`):NEWLINE The model to train, evaluate or use for predictions. If not provided, a ``model_init`` must be passed.NEWLINENEWLINE .. note::NEWLINENEWLINE :class:`~transformers.Trainer` is optimized to work with the :class:`~transformers.PreTrainedModel`NEWLINE provided by the library. You can still use your own models defined as :obj:`torch.nn.Module` as long asNEWLINE they work the same way as the 🤗 Transformers models.NEWLINE args (:class:`~transformers.TrainingArguments`, `optional`):NEWLINE The arguments to tweak for training. Will default to a basic instance ofNEWLINE :class:`~transformers.TrainingArguments` with the ``output_dir`` set to a directory named `tmp_trainer` inNEWLINE the current directory if not provided.NEWLINE data_collator (:obj:`DataCollator`, `optional`):NEWLINE The function to use to form a batch from a list of elements of :obj:`train_dataset` or :obj:`eval_dataset`.NEWLINE Will default to :func:`~transformers.default_data_collator` if no ``tokenizer`` is provided, an instance ofNEWLINE :func:`~transformers.DataCollatorWithPadding` otherwise.NEWLINE train_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):NEWLINE The dataset to use for training. 
If it is an :obj:`datasets.Dataset`, columns not accepted by theNEWLINE ``model.forward()`` method are automatically removed.NEWLINE eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):NEWLINE The dataset to use for evaluation. If it is an :obj:`datasets.Dataset`, columns not accepted by theNEWLINE ``model.forward()`` method are automatically removed.NEWLINE tokenizer (:class:`PreTrainedTokenizerBase`, `optional`):NEWLINE The tokenizer used to preprocess the data. If provided, will be used to automatically pad the inputs theNEWLINE maximum length when batching inputs, and it will be saved along the model to make it easier to rerun anNEWLINE interrupted training or reuse the fine-tuned model.NEWLINE model_init (:obj:`Callable[[], PreTrainedModel]`, `optional`):NEWLINE A function that instantiates the model to be used. If provided, each call toNEWLINE :meth:`~transformers.Trainer.train` will start from a new instance of the model as given by this function.NEWLINENEWLINE The function may have zero argument, or a single one containing the optuna/Ray Tune trial object, to beNEWLINE able to choose different architectures according to hyper parameters (such as layer count, sizes of innerNEWLINE layers, dropout probabilities etc).NEWLINE compute_metrics (:obj:`Callable[[EvalPrediction], Dict]`, `optional`):NEWLINE The function that will be used to compute metrics at evaluation. Must take aNEWLINE :class:`~transformers.EvalPrediction` and return a dictionary string to metric values.NEWLINE callbacks (List of :obj:`~transformers.TrainerCallback`, `optional`):NEWLINE A list of callbacks to customize the training loop. Will add those to the list of default callbacksNEWLINE detailed in :doc:`here <callback>`.NEWLINENEWLINE If you want to remove one of the default callbacks used, use the :meth:`Trainer.remove_callback` method.NEWLINE optimizers (:obj:`Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR`, `optional`): A tupleNEWLINE containing the optimizer and the scheduler to use. Will default to an instance ofNEWLINE :class:`~transformers.AdamW` on your model and a scheduler given byNEWLINE :func:`~transformers.get_linear_schedule_with_warmup` controlled by :obj:`args`.NEWLINE """NEWLINE def __init__(self, max_seq_len, adv_training_args: AdversarialTrainingArguments, *args, **kwargs):NEWLINE super().__init__(*args, **kwargs)NEWLINE self.max_seq_len = max_seq_lenNEWLINE self.adv_args = adv_training_argsNEWLINE if self.adv_args.adversarial_every != 1:NEWLINE raise NotImplementedError('There is currently a problem with the implementation of not exploring every time, 'NEWLINE 'so this cannot be used. use adversarial_every 1')NEWLINENEWLINE @staticmethodNEWLINE def _update_timing(timings, key, t):NEWLINE timings[key].update(time() - t)NEWLINE return time()NEWLINENEWLINE def train(self, model_path: Optional[str] = None, trial: Union["optuna.Trial", Dict[str, Any]] = None):NEWLINE """NEWLINE Main training entry point.NEWLINENEWLINE Args:NEWLINE model_path (:obj:`str`, `optional`):NEWLINE Local path to the model if the model to train has been instantiated from a local path. 
If present,NEWLINE training will resume from the optimizer/scheduler states loaded here.NEWLINE trial (:obj:`optuna.Trial` or :obj:`Dict[str, Any]`, `optional`):NEWLINE The trial run or the hyperparameter dictionary for hyperparameter search.NEWLINE """NEWLINE # This might change the seed so needs to run first.NEWLINE print('Entered adversarial trainer train procedure')NEWLINE t0 = datetime.now()NEWLINE t = time()NEWLINE timings = collections.defaultdict(RunningCounter)NEWLINENEWLINE self._hp_search_setup(trial)NEWLINENEWLINE t = self._update_timing(timings, 'hp_setup', t)NEWLINENEWLINE # Model re-initNEWLINE if self.model_init is not None:NEWLINE # Seed must be set before instantiating the model when using model_init.NEWLINE set_seed_everywhere(self.args.seed)NEWLINENEWLINE model = self.call_model_init(trial)NEWLINENEWLINE if not self.args.model_parallel:NEWLINE self.model = model.to(self.args.device)NEWLINENEWLINE # Reinitializes optimizer and schedulerNEWLINE self.optimizer, self.lr_scheduler = None, NoneNEWLINENEWLINE t = self._update_timing(timings, 'model_init', t)NEWLINENEWLINE # Keeping track whether we can len() on the dataset or notNEWLINE train_dataset_is_sized = isinstance(self.train_dataset, collections.abc.Sized)NEWLINENEWLINE # Data loader and number of training stepsNEWLINE train_dataloader = self.get_train_dataloader()NEWLINE t = self._update_timing(timings, 'train dataloader', t)NEWLINE adv_train_dataloaders = self.get_adv_train_dataloaders()NEWLINE t = self._update_timing(timings, 'adv dataloaders', t)NEWLINENEWLINE # Setting up training control variables:NEWLINE # number of training epochs: num_train_epochsNEWLINE # number of training steps per epoch: num_update_steps_per_epochNEWLINE # total number of training steps to execute: max_stepsNEWLINE if train_dataset_is_sized:NEWLINE num_update_steps_per_epoch = len(train_dataloader) // self.args.gradient_accumulation_stepsNEWLINE num_update_steps_per_epoch = max(num_update_steps_per_epoch, 1)NEWLINE if self.args.max_steps > 0:NEWLINE max_steps = self.args.max_stepsNEWLINE num_train_epochs = self.args.max_steps // num_update_steps_per_epoch + int(NEWLINE self.args.max_steps % num_update_steps_per_epoch > 0NEWLINE )NEWLINE else:NEWLINE max_steps = math.ceil(self.args.num_train_epochs * num_update_steps_per_epoch)NEWLINE num_train_epochs = math.ceil(self.args.num_train_epochs)NEWLINE else:NEWLINE # see __init__. 
max_steps is set when the dataset has no __len__NEWLINE max_steps = self.args.max_stepsNEWLINE num_train_epochs = 1NEWLINE num_update_steps_per_epoch = max_stepsNEWLINENEWLINE self.create_optimizer_and_scheduler(num_training_steps=max_steps)NEWLINE self.state = TrainerState()NEWLINE self.state.is_hyper_param_search = trial is not NoneNEWLINENEWLINE # Check if saved optimizer or scheduler states existNEWLINE self._load_optimizer_and_scheduler(model_path)NEWLINENEWLINE # Mixed precision training with apex (torch < 1.6)NEWLINE model = self.modelNEWLINE if self.args.fp16 and _use_apex:NEWLINE if not is_apex_available():NEWLINE raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")NEWLINE model, self.optimizer = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)NEWLINENEWLINE # Multi-gpu training (should be after apex fp16 initialization)NEWLINE if self.args.n_gpu > 1 and not self.args.model_parallel:NEWLINE model = torch.nn.DataParallel(model)NEWLINENEWLINE # Distributed training (should be after apex fp16 initialization)NEWLINE if self.args.local_rank != -1:NEWLINE model = torch.nn.parallel.DistributedDataParallel(NEWLINE model,NEWLINE device_ids=[self.args.local_rank],NEWLINE output_device=self.args.local_rank,NEWLINE find_unused_parameters=(NEWLINE not getattr(model.config, "gradient_checkpointing", False)NEWLINE if isinstance(model, PreTrainedModel)NEWLINE else TrueNEWLINE ),NEWLINE )NEWLINE # find_unused_parameters breaks checkpointing as perNEWLINE # https://github.com/huggingface/transformers/pull/4659#issuecomment-643356021NEWLINENEWLINE # Train!NEWLINE if is_torch_tpu_available():NEWLINE total_train_batch_size = self.args.train_batch_size * xm.xrt_world_size()NEWLINE else:NEWLINE total_train_batch_size = (NEWLINE self.args.train_batch_sizeNEWLINE * self.args.gradient_accumulation_stepsNEWLINE * (torch.distributed.get_world_size() if self.args.local_rank != -1 else 1)NEWLINE )NEWLINENEWLINE num_examples = (NEWLINE self.num_examples(train_dataloader)NEWLINE if train_dataset_is_sizedNEWLINE else total_train_batch_size * self.args.max_stepsNEWLINE )NEWLINENEWLINE logger.info("***** Running training *****")NEWLINE logger.info(f" Num examples = {num_examples}")NEWLINE logger.info(f" Num Epochs = {num_train_epochs}")NEWLINE logger.info(f" Instantaneous batch size per device = {self.args.per_device_train_batch_size}")NEWLINE logger.info(f" Total train batch size (w. 
parallel, distributed & accumulation) = {total_train_batch_size}")NEWLINE logger.info(f" Gradient Accumulation steps = {self.args.gradient_accumulation_steps}")NEWLINE logger.info(f" Total optimization steps = {max_steps}")NEWLINENEWLINE self.state.epoch = 0NEWLINE epochs_trained = 0NEWLINE steps_trained_in_current_epoch = 0NEWLINENEWLINE t = self._update_timing(timings, 'final prep', t)NEWLINENEWLINE # Check if continuing training from a checkpointNEWLINE if model_path and os.path.isfile(os.path.join(model_path, "trainer_state.json")):NEWLINE self.state = TrainerState.load_from_json(os.path.join(model_path, "trainer_state.json"))NEWLINE epochs_trained = self.state.global_step // num_update_steps_per_epochNEWLINE if not self.args.ignore_data_skip:NEWLINE steps_trained_in_current_epoch = self.state.global_step % (num_update_steps_per_epoch)NEWLINE steps_trained_in_current_epoch *= self.args.gradient_accumulation_stepsNEWLINE else:NEWLINE steps_trained_in_current_epoch = 0NEWLINENEWLINE logger.info(" Continuing training from checkpoint, will skip to saved global_step")NEWLINE logger.info(f" Continuing training from epoch {epochs_trained}")NEWLINE logger.info(f" Continuing training from global step {self.state.global_step}")NEWLINE if not self.args.ignore_data_skip:NEWLINE logger.info(NEWLINE f" Will skip the first {epochs_trained} epochs then the first {steps_trained_in_current_epoch} "NEWLINE "batches in the first epoch."NEWLINE )NEWLINE t = self._update_timing(timings, 'load checkpoint', t)NEWLINENEWLINE # Update the referencesNEWLINE self.callback_handler.model = self.modelNEWLINE self.callback_handler.optimizer = self.optimizerNEWLINE self.callback_handler.lr_scheduler = self.lr_schedulerNEWLINE self.callback_handler.train_dataloader = train_dataloaderNEWLINE self.state.trial_name = self.hp_name(trial) if self.hp_name is not None else NoneNEWLINE self.state.trial_params = hp_params(trial) if trial is not None else NoneNEWLINE # This should be the same if the state has been saved but in case the training arguments changed, it's saferNEWLINE # to set this after the load.NEWLINE self.state.max_steps = max_stepsNEWLINE self.state.num_train_epochs = num_train_epochsNEWLINE self.state.is_local_process_zero = self.is_local_process_zero()NEWLINE self.state.is_world_process_zero = self.is_world_process_zero()NEWLINENEWLINE # tr_loss is a tensor to avoid synchronization of TPUs through .item()NEWLINE tr_loss = torch.tensor(0.0).to(self.args.device)NEWLINE # _total_loss_scalar is updated everytime .item() has to be called on tr_loss and stores the sum of all lossesNEWLINE self._total_loss_scalar = 0.0NEWLINE self._globalstep_last_logged = 0NEWLINE self._total_flos = self.state.total_flosNEWLINE model.zero_grad()NEWLINENEWLINE self.control = self.callback_handler.on_train_begin(self.args, self.state, self.control)NEWLINENEWLINE # Skip the first epochs_trained epochs to get the random state of the dataloader at the right point.NEWLINE if not self.args.ignore_data_skip:NEWLINE for epoch in range(epochs_trained):NEWLINE # We just need to begin an iteration to create the randomization of the sampler.NEWLINE for _ in train_dataloader:NEWLINE breakNEWLINENEWLINE # self.adv_args.adversarial_every *= self.args.gradient_accumulation_steps # This was not correct!!!!NEWLINE total_steps_taken = 0NEWLINE total_steps = len(train_dataloader) * self.args.num_train_epochsNEWLINENEWLINE t = self._update_timing(timings, 'setters', t)NEWLINENEWLINE attacker = None if self.adv_args.orig_lambda == 1. 
else get_attacker(self.max_seq_len, self.args.device,NEWLINE self.args.train_batch_size, self.adv_args, self.tokenizer)NEWLINENEWLINE t = self._update_timing(timings, 'attackers init', t)NEWLINENEWLINE # ------------------------------------------------------------------------------------------------------------------NEWLINE # ----------------------------- actually starting the training (iterating over epochs) -----------------------------NEWLINE # ------------------------------------------------------------------------------------------------------------------NEWLINE for epoch in range(epochs_trained, num_train_epochs):NEWLINE # print(f'entering epoch {epoch}/{epochs_trained}')NEWLINE t = time()NEWLINE if isinstance(train_dataloader, DataLoader) and isinstance(train_dataloader.sampler, DistributedSampler):NEWLINE train_dataloader.sampler.set_epoch(epoch)NEWLINENEWLINE if is_torch_tpu_available():NEWLINE parallel_loader = pl.ParallelLoader(train_dataloader, [self.args.device]).per_device_loader(NEWLINE self.args.deviceNEWLINE )NEWLINE epoch_iterator = parallel_loaderNEWLINE else:NEWLINE epoch_iterator = train_dataloaderNEWLINENEWLINE # Reset the past mems state at the beginning of each epoch if necessary.NEWLINE if self.args.past_index >= 0:NEWLINE self._past = NoneNEWLINENEWLINE steps_in_epoch = len(epoch_iterator) if train_dataset_is_sized else self.args.max_stepsNEWLINE self.control = self.callback_handler.on_epoch_begin(self.args, self.state, self.control)NEWLINENEWLINE adv_batch_counter = -1NEWLINENEWLINE adv_epoch_iterators = NoneNEWLINE if self.adv_args.orig_lambda < 1. and self.adv_args.async_adv_batches:NEWLINE adv_epoch_iterators = [iter(atdl) for atdl in adv_train_dataloaders]NEWLINENEWLINE t = self._update_timing(timings, 'prep epoch', t)NEWLINENEWLINE # *************************************************************************************************NEWLINE # ************************************ Starting training steps ************************************NEWLINE # *************************************************************************************************NEWLINE for step, inputs in enumerate(epoch_iterator):NEWLINE # batch will contain train_batch_size samples (n_gpus*batch_per_gpu)NEWLINENEWLINE # TODO - perhaps the following lines should be in the on_step_begin callback?NEWLINE total_steps_taken += 1NEWLINE # print('Starting batch:', torch.cuda.memory_allocated() / (2**20))NEWLINE adv_batch_counter += 1NEWLINENEWLINE # Skip past any already trained steps if resuming trainingNEWLINE if steps_trained_in_current_epoch > 0:NEWLINE steps_trained_in_current_epoch -= 1NEWLINE continueNEWLINENEWLINE if (step + 1) % self.args.gradient_accumulation_steps == 0:NEWLINE self.control = self.callback_handler.on_step_begin(self.args, self.state, self.control)NEWLINENEWLINE if (NEWLINE ((step + 1) % self.args.gradient_accumulation_steps != 0)NEWLINE and self.args.local_rank != -1NEWLINE and _use_ddp_no_syncNEWLINE ):NEWLINE with model.no_sync():NEWLINE # print(f'start training step')NEWLINE # ------------------------>>>>> Perform training step <<<<<<------------------------NEWLINE tr_loss += self.training_step(model, inputs)NEWLINE t = self._update_timing(timings, 'train step', t)NEWLINENEWLINE # print(f'start adv training step')NEWLINE # ------------------------>>>>> Perform Adversarial training step <<<<<<------------------------NEWLINE # adv_training_step returns the updated (tr_loss, adv_batch_counter) tuple, as in the sync branch belowNEWLINE tr_loss, adv_batch_counter = self.adv_training_step(model, inputs, tr_loss, adv_epoch_iterators, adv_batch_counter,NEWLINE step, attacker, timings)NEWLINE t = 
self._update_timing(timings, 'adv train step', t)NEWLINENEWLINE else:NEWLINE # print(f'start training step')NEWLINENEWLINE # ------------------------>>>>> Perform training step <<<<<<------------------------NEWLINE tr_loss += self.training_step(model, inputs)NEWLINE t = self._update_timing(timings, 'train step', t)NEWLINENEWLINE # print(f'start adv training step')NEWLINENEWLINE # ------------------------>>>>> Perform Adversarial training step <<<<<<------------------------NEWLINE tr_loss, adv_batch_counter = self.adv_training_step(model, inputs, tr_loss, adv_epoch_iterators, adv_batch_counter,NEWLINE step, attacker, timings)NEWLINE t = self._update_timing(timings, 'adv train step', t)NEWLINENEWLINE # print('out of steps')NEWLINE self._total_flos += self.floating_point_ops(inputs)NEWLINENEWLINE if (step + 1) % self.args.gradient_accumulation_steps == 0 or (NEWLINE # last step in epoch but step is always smaller than gradient_accumulation_stepsNEWLINE steps_in_epoch <= self.args.gradient_accumulation_stepsNEWLINE and (step + 1) == steps_in_epochNEWLINE ):NEWLINE if self.args.fp16 and _use_native_amp:NEWLINE self.scaler.unscale_(self.optimizer)NEWLINE torch.nn.utils.clip_grad_norm_(model.parameters(), self.args.max_grad_norm)NEWLINE elif self.args.fp16 and _use_apex:NEWLINE torch.nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), self.args.max_grad_norm)NEWLINE else:NEWLINE torch.nn.utils.clip_grad_norm_(model.parameters(), self.args.max_grad_norm)NEWLINENEWLINE if is_torch_tpu_available():NEWLINE xm.optimizer_step(self.optimizer)NEWLINE elif self.args.fp16 and _use_native_amp:NEWLINE self.scaler.step(self.optimizer)NEWLINE self.scaler.update()NEWLINE else:NEWLINE self.optimizer.step()NEWLINENEWLINE self.lr_scheduler.step()NEWLINE model.zero_grad()NEWLINE self.state.global_step += 1NEWLINE self.state.epoch = epoch + (step + 1) / steps_in_epochNEWLINE self.control = self.callback_handler.on_step_end(self.args, self.state, self.control)NEWLINENEWLINE self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)NEWLINENEWLINE t = self._update_timing(timings, 'optimize step', t)NEWLINENEWLINE if self.control.should_epoch_stop or self.control.should_training_stop:NEWLINE breakNEWLINENEWLINE self.control = self.callback_handler.on_epoch_end(self.args, self.state, self.control)NEWLINE self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)NEWLINENEWLINE if self.args.tpu_metrics_debug or self.args.debug:NEWLINE if is_torch_tpu_available():NEWLINE # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)NEWLINE xm.master_print(met.metrics_report())NEWLINE else:NEWLINE logger.warning(NEWLINE "You enabled PyTorch/XLA debug metrics but you don't have a TPU "NEWLINE "configured. Check your training configuration if this is unexpected."NEWLINE )NEWLINE if self.control.should_training_stop:NEWLINE breakNEWLINENEWLINE if self.args.past_index and hasattr(self, "_past"):NEWLINE # Clean the state at the end of trainingNEWLINE delattr(self, "_past")NEWLINENEWLINE logger.info("\n\nTraining completed. 
Do not forget to share your model on huggingface.co/models =)\n\n")NEWLINE if self.args.load_best_model_at_end and self.state.best_model_checkpoint is not None:NEWLINE logger.info(NEWLINE f"Loading best model from {self.state.best_model_checkpoint} (score: {self.state.best_metric})."NEWLINE )NEWLINE if isinstance(self.model, PreTrainedModel):NEWLINE self.model = self.model.from_pretrained(self.state.best_model_checkpoint)NEWLINE if not self.args.model_parallel:NEWLINE self.model = self.model.to(self.args.device)NEWLINE else:NEWLINE state_dict = torch.load(os.path.join(self.state.best_model_checkpoint, WEIGHTS_NAME))NEWLINE self.model.load_state_dict(state_dict)NEWLINENEWLINE if self._total_flos is not None:NEWLINE self.store_flos()NEWLINE self.log({"total_flos": self.state.total_flos})NEWLINENEWLINE self.control = self.callback_handler.on_train_end(self.args, self.state, self.control)NEWLINE # add remaining tr_lossNEWLINE self._total_loss_scalar += tr_loss.item()NEWLINENEWLINE return TrainOutput(self.state.global_step, self._total_loss_scalar / self.state.global_step)NEWLINENEWLINE def get_adv_train_dataloaders(self):NEWLINE adv_train_dataloaders = NoneNEWLINE if self.adv_args.orig_lambda < 1. and self.adv_args.async_adv_batches:NEWLINE adv_train_dataloaders = [NEWLINE # There is no need to re-encode these batches since we iterate over the train_dataset, which is alreadyNEWLINE # encoded. Also note that there is no explicit seed here, but the random sampler gives each of theNEWLINE # loaders a different permutationNEWLINE DataLoader(self.train_dataset,NEWLINE sampler=self._get_train_sampler(),NEWLINE batch_size=self.args.train_batch_size,NEWLINE collate_fn=self.data_collator,NEWLINE drop_last=self.args.dataloader_drop_last,NEWLINE num_workers=self.args.dataloader_num_workers,NEWLINE )NEWLINE for _ in range(self.adv_args.n_adv_loaders)NEWLINE ]NEWLINE return adv_train_dataloadersNEWLINENEWLINE def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:NEWLINE """NEWLINE Perform a training step on a batch of inputs.NEWLINENEWLINE Subclass and override to inject custom behavior.NEWLINENEWLINE Args:NEWLINE model (:obj:`nn.Module`):NEWLINE The model to train.NEWLINE inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):NEWLINE The inputs and targets of the model.NEWLINENEWLINE The dictionary will be unpacked before being fed to the model. Most models expect the targets under theNEWLINE argument :obj:`labels`. 
Check your model's documentation for all accepted arguments.NEWLINENEWLINE Return:NEWLINE :obj:`torch.Tensor`: The tensor with training loss on this batch.NEWLINE """NEWLINENEWLINE model.train()NEWLINE inputs = self._prepare_inputs(inputs)NEWLINENEWLINE if self.args.fp16 and _use_native_amp:NEWLINE with autocast():NEWLINE loss = self.compute_loss(model, inputs)NEWLINE else:NEWLINE loss = self.compute_loss(model, inputs)NEWLINENEWLINE if self.args.n_gpu > 1:NEWLINE loss = loss.mean() # mean() to average on multi-gpu parallel trainingNEWLINENEWLINE loss = self.adv_args.orig_lambda * lossNEWLINENEWLINE if self.args.gradient_accumulation_steps > 1:NEWLINE loss = loss / self.args.gradient_accumulation_stepsNEWLINENEWLINE if self.args.fp16 and _use_native_amp:NEWLINE self.scaler.scale(loss).backward()NEWLINE elif self.args.fp16 and _use_apex:NEWLINE with amp.scale_loss(loss, self.optimizer) as scaled_loss:NEWLINE scaled_loss.backward()NEWLINE else:NEWLINE loss.backward()NEWLINENEWLINE return loss.detach()NEWLINENEWLINE def adv_training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]], tr_loss, adv_epoch_iterators,NEWLINE adv_batch_counter: int, step: int, attacker, timings) -> Tuple[torch.Tensor, int]:NEWLINE # start attacking the inputs (attack all inputs in the batch)NEWLINE if self.adv_args.orig_lambda < 1. and adv_batch_counter % self.adv_args.adversarial_every == 0:NEWLINE # advance adversarial loaders or use current batchNEWLINE # TODO - perhaps we can avoid preparing them on gpuNEWLINE t = time()NEWLINE if not self.adv_args.async_adv_batches:NEWLINE adv_batches = self._prepare_inputs(inputs) # for sync adv loaders we will exploit on each sample which is n_exploits*train_batch_sizeNEWLINE else:NEWLINE # with async loaders, each loader will create train_batch_size batches, so we would end up withNEWLINE # n_exploits*n_loaders*train_batch_size adv batchesNEWLINE adv_batches = [self._prepare_inputs(next(aei)) for aei in adv_epoch_iterators]NEWLINE if len(adv_batches) > 1:NEWLINE adv_batches = {k: torch.cat([b[k] for b in adv_batches]) for k in adv_batches[0].keys()}NEWLINE else:NEWLINE adv_batches = adv_batches[0]NEWLINENEWLINE if (step + 1) % self.args.gradient_accumulation_steps != 0: # not the step that performs the updateNEWLINE adv_batch_counter -= 1 # force next step to be adversarial as wellNEWLINE model.eval()NEWLINE t = self._update_timing(timings, 'prep adv batch', t)NEWLINE with torch.no_grad():NEWLINE adv_inputs = get_adversarial_inputs(self.adv_args, adv_batches, model, self.tokenizer, attacker, self.args.eval_batch_size,NEWLINE self._prepare_inputs, timings=timings)NEWLINENEWLINE # # -------------------------------------------------------------------------------------------NEWLINE # print('\n' + ('*' * 75))NEWLINE # for sent, mask, lab in zip(adv_inputs['input_ids'].cpu(), adv_inputs['attention_mask'].cpu(), adv_inputs['labels'].cpu()):NEWLINE # # validate:NEWLINE # mask = mask.numpy()NEWLINE # mask_end_ind = mask.sum()NEWLINE # assert np.all(mask[:mask_end_ind] == 1)NEWLINE # assert np.all(sent.numpy()[mask_end_ind:] == 0)NEWLINE # assert sent[0].item() == 101NEWLINE # assert sent[mask_end_ind - 1].item() == 102NEWLINE # print(tokenizer.decode(sent.numpy(), skip_special_tokens=True), '\t', lab.item())NEWLINE # print('*' * 75)NEWLINE # # -------------------------------------------------------------------------------------------NEWLINENEWLINE model.train()NEWLINE t = self._update_timing(timings, 'get adv batch', t)NEWLINENEWLINE # 
print('Created adv inputs:', torch.cuda.memory_allocated() / (2**20))NEWLINENEWLINE if adv_inputs is not None:NEWLINE # we may have more inputs here than allowed by the train_batch_size, so accumulate the detached lossNEWLINE # over every mini-batch and only return once all chunks have been trained onNEWLINE total_adv_loss = torch.tensor(0.0).to(self.args.device)NEWLINE for mini_batch, weight in split_batch(adv_inputs, self.args.train_batch_size):NEWLINE adv_loss = model(**mini_batch)[0]NEWLINENEWLINE # print('Forwarded it:', torch.cuda.memory_allocated() / (2**20))NEWLINE if self.args.n_gpu > 1:NEWLINE adv_loss = adv_loss.mean() # mean() to average on multi-gpu parallel trainingNEWLINENEWLINE # Note that we already backpropagated the original loss as well as added it to the tr_loss counter, so hereNEWLINE # we only need to take care of the adv_lossNEWLINE adv_loss = (1 - self.adv_args.orig_lambda) * adv_loss * weight # the weight is the share of the original batch this chunk representsNEWLINENEWLINE if self.args.gradient_accumulation_steps > 1:NEWLINE adv_loss = adv_loss / self.args.gradient_accumulation_stepsNEWLINENEWLINE if self.args.fp16 and _use_native_amp:NEWLINE self.scaler.scale(adv_loss).backward()NEWLINE elif self.args.fp16 and _use_apex:NEWLINE with amp.scale_loss(adv_loss, self.optimizer) as scaled_loss:NEWLINE scaled_loss.backward()NEWLINE else:NEWLINE adv_loss.backward()NEWLINENEWLINE total_adv_loss += adv_loss.detach()NEWLINENEWLINE t = self._update_timing(timings, 'train on adv batch', t)NEWLINE return tr_loss + total_adv_loss, adv_batch_counterNEWLINENEWLINE # get_adversarial_inputs may come back empty; every path must still return the tupleNEWLINE return tr_loss, adv_batch_counterNEWLINENEWLINE else:NEWLINE return tr_loss, adv_batch_counterNEWLINE |
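# --- Illustrative usage sketch (not part of the trainer source above) ---NEWLINE# How the AdversarialTrainer above might be constructed. The AdversarialTrainingArgumentsNEWLINE# field names are taken from the attributes read above (orig_lambda, adversarial_every,NEWLINE# async_adv_batches, n_adv_loaders); the constructor call itself is an assumption, andNEWLINE# model/training_args/datasets/tokenizer are placeholders.NEWLINEadv_args = AdversarialTrainingArguments(orig_lambda=0.5, adversarial_every=1,NEWLINE async_adv_batches=True, n_adv_loaders=1)NEWLINEtrainer = AdversarialTrainer(max_seq_len=128, adv_training_args=adv_args, model=model,NEWLINE args=training_args, train_dataset=train_dataset,NEWLINE eval_dataset=eval_dataset, tokenizer=tokenizer)NEWLINEtrainer.train()NEWLINE |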
# terrascript/errorcheck/r.pyNEWLINE# Automatically generated by tools/makecode.py ()NEWLINENEWLINEimport warningsNEWLINENEWLINEwarnings.warn(NEWLINE "using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2NEWLINE)NEWLINEimport terrascriptNEWLINENEWLINENEWLINEclass errorcheck_is_valid(terrascript.Resource):NEWLINE passNEWLINE |
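# --- Illustrative usage sketch (not part of the generated module above) ---NEWLINE# How a generated terrascript resource class is typically used; the resource name andNEWLINE# arguments are placeholders, and API details may vary across terrascript versions.NEWLINEts = terrascript.Terrascript()NEWLINEts += errorcheck_is_valid("example") # add the resource to the configurationNEWLINEprint(str(ts)) # emit the Terraform JSON documentNEWLINE |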
from abc import ABC, abstractmethodNEWLINENEWLINEfrom vizier.vapi import *NEWLINENEWLINEclass OptimizerClient(ABC):NEWLINE # abstract base class: each method must be implemented by a concrete clientNEWLINE def __init__(self):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def create_study(self, study: Study, concurrency: int):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def get_suggestions(self, study: Study, concurrency: int) -> 'list[Trial]':NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def process_measurement(NEWLINE self,NEWLINE study: Study,NEWLINE measurement: MeasurementNEWLINE ) -> Measurement:NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def complete_trial(self, study: Study, trial: Trial):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def get_best_trial(self, study: Study) -> Trial:NEWLINE pass |
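# --- Illustrative sketch (not part of the vizier source above) ---NEWLINE# With the ABC interface above, a concrete client must implement every abstractNEWLINE# method before it can be instantiated. A do-nothing implementation for testing:NEWLINEclass NoOpOptimizerClient(OptimizerClient):NEWLINE def create_study(self, study: Study, concurrency: int):NEWLINE passNEWLINENEWLINE def get_suggestions(self, study: Study, concurrency: int) -> 'list[Trial]':NEWLINE return [] # a real client would ask the backend for new trialsNEWLINENEWLINE def process_measurement(self, study: Study, measurement: Measurement) -> Measurement:NEWLINE return measurementNEWLINENEWLINE def complete_trial(self, study: Study, trial: Trial):NEWLINE passNEWLINENEWLINE def get_best_trial(self, study: Study) -> Trial:NEWLINE return None # no trials recorded, so no best trialNEWLINE |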
# UniBorg (telegram userbot)NEWLINE# Copyright (C) 2020 The AuthorsNEWLINENEWLINE# This program is free software: you can redistribute it and/or modifyNEWLINE# it under the terms of the GNU Affero General Public License as published byNEWLINE# the Free Software Foundation, either version 3 of the License, orNEWLINE# (at your option) any later version.NEWLINENEWLINE# This program is distributed in the hope that it will be useful,NEWLINE# but WITHOUT ANY WARRANTY; without even the implied warranty ofNEWLINE# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See theNEWLINE# GNU Affero General Public License for more details.NEWLINENEWLINE# DeezLoader is an attempt to archive songs andNEWLINE# to serve the poor who can't afford legal copy of the songs.NEWLINE# If you are capable of buying andNEWLINE# spending money on songs in legal ways, please do so.NEWLINENEWLINE# The Author(s) of this module are not responsibleNEWLINE# for the usage of this program by other people.NEWLINENEWLINE# The Author(s) of this module do not recommendNEWLINE# doing it illegally or against Deezer's Terms of ServiceNEWLINENEWLINE# You should have received a copy of the GNU Affero General Public LicenseNEWLINE# along with this program. If not, see <https://www.gnu.org/licenses/>.NEWLINENEWLINE# requires: deezloader hachoir PillowNEWLINE# Ported from UniBorg by AnggaR96sNEWLINENEWLINEimport osNEWLINEimport shutilNEWLINEimport timeNEWLINENEWLINEimport deezloaderNEWLINEfrom hachoir.metadata import extractMetadataNEWLINEfrom hachoir.parser import createParserNEWLINEfrom telethon.tl.types import DocumentAttributeAudioNEWLINENEWLINEfrom userbot import DEEZER_ARL_TOKEN, TEMP_DOWNLOAD_DIRECTORYNEWLINEfrom userbot.events import registerNEWLINENEWLINENEWLINE@register(outgoing=True,NEWLINE pattern=r"^\.deez (.+?|) (FLAC|MP3\_320|MP3\_256|MP3\_128)")NEWLINEasync def _(event):NEWLINE """DeezLoader by @An0nimiaNEWLINE Ported for UniBorg by @SpEcHlDe"""NEWLINE if event.fwd_from:NEWLINE returnNEWLINENEWLINE strings = {NEWLINE "name": "DeezLoad",NEWLINE "arl_token_cfg_doc": "ARL Token for Deezer",NEWLINE "invalid_arl_token": "please set the required variables for this module",NEWLINE "wrong_cmd_syntax": "bruh, now i think how far should we go. 
please terminate my Session.",NEWLINE "server_error": "We're experiencing technical difficulties.",NEWLINE "processing": "`Downloading...`",NEWLINE "uploading": "`Uploading...`",NEWLINE }NEWLINENEWLINE ARL_TOKEN = DEEZER_ARL_TOKENNEWLINENEWLINE if ARL_TOKEN is None:NEWLINE await event.edit(strings["invalid_arl_token"])NEWLINE returnNEWLINENEWLINE try:NEWLINE loader = deezloader.Login(ARL_TOKEN)NEWLINE except Exception as er:NEWLINE await event.edit(str(er))NEWLINE returnNEWLINENEWLINE temp_dl_path = os.path.join(TEMP_DOWNLOAD_DIRECTORY, str(time.time()))NEWLINE if not os.path.exists(temp_dl_path):NEWLINE os.makedirs(temp_dl_path)NEWLINENEWLINE required_link = event.pattern_match.group(1)NEWLINE required_qty = event.pattern_match.group(2)NEWLINENEWLINE await event.edit(strings["processing"])NEWLINENEWLINE if "spotify" in required_link:NEWLINE if "track" in required_link:NEWLINE required_track = loader.download_trackspo(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "album" in required_link:NEWLINE reqd_albums = loader.download_albumspo(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE zips=False,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE for required_track in reqd_albums:NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "deezer" in required_link:NEWLINE if "track" in required_link:NEWLINE required_track = loader.download_trackdee(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "album" in required_link:NEWLINE reqd_albums = loader.download_albumdee(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE zips=False,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE for required_track in reqd_albums:NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE else:NEWLINE await event.edit(strings["wrong_cmd_syntax"])NEWLINENEWLINENEWLINEasync def upload_track(track_location, message):NEWLINE metadata = extractMetadata(createParser(track_location))NEWLINE duration = 0NEWLINE title = ""NEWLINE performer = ""NEWLINE if metadata.has("duration"):NEWLINE duration = metadata.get("duration").secondsNEWLINE if metadata.has("title"):NEWLINE title = metadata.get("title")NEWLINE if metadata.has("artist"):NEWLINE performer = metadata.get("artist")NEWLINE document_attributes = [NEWLINE DocumentAttributeAudio(NEWLINE duration=duration,NEWLINE voice=False,NEWLINE title=title,NEWLINE performer=performer,NEWLINE waveform=None,NEWLINE )NEWLINE ]NEWLINE supports_streaming = TrueNEWLINE force_document = FalseNEWLINE caption_rts = os.path.basename(track_location)NEWLINE await 
message.client.send_file(NEWLINE message.chat_id,NEWLINE track_location,NEWLINE caption=caption_rts,NEWLINE force_document=force_document,NEWLINE supports_streaming=supports_streaming,NEWLINE allow_cache=False,NEWLINE attributes=document_attributes,NEWLINE )NEWLINE os.remove(track_location)NEWLINE |
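# --- Suggested hardening sketch (not part of the module above) ---NEWLINE# hachoir's createParser()/extractMetadata() can return None for files they cannotNEWLINE# parse, in which case upload_track() above would raise AttributeError onNEWLINE# metadata.has(...). A None-safe variant of that metadata block:NEWLINEdef safe_audio_metadata(track_location):NEWLINE parser = createParser(track_location)NEWLINE metadata = extractMetadata(parser) if parser else NoneNEWLINE duration, title, performer = 0, "", ""NEWLINE if metadata:NEWLINE if metadata.has("duration"):NEWLINE duration = metadata.get("duration").secondsNEWLINE if metadata.has("title"):NEWLINE title = metadata.get("title")NEWLINE if metadata.has("artist"):NEWLINE performer = metadata.get("artist")NEWLINE return duration, title, performerNEWLINE |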
import botometer, constantsNEWLINEfrom botometer import NoTimelineErrorNEWLINEfrom requests import ConnectionError, HTTPError, TimeoutNEWLINEfrom urllib3.exceptions import ReadTimeoutError, ProtocolError, SSLErrorNEWLINEimport tweepyNEWLINEimport sysNEWLINEimport osNEWLINEimport globNEWLINEimport csvNEWLINEimport pandas as pdNEWLINEimport smtplibNEWLINEimport randomNEWLINEimport timeNEWLINENEWLINENEWLINEclass BotometerClient:NEWLINENEWLINE def __init__(self, filename):NEWLINENEWLINE self.bot_meter = botometer.Botometer(wait_on_ratelimit=True,NEWLINE mashape_key=constants.mashape_key,NEWLINE **constants.botometer_auth)NEWLINENEWLINE self.master_file_name = 'MasterIDs.csv'NEWLINE # Store all the ids we get an error on so they aren't checked againNEWLINE self.error_ids_file_name = 'ErrorIDs.csv'NEWLINE self.unique_ids_file_name = 'UniqueIDs.csv'NEWLINENEWLINE # Time so we can take how long it takes to scrape all these idsNEWLINE self.start_time = time.time()NEWLINE self.streaming_file_name = filenameNEWLINE self.create_master_file()NEWLINE self.create_error_file()NEWLINE self.tweepy_api = constants.apiNEWLINENEWLINE self.error_df = BotometerClient.load_error_ids_df(self.error_ids_file_name)NEWLINE self.master_df = BotometerClient.load_master_ids_df(self.master_file_name)NEWLINE self.df = self.get_all_ids()NEWLINENEWLINE def start_bot_collection(self):NEWLINE # Get botometer scores for every id in the streamNEWLINE print('Starting Client....')NEWLINE number_of_accounts_to_check = len(self.df)NEWLINENEWLINE self.df.reset_index(drop=True, inplace=True)NEWLINENEWLINE for index, row in self.df.iterrows():NEWLINE if index % 10 == 0:NEWLINE print('On index: ', index, ' out of ', number_of_accounts_to_check)NEWLINENEWLINE tweet_text = row['status_text']NEWLINE tweet_time = row['status_created_at']NEWLINE user_id = row['user_id']NEWLINE tweet_count = row['stream_tweet_count']NEWLINENEWLINE try:NEWLINE result, payload = self.bot_meter.check_account(user_id,NEWLINE full_user_object=True,NEWLINE return_user_data=True)NEWLINE cap = result['cap']['universal']NEWLINE bot_score = result['display_scores']['universal']NEWLINE print('cap: ', cap)NEWLINE print('\n')NEWLINE # print('bot score: ', bot_score)NEWLINENEWLINE if cap > 0.70:NEWLINE self.send_tweet(payload['user']['screen_name'], cap)NEWLINENEWLINE # Save to Master, Mentions, and TimelineNEWLINE self.save_to_master(user_id, bot_score, cap, tweet_count,NEWLINE tweet_time, tweet_text, payload['user'])NEWLINENEWLINE except tweepy.TweepError as exc:NEWLINE # Save this user_id so we don't check it againNEWLINE self.save_to_error_ids(user_id)NEWLINE print('Error encountered for ', user_id)NEWLINE print('Error response: ', exc.response)NEWLINE print('Error reason: ', exc.reason)NEWLINE print('Error api code: ', exc.api_code)NEWLINE print('\n')NEWLINENEWLINE except NoTimelineError as err:NEWLINE self.save_to_error_ids(user_id)NEWLINE print('No Timeline error caught: ', err)NEWLINE print('\n')NEWLINENEWLINE except (ConnectionError, HTTPError, Timeout, ReadTimeoutError, ProtocolError, SSLError) as exc:NEWLINE print("New exception: ", exc)NEWLINE # print(exc.reason)NEWLINE time.sleep(120)NEWLINENEWLINE print('\n\n$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$')NEWLINE print('Finished! 
:)')NEWLINE time_diff = int(time.time() - self.start_time)NEWLINE num_ids = str(len(self.df))NEWLINE print('It took {:02d}:{:02d}:{:02d} to collect {} bot scores!'.format(time_diff // 3600, (time_diff % 3600 // 60), time_diff % 60, num_ids))NEWLINE BotometerClient.send_notification_email()NEWLINE returnNEWLINENEWLINE def send_tweet(self, user, cap):NEWLINENEWLINE low_start_options = ['Beep, Beep, I think I found another bot... {0}'.format(user),NEWLINE 'R2 says {0}'.format(user),NEWLINE 'It looks like {0}'.format(user),NEWLINE 'I\'ve calculated that {}'.format(user)NEWLINE ]NEWLINENEWLINE high_start_options = ['I spy a bot... {0}'.format(user),NEWLINE 'Danger Will Robinson I\'ve found another political bot {0}'.format(user),NEWLINE 'Robot in disguise {0}'.format(user),NEWLINE 'Looks like {0} is breaking the first law: A robot may not injure a human being or, 'NEWLINE 'through inaction, allow a human being to come to harm. {1}'.format(user, user),NEWLINE 'I guess {0} doesn\'t know the Zeroth Law: A robot may not harm humanity, or, by 'NEWLINE 'inaction, allow humanity to come to harm. {1}'.format(user, user)NEWLINE ]NEWLINENEWLINE cap *= 100NEWLINE cap = round(cap, 2)NEWLINENEWLINE if cap < 90:NEWLINE start = random.choice(low_start_options)NEWLINE ending = ' has a botometer score of {0}%, suggesting it could be a bot or bot assisted. ' \NEWLINE '#politicalbots'.format(cap)NEWLINE else:NEWLINE start = random.choice(high_start_options)NEWLINE ending = ' has a botometer score of {0}%, suggesting it is probably a bot. #politicalbots'.format(cap)NEWLINENEWLINE tweet_text = "{0}{1}".format(start, ending)NEWLINE self.tweepy_api.update_status(tweet_text)NEWLINENEWLINE returnNEWLINENEWLINE def save_to_error_ids(self, user_id):NEWLINE error_ids_file = open(self.error_ids_file_name, 'a')NEWLINE error_writer = csv.writer(error_ids_file)NEWLINENEWLINE try:NEWLINE error_writer.writerow([user_id])NEWLINENEWLINE except Exception as exc:NEWLINE print(exc)NEWLINE passNEWLINENEWLINE error_ids_file.close()NEWLINE returnNEWLINENEWLINE def save_to_master(self, user_id, bot_score, cap, tweet_count, tweet_time, tweet_text, user_dict):NEWLINE # Open the csv file created previouslyNEWLINE master_file = open(self.master_file_name, 'a')NEWLINENEWLINE # Create a csv writerNEWLINE master_writer = csv.writer(master_file)NEWLINENEWLINE try:NEWLINE master_writer.writerow([user_id,NEWLINE bot_score,NEWLINE cap,NEWLINE tweet_count,NEWLINE tweet_time,NEWLINE tweet_text,NEWLINE user_dict['favourites_count'],NEWLINE user_dict['statuses_count'],NEWLINE user_dict['description'],NEWLINE user_dict['location'],NEWLINE user_dict['created_at'],NEWLINE user_dict['verified'],NEWLINE user_dict['following'],NEWLINE user_dict['url'],NEWLINE user_dict['listed_count'],NEWLINE user_dict['followers_count'],NEWLINE user_dict['default_profile_image'],NEWLINE user_dict['utc_offset'],NEWLINE user_dict['friends_count'],NEWLINE user_dict['default_profile'],NEWLINE user_dict['name'],NEWLINE user_dict['lang'],NEWLINE user_dict['screen_name'],NEWLINE user_dict['geo_enabled'],NEWLINE user_dict['profile_background_color'],NEWLINE user_dict['profile_image_url'],NEWLINE user_dict['time_zone'],NEWLINE user_dict['listed_count']NEWLINE ])NEWLINENEWLINE except Exception as exc:NEWLINE print(exc)NEWLINE passNEWLINENEWLINE # Close the csv fileNEWLINE master_file.close()NEWLINE returnNEWLINENEWLINE def create_error_file(self):NEWLINE if os.path.isfile(self.error_ids_file_name):NEWLINE print("Error file found")NEWLINE returnNEWLINENEWLINE else:NEWLINE 
error_file = open(self.error_ids_file_name, 'w')NEWLINENEWLINE try:NEWLINE writer = csv.writer(error_file)NEWLINE writer.writerow(['user_id'])NEWLINENEWLINE except Exception as exc:NEWLINE print(exc)NEWLINE passNEWLINENEWLINE error_file.close()NEWLINE returnNEWLINENEWLINE def create_master_file(self):NEWLINE if os.path.isfile(self.master_file_name):NEWLINE print('Master ID file found')NEWLINE returnNEWLINENEWLINE else:NEWLINE print('Creating master ID file...')NEWLINE csv_file = open(self.master_file_name, "w")NEWLINENEWLINE try:NEWLINE writer = csv.writer(csv_file)NEWLINENEWLINE writer.writerow(['user_id',NEWLINE 'bot_score',NEWLINE 'cap',NEWLINE 'tweet_count',NEWLINE 'tweet_time',NEWLINE 'tweet_text',NEWLINE 'user_favourites_count',NEWLINE 'user_statuses_count',NEWLINE 'user_description',NEWLINE 'user_location',NEWLINE 'user_created_at',NEWLINE 'user_verified',NEWLINE 'user_following',NEWLINE 'user_url',NEWLINE 'user_listed_count',NEWLINE 'user_followers_count',NEWLINE 'user_default_profile_image',NEWLINE 'user_utc_offset',NEWLINE 'user_friends_count',NEWLINE 'user_default_profile',NEWLINE 'user_name',NEWLINE 'user_lang',NEWLINE 'user_screen_name',NEWLINE 'user_geo_enabled',NEWLINE 'user_profile_background_color',NEWLINE 'user_profile_image_url',NEWLINE 'user_time_zone',NEWLINE 'user_listed_count'NEWLINE ])NEWLINENEWLINE except Exception as exc:NEWLINE print('Error writing to csv: ', exc)NEWLINENEWLINE returnNEWLINENEWLINE def get_all_ids(self):NEWLINE if self.streaming_file_name is None or self.master_df is None or self.error_df is None:NEWLINE print("Streaming file name, master_df or error_df is NONE!")NEWLINE returnNEWLINENEWLINE # Load streamingData from csvNEWLINE path = os.path.dirname(os.path.abspath(__file__)) + '/' + self.streaming_file_nameNEWLINENEWLINE df = pd.read_csv(path, header=0, low_memory=False, error_bad_lines=False, lineterminator='\n')NEWLINENEWLINE # Calculate the tweet count for each user idNEWLINE df['stream_tweet_count'] = df.groupby('user_id')['user_id'].transform('count')NEWLINENEWLINE # Drop all the columns we don't care aboutNEWLINE column_list = ['status_text', 'status_created_at', 'user_id', 'stream_tweet_count']NEWLINE df = df[column_list]NEWLINE original_size = len(df)NEWLINENEWLINE # Drop duplicate ids since we only need to get the user data onceNEWLINE df = df.drop_duplicates('user_id', keep='last')NEWLINE unique_size = len(df)NEWLINE print('Out of ', original_size, ' tweets there were ', (original_size - unique_size), ' duplicate ID\'s')NEWLINENEWLINE # Drop all ids that are already in master_dfNEWLINE master_id_list = self.master_df.user_id.tolist()NEWLINE df = df[~df.user_id.isin(master_id_list)]NEWLINENEWLINE print('Out of ', unique_size, ' there were ', (unique_size - len(df)), ' ids that already have scores')NEWLINENEWLINE print('Error DF cols: ', list(self.error_df.columns.values))NEWLINENEWLINE # the csv module writes '\r\n' line endings, so reading back with lineterminator='\n' leaves '\r' in the header nameNEWLINE error_id_list = self.error_df['user_id\r'].tolist()NEWLINE df = df[~df.user_id.isin(error_id_list)]NEWLINENEWLINE print('After removing error ids we have ', len(df), ' ids to check!')NEWLINENEWLINE # Drop any rows that are missing the required columns (dropna returns a new frame, so reassign it)NEWLINE size_before_drop = len(df)NEWLINE df = df.dropna(subset=['status_text', 'status_created_at', 'user_id', 'stream_tweet_count'])NEWLINENEWLINE print('Dropped', (size_before_drop - len(df)), 'rows with missing data!')NEWLINE print('Collecting bot scores for ', len(df), ' new ids')NEWLINENEWLINE return dfNEWLINENEWLINE ###########################NEWLINE # Start of static methods #NEWLINE 
###########################NEWLINENEWLINE @staticmethodNEWLINE def get_user_data_as_dict(df):NEWLINE # print(df)NEWLINE user_dict = {'favourites_count': df.iloc[0]['user_favourites_count'],NEWLINE 'statuses_count': df.iloc[0]['user_statuses_count'],NEWLINE 'description': df.iloc[0]['user_description'],NEWLINE 'location': df.iloc[0]['user_location'],NEWLINE 'created_at': df.iloc[0]['user_created_at'],NEWLINE 'verified': df.iloc[0]['user_verified'],NEWLINE 'following': df.iloc[0]['user_following'],NEWLINE 'url': df.iloc[0]['user_url'],NEWLINE 'listed_count': df.iloc[0]['user_listed_count'],NEWLINE 'followers_count': df.iloc[0]['user_followers_count'],NEWLINE 'default_profile_image': df.iloc[0]['user_default_profile_image'],NEWLINE 'utc_offset': df.iloc[0]['user_utc_offset'],NEWLINE 'friends_count': df.iloc[0]['user_friends_count'],NEWLINE 'default_profile': df.iloc[0]['user_default_profile'],NEWLINE 'name': df.iloc[0]['user_name'],NEWLINE 'lang': df.iloc[0]['user_lang'],NEWLINE 'screen_name': df.iloc[0]['user_screen_name'],NEWLINE 'geo_enabled': df.iloc[0]['user_geo_enabled'],NEWLINE 'profile_background_color': df.iloc[0]['user_profile_background_color'],NEWLINE 'profile_image_url': df.iloc[0]['user_profile_image_url'],NEWLINE 'time_zone': df.iloc[0]['user_time_zone']}NEWLINENEWLINE return user_dictNEWLINENEWLINE #####################################NEWLINE # Load Data from Streaming CSV File #NEWLINE #####################################NEWLINE @staticmethodNEWLINE def load_master_ids_df(master_file_name):NEWLINE # Read in MasterIDs and remove any values in there from our data frameNEWLINE path = os.path.dirname(os.path.abspath(__file__)) + '/' + master_file_nameNEWLINE master_df = pd.read_csv(path, header=0, low_memory=False, error_bad_lines=False, lineterminator='\n')NEWLINENEWLINE return master_dfNEWLINENEWLINE @staticmethodNEWLINE def load_error_ids_df(error_ids_file_name):NEWLINE # Read in Error IDs and remove any values already createdNEWLINE path = os.path.dirname(os.path.abspath(__file__)) + '/' + error_ids_file_nameNEWLINE error_df = pd.read_csv(path, header=0, low_memory=False, error_bad_lines=False, lineterminator='\n')NEWLINENEWLINE return error_dfNEWLINENEWLINE #################################NEWLINE # One function to rule them all #NEWLINE #################################NEWLINE @staticmethodNEWLINE def start_mining(file_name):NEWLINE print('\nStarting Botometer mining...')NEWLINENEWLINE # Check if the desired csv file existsNEWLINE if os.path.isfile(file_name):NEWLINE print('\nStreaming data found')NEWLINE client = BotometerClient(file_name)NEWLINENEWLINE # Start it upNEWLINE client.start_bot_collection()NEWLINENEWLINE else:NEWLINE print('Error: requested csv file does not exist!')NEWLINE returnNEWLINENEWLINE @staticmethodNEWLINE def show_csv_files():NEWLINE print("\nI found the following csv files...")NEWLINENEWLINE path = os.path.dirname(os.path.abspath(__file__))NEWLINE extension = 'csv'NEWLINE os.chdir(path)NEWLINE results = [i for i in glob.glob('*.{}'.format(extension))]NEWLINE results.sort()NEWLINENEWLINE for result in results:NEWLINE print(result)NEWLINENEWLINE returnNEWLINENEWLINE ###################NEWLINE # Parsing Methods #NEWLINE ###################NEWLINENEWLINE @staticmethodNEWLINE def parse_entities(entities):NEWLINE hashtag_key = 'hashtags'NEWLINE mentions_key = 'user_mentions'NEWLINE url_key = 'urls'NEWLINENEWLINE if hashtag_key in entities:NEWLINE hashtag_dict = entities[hashtag_key]NEWLINE hashtag_text = 
BotometerClient.parse_hashtags(hashtag_dict)NEWLINE else:NEWLINE hashtag_text = ''NEWLINENEWLINE if mentions_key in entities:NEWLINE mentions_dict = entities[mentions_key]NEWLINE mentions_text = BotometerClient.parse_mentions(mentions_dict)NEWLINE else:NEWLINE mentions_text = ''NEWLINENEWLINE if url_key in entities:NEWLINE url_dict = entities[url_key]NEWLINE url_text = BotometerClient.parse_urls(url_dict)NEWLINE else:NEWLINE url_text = ''NEWLINENEWLINE return hashtag_text, mentions_text, url_textNEWLINENEWLINE @staticmethodNEWLINE def parse_hashtags(hashtag_dict):NEWLINE hashtag_text = ''NEWLINE for dictionary in hashtag_dict:NEWLINE if 'text' in dictionary:NEWLINE if hashtag_text != '':NEWLINE hashtag_text += ' ' + dictionary['text']NEWLINE else:NEWLINE hashtag_text += dictionary['text']NEWLINENEWLINE return hashtag_textNEWLINENEWLINE @staticmethodNEWLINE def parse_mentions(mentions_dict):NEWLINE mentions_text = ''NEWLINE for dictionary in mentions_dict:NEWLINE if 'id_str' in dictionary:NEWLINE if mentions_text != '':NEWLINE mentions_text += ' ' + dictionary['id_str']NEWLINE else:NEWLINE mentions_text += dictionary['id_str']NEWLINENEWLINE return mentions_textNEWLINENEWLINE @staticmethodNEWLINE def parse_urls(url_dict):NEWLINE url_text = ''NEWLINE for dictionary in url_dict:NEWLINE if 'url' in dictionary:NEWLINE if url_text != '':NEWLINE url_text += ' ' + dictionary['url']NEWLINE else:NEWLINE url_text += dictionary['url']NEWLINENEWLINE return url_textNEWLINENEWLINE ######################NEWLINE # Email Notification #NEWLINE ######################NEWLINE @staticmethodNEWLINE def send_notification_email():NEWLINE # Email myself when the script finishes so I can start on the next set of dataNEWLINE server = smtplib.SMTP('smtp.gmail.com', 587)NEWLINE server.starttls()NEWLINE server.login(constants.email_address, constants.password)NEWLINENEWLINE subject = 'Botometer Script'NEWLINE text = 'Botometer Script Finished!'NEWLINE message = 'Subject: {}\n\n{}'.format(subject, text)NEWLINE server.sendmail(constants.email_address, constants.real_email, message)NEWLINE server.quit()NEWLINENEWLINE returnNEWLINENEWLINENEWLINElength = len(sys.argv)NEWLINEif length == 1:NEWLINE print('Error: please provide csv file name or type \'showCSVs\' to see the available files or type help for 'NEWLINE 'more information')NEWLINEelif length == 2:NEWLINE arg = sys.argv[1]NEWLINE if arg == 'showCSVs':NEWLINE BotometerClient.show_csv_files()NEWLINE elif arg == 'help':NEWLINE print('Type showCSVs to see a list of the csv files in this directory that can be passed as a parameter')NEWLINE print('Sample call: python3 start_botometer.py StreamData-#maga-#qanon-#roseanne-20180531-105244.csv')NEWLINE else:NEWLINE try:NEWLINE BotometerClient.start_mining(arg)NEWLINE except Exception as e:NEWLINE print('outer exception', e)NEWLINE # print(e.__cause__)NEWLINE print('Botometer exception caught')NEWLINE BotometerClient.send_notification_email()NEWLINENEWLINE |
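A quick, illustrative check of the entity-parsing helpers above (not part of the original script). The sample values are made up, and the snippet assumes the BotometerClient class defined above is in scope:

sample_entities = {
    'hashtags': [{'text': 'politicalbots'}, {'text': 'qanon'}],
    'user_mentions': [{'id_str': '12345'}],
    'urls': [{'url': 'https://t.co/example'}],
}
hashtags, mentions, urls = BotometerClient.parse_entities(sample_entities)
print(hashtags)  # 'politicalbots qanon'
print(mentions)  # '12345'
print(urls)      # 'https://t.co/example'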
QUERY = """NEWLINEquery searchByQuery($query:SearchQueryJson!$testListings:Boolean!$smartHide:Boolean$recentHides:[ListingId!])@debug(testListings:$testListings){rentSearch(query:$query smartHide:$smartHide recentHides:$recentHides){...RentResultsMetaData resolvedQuery{...SearchMetadata ...ResultsHeading ...SeoFooterLinks ...SearchResultsBreadcrumb __typename}marketInsights{...ResultsMarketInsightsData __typename}exclusiveShowcase{...RentExclusiveShowcaseData __typename}results{...ResultsSummary ...ResultsPagination ...RentResultsSet ...SearchResultsTotalCount exact{totalCount items{listing{...on RentResidentialListing{id productDepth __typename}...PropertyCard ...RentDetailsAboveTheFold __typename}__typename}__typename}surrounding{items{listing{...on RentResidentialListing{id productDepth __typename}...PropertyCard ...RentDetailsAboveTheFold __typename}__typename}__typename}trackingData totalResultsCount __typename}consumerContext{loggedInStatus __typename}__typename}}fragment RentResultsMetaData on RentResolvedSearch{resolvedQuery{localities{display __typename}__typename}results{__typename totalResultsCount pagination{moreResultsAvailable __typename}exact{items{listing{__typename ...on RentResidentialListing{inspections{startTime endTime __typename}_links{canonical{href __typename}__typename}__typename}...ResidentialListingAddressMetaData}__typename}__typename}}__typename}fragment ResidentialListingAddressMetaData on ResidentialListing{address{display{shortAddress fullAddress __typename}suburb state postcode __typename}__typename}fragment SearchMetadata on ResolvedQuery{metadata{canonicalSearchId savedSearchQuery __typename}__typename}fragment ResultsHeading on ResolvedQuery{localities{display __typename}__typename}fragment SeoFooterLinks on ResolvedQuery{localities{display atlasId urlValue precision name __typename}__typename}fragment SearchResultsBreadcrumb on ResolvedQuery{localities{atlasId display name urlValue precision state parents{display name urlValue precision __typename}__typename}__typename}fragment ResultsMarketInsightsData on MarketInsights{title suburbProfileUrl{href __typename}__typename}fragment RentExclusiveShowcaseData on ExclusiveShowcase{...CommonExclusiveShowcaseData listings{...on RentResidentialListing{inspections{display{shortLabel __typename}__typename}__typename}__typename}__typename}fragment CommonExclusiveShowcaseData on ExclusiveShowcase{listings{title id listingCompany{id name media{logo{templatedUrl __typename}__typename}branding{primaryColour textColour __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}__typename}address{suburb display{shortAddress __typename}__typename}listers{name photo{templatedUrl __typename}__typename}_links{trackedCanonical{path __typename}__typename}...PrimaryFeatures __typename}__typename}fragment PrimaryFeatures on ResidentialListing{...GeneralFeatures ...PropertySize __typename}fragment GeneralFeatures on ResidentialListing{generalFeatures{bedrooms{value __typename}bathrooms{value __typename}parkingSpaces{value __typename}__typename}__typename}fragment PropertySize on ResidentialListing{propertySizes{building{displayValue sizeUnit{displayValue __typename}__typename}land{displayValue sizeUnit{displayValue __typename}__typename}preferred{sizeType size{displayValue sizeUnit{displayValue __typename}__typename}__typename}__typename}__typename}fragment ResultsSummary on SearchResults{totalResultsCount pagination{page pageSize __typename}__typename}fragment ResultsPagination on 
SearchResults{pagination{maxPageNumberAvailable __typename}__typename}fragment RentResultsSet on RentSearchResults{exact{items{listing{__typename}__typename}__typename}surrounding{totalCount items{listing{__typename}__typename}__typename}pagination{page __typename}__typename}fragment SearchResultsTotalCount on SearchResults{totalResultsCount __typename}fragment PropertyCard on Listing{__typename ...ResidentialPropertyCard ...ProjectProfile}fragment ResidentialPropertyCard on ResidentialListing{...PropertyCardLayout ...BrandingOnSearchResultsConfig ...BrandingResidential badge{...Badge __typename}...ResidentialListingCardHero ...Price ...ResidentialListingCardAddress ...PropertyCardPropertyType ...PropertyCardDetailsLink ...PropertyCardAgentInfo ...ResidentialLaunchButtons ...ResidentialMediaViewerForResults ...ResidentialListingBookmark ...PrimaryFeatures ...PropertySize ...ResidentialListingCardInspection ...InspectionAuction ...DateSold ...ResidentialListingMoreButton ...ResidentialShareListing __typename}fragment PropertyCardLayout on ResidentialListing{productDepth __typename}fragment BrandingOnSearchResultsConfig on ResidentialListing{viewConfiguration{searchResults{agencyBranding __typename}__typename}productDepth __typename}fragment BrandingResidential on ResidentialListing{listingCompany{...Branding __typename}__typename}fragment Branding on ListingCompany{id name branding{primaryColour __typename}media{logo{templatedUrl __typename}__typename}__typename}fragment Badge on ListingBadge{colour label __typename}fragment ResidentialListingCardHero on ResidentialListing{...PowerProfileSlide productDepth address{display{fullAddress __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}floorplans{templatedUrl __typename}__typename}__typename}fragment PowerProfileSlide on ResidentialListing{media{mainImage{templatedUrl __typename}__typename}_links{canonical{path __typename}__typename}listingCompany{name media{logo{templatedUrl __typename}__typename}branding{primaryColour __typename}_links{canonical{href __typename}__typename}__typename}listers{id agentId name jobTitle photo{templatedUrl __typename}_links{canonical{href __typename}__typename}showInMediaViewer __typename}__typename}fragment Price on ResidentialListing{price{display __typename}__typename}fragment ResidentialListingCardAddress on ResidentialListing{address{suburb display{shortAddress __typename}__typename}__typename}fragment PropertyCardPropertyType on ResidentialListing{propertyType{display __typename}__typename}fragment PropertyCardDetailsLink on ResidentialListing{_links{canonical{path __typename}__typename}__typename}fragment PropertyCardAgentInfo on ResidentialListing{viewConfiguration{searchResults{agentPhoto agentName __typename}__typename}listers{name photo{templatedUrl __typename}__typename}listingCompany{branding{textColour __typename}__typename}__typename}fragment ResidentialLaunchButtons on ResidentialListing{media{threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ResidentialMediaViewerForResults on ResidentialListing{...ResultsAdConfiguration ...ResidentialSlides __typename}fragment ResultsAdConfiguration on ResidentialListing{viewConfiguration{searchResults{adverts{photoGallery __typename}__typename}__typename}__typename}fragment ResidentialSlides on ResidentialListing{...PowerProfileSlide ...MediaViewerEventTracking ...ThreeDimensionalTourSlide ...VideoSlide 
...PhotoOverlayWithGallerySlide media{images{templatedUrl __typename}floorplans{templatedUrl __typename}__typename}__typename}fragment MediaViewerEventTracking on ResidentialListing{listers{id agentId __typename}__typename}fragment ThreeDimensionalTourSlide on ResidentialListing{media{threeDimensionalTours{href __typename}__typename}__typename}fragment VideoSlide on ResidentialListing{media{videos{...on YouTubeVideo{__typename id}__typename}__typename}__typename}fragment PhotoOverlayWithGallerySlide on ResidentialListing{...BuilderProfile ...ParentAndSiblings __typename}fragment BuilderProfile on ResidentialListing{media{mainImage{templatedUrl __typename}__typename}listingCompany{...on Builder{name _links{canonical{templated href __typename}__typename}homeDesigns{totalCount designs{name price houseSizeRange{min{displayValue value __typename}max{displayValue value __typename}__typename}generalFeaturesDisplay{bedrooms bathrooms parkingSpaces __typename}_links{canonical{href templated __typename}__typename}media{mainImage{templatedUrl __typename}__typename}__typename}__typename}__typename}__typename}__typename}fragment ParentAndSiblings on BuyResidentialListing{id media{mainImage{templatedUrl __typename}__typename}parent{name _links{canonical{path __typename}__typename}childListings{totalCount results{id media{mainImage{templatedUrl __typename}__typename}title price{display __typename}propertyType{display __typename}_links{canonical{path __typename}__typename}propertySizes{land{displayValue sizeUnit{id displayValue __typename}__typename}__typename}...PrimaryFeatures __typename}__typename}__typename}__typename}fragment ResidentialListingBookmark on ResidentialListing{id __typename}fragment ResidentialListingCardInspection on ResidentialListing{...on BuyResidentialListing{inspections{display{shortLabel longLabel __typename}__typename}__typename}...on RentResidentialListing{inspections{display{shortLabel longLabel __typename}__typename}__typename}__typename}fragment InspectionAuction on ResidentialListing{...PropertyCardAuctionDate ...ResidentialListingCardInspection __typename}fragment PropertyCardAuctionDate on BuyResidentialListing{auction{dateTime{display{shortDate __typename}__typename}__typename}__typename}fragment DateSold on ResidentialListing{...on SoldResidentialListing{dateSold{display __typename}__typename}__typename}fragment ResidentialListingMoreButton on ResidentialListing{id __typename}fragment ResidentialShareListing on ResidentialListing{_links{canonical{href __typename}__typename}address{display{fullAddress __typename}__typename}__typename}fragment ProjectProfile on ProjectProfile{badge{...Badge __typename}...ProjectProfileCardParentListing ...ProjectProfileCardAddress ...ProjectProfileCardHero ...ProjectProfileAgency ...ProjectProfileBranding ...ProjectProfileBookmark ...PropertyCardChildListings ...ProjectLaunchButtons ...ProjectProfileNextOpenTime __typename}fragment ProjectProfileCardParentListing on ProjectProfile{name title productDepth _links{canonical{path __typename}__typename}__typename}fragment ProjectProfileCardAddress on ProjectProfile{address{suburb display{shortAddress __typename}__typename}__typename}fragment ProjectProfileCardHero on ProjectProfile{productDepth address{display{fullAddress __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}__typename}__typename}fragment ProjectProfileAgency on ProjectProfile{listingCompany{id name media{logo{templatedUrl 
__typename}__typename}__typename}viewConfiguration{searchResults{agencyBranding __typename}__typename}__typename}fragment ProjectProfileBranding on ProjectProfile{name productDepth media{logo{templatedUrl __typename}__typename}branding{primaryColour __typename}__typename}fragment ProjectProfileBookmark on ProjectProfile{id __typename}fragment PropertyCardChildListings on ProjectProfile{productDepth _links{canonical{path __typename}__typename}childListings{totalCount results{id price{display __typename}media{mainImage{templatedUrl __typename}__typename}address{display{fullAddress __typename}__typename}title _links{canonical{path __typename}__typename}...PrimaryFeatures __typename}__typename}__typename}fragment ProjectLaunchButtons on ProjectProfile{media{videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ProjectProfileNextOpenTime on ProjectProfile{displayLocation{nextAvailableOpeningHours{nextAvailable{display{shortLabel longLabel __typename}__typename}__typename}__typename}__typename}fragment RentDetailsAboveTheFold on RentResidentialListing{aboveTheFoldId:id id badge{...Badge __typename}...Hero ...Price ...Address ...ResidentialShareListing ...Breadcrumb_ResidentialListing ...PrimaryFeatures ...PropertyCardPropertyType ...PropertyInfoPosterBoard ...InspectionsSummaryForRent ...Bond ...DateAvailableSummary ...BrandingOnContactAgentPanelConfig ...ResidentialContactAgentBranding ...AgentInfo ...AgencyInfo ...HeaderLeaderboard ...ListingCompanyHeaderBranding ...RentResidentialListingMetaData __typename}fragment Hero on ResidentialListing{...HeroImage ...ResidentialMediaTypeBar __typename}fragment HeroImage on ResidentialListing{address{display{fullAddress __typename}__typename}viewConfiguration{details{posterBoard __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}floorplans{templatedUrl __typename}threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ResidentialMediaTypeBar on ResidentialListing{media{images{templatedUrl __typename}floorplans{templatedUrl __typename}threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment Address on ResidentialListing{address{suburb postcode state display{shortAddress __typename}__typename}__typename}fragment Breadcrumb_ResidentialListing on ResidentialListing{__typename id address{suburb state postcode display{shortAddress __typename}__typename}propertyType{id display __typename}_links{canonical{path __typename}__typename}}fragment PropertyInfoPosterBoard on ResidentialListing{viewConfiguration{details{posterBoard __typename}__typename}__typename}fragment InspectionsSummaryForRent on RentResidentialListing{inspections{display{longLabel __typename}__typename}__typename}fragment Bond on RentResidentialListing{bond{display __typename}__typename}fragment DateAvailableSummary on RentResidentialListing{availableDate{display __typename}__typename}fragment BrandingOnContactAgentPanelConfig on ResidentialListing{viewConfiguration{details{agencyBrandingOnSidePanel __typename}__typename}__typename}fragment ResidentialContactAgentBranding on ResidentialListing{productDepth listingCompany{name branding{primaryColour __typename}media{logo{templatedUrl __typename}__typename}_links{canonical{href __typename}__typename}__typename}__typename}fragment AgentInfo on 
ResidentialListing{listers{name photo{templatedUrl __typename}preferredPhoneNumber _links{canonical{href __typename}__typename}__typename}listingCompany{id businessPhone __typename}__typename}fragment AgencyInfo on ResidentialListing{viewConfiguration{details{agencyInfo __typename}__typename}listingCompany{...on Agency{name __typename address{display{fullAddress __typename}__typename}_links{canonical{href __typename}__typename}}__typename}__typename}fragment HeaderLeaderboard on ResidentialListing{viewConfiguration{details{adverts{headerLeaderboard __typename}__typename}__typename}__typename}fragment ListingCompanyHeaderBranding on ResidentialListing{viewConfiguration{details{branding{header{size __typename}__typename}__typename}__typename}listingCompany{name branding{primaryColour __typename}_links{canonical{href __typename}__typename}media{logo{templatedUrl __typename}__typename}__typename}__typename}fragment RentResidentialListingMetaData on RentResidentialListing{...ResidentialListingMetaData inspections{startTime endTime __typename}__typename}fragment ResidentialListingMetaData on ResidentialListing{__typename id description media{mainImage{templatedUrl __typename}images{__typename}__typename}_links{canonical{href path __typename}__typename}propertyType{id display __typename}address{display{shortAddress fullAddress __typename}suburb state postcode __typename}price{display __typename}generalFeatures{bedrooms{value __typename}__typename}propertySizes{land{displayValue sizeUnit{displayValue __typename}__typename}__typename}}NEWLINE"""NEWLINE |
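A hedged sketch of how a query string like this might be exercised from the same module. The endpoint URL, headers, and the exact shape of the SearchQueryJson variables are assumptions, not confirmed by this file; only the variable names come from the operation signature above:

import requests

GRAPHQL_URL = 'https://example.com/graphql'  # hypothetical endpoint

variables = {
    'query': {'channel': 'rent'},  # assumed SearchQueryJson payload
    'testListings': False,         # required Boolean! in the operation
    'smartHide': False,
    'recentHides': [],
}

response = requests.post(GRAPHQL_URL, json={'query': QUERY, 'variables': variables})
response.raise_for_status()
rent_search = response.json()['data']['rentSearch']
print(rent_search['results']['totalResultsCount'])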
from bs4 import BeautifulSoupNEWLINEfrom black_list.items import BlEuconsolidatedlistItemNEWLINEfrom scrapy import SpiderNEWLINEimport osNEWLINENEWLINENEWLINEclass BleuconsolidatedlistSpider(Spider):NEWLINE name = 'BL_EUConsolidatedList'NEWLINE allowed_domains = ['ec.europa.eu']NEWLINE start_urls = ['http://ec.europa.eu/external_relations/cfsp/sanctions/list/version4/global/global.xml']NEWLINE header = 'Date|Id|Type|legal_basis|reg_date|pdf_link|programme|remark|Id2|Entity_id|legal_basis2|reg_date2|pdf_link2|programme2|LASTNAME|FIRSTNAME|MIDDLENAME|WHOLENAME|GENDER|TITLE|FUNCTION|LANGUAGE|Id3|Entity_id3|legal_basis3|reg_date3|pdf_link3|programme3|DATE3|PLACE|COUNTRY|Id4|Entity_id4|legal_basis4|reg_date4|pdf_link4|programme4|NUMBER|COUNTRY4|Id5|Entity_id5|legal_basis5|reg_date5|pdf_link5|programme5|COUNTRY5|Id6|Entity_id6|legal_basis6|reg_date6|pdf_link6|programme6|NUMBER6|STREET|ZIPCODE|CITY|COUNTRY6|OTHER \n'NEWLINENEWLINE def parse(self, response):NEWLINE with open(os.path.abspath('results/BL_EUConsolidatedList.txt'), 'a', encoding='utf-8') as f:NEWLINE f.write(self.header)NEWLINE soup = BeautifulSoup(response.text, 'xml')NEWLINE entities = soup.find_all(name='ENTITY')NEWLINE for entity in entities:NEWLINE item = BlEuconsolidatedlistItem()NEWLINE item['date'] = soup.WHOLE['Date']NEWLINE item['id'] = entity['Id']NEWLINE item['type'] = entity['Type']NEWLINE item['legal_basis'] = entity['legal_basis']NEWLINE item['reg_date'] = entity['reg_date']NEWLINE item['pdf_link'] = entity['pdf_link']NEWLINE item['programme'] = entity['programme']NEWLINE item['remark'] = entity['remark']NEWLINE name1 = entity.NAMENEWLINE item['id2'] = name1['Id']NEWLINE item['entity_id'] = name1['Entity_id']NEWLINE item['legal_basis2'] = name1['legal_basis']NEWLINE item['reg_date2'] = name1['reg_date']NEWLINE item['pdf_link2'] = name1['pdf_link']NEWLINE item['programme2'] = name1['programme']NEWLINE item['lastname'] = name1.LASTNAME.textNEWLINE item['firstname'] = name1.FIRSTNAME.textNEWLINE item['middlename'] = name1.MIDDLENAME.textNEWLINE item['wholename'] = name1.WHOLENAME.textNEWLINE item['gender'] = name1.GENDER.textNEWLINE item['title'] = name1.TITLE.textNEWLINE item['function'] = name1.FUNCTION.textNEWLINE item['language'] = name1.LANGUAGE.textNEWLINE birth = entity.BIRTHNEWLINE if birth is not None:NEWLINE item['id3'] = birth['Id']NEWLINE item['entity_id3'] = birth['Entity_id']NEWLINE item['legal_basis3'] = birth['legal_basis']NEWLINE item['reg_date3'] = birth['reg_date']NEWLINE item['pdf_link3'] = birth['pdf_link']NEWLINE item['programme3'] = birth['programme']NEWLINE item['date3'] = birth.DATE.textNEWLINE item['place'] = birth.PLACE.textNEWLINE item['country'] = birth.COUNTRY.textNEWLINE else:NEWLINE item['id3'] = ''NEWLINE item['entity_id3'] = ''NEWLINE item['legal_basis3'] = ''NEWLINE item['reg_date3'] = ''NEWLINE item['pdf_link3'] = ''NEWLINE item['programme3'] = ''NEWLINE item['date3'] = ''NEWLINE item['place'] = ''NEWLINE item['country'] = ''NEWLINE passport = entity.PASSPORTNEWLINE if passport is not None:NEWLINE item['id4'] = passport['Id']NEWLINE item['entity_id4'] = passport['Entity_id']NEWLINE item['legal_basis4'] = passport['legal_basis']NEWLINE item['reg_date4'] = passport['reg_date']NEWLINE item['pdf_link4'] = passport['pdf_link']NEWLINE item['programme4'] = passport['programme']NEWLINE item['number'] = passport.NUMBER.textNEWLINE item['country4'] = passport.COUNTRY.textNEWLINE else:NEWLINE item['id4'] = ''NEWLINE item['entity_id4'] = ''NEWLINE item['legal_basis4'] = ''NEWLINE item['reg_date4'] = 
''NEWLINE item['pdf_link4'] = ''NEWLINE item['programme4'] = ''NEWLINE item['number'] = ''NEWLINE item['country4'] = ''NEWLINE citizen = entity.CITIZENNEWLINE if citizen is not None:NEWLINE item['id5'] = citizen['Id']NEWLINE item['entity_id5'] = citizen['Entity_id']NEWLINE item['legal_basis5'] = citizen['legal_basis']NEWLINE item['reg_date5'] = citizen['reg_date']NEWLINE item['pdf_link5'] = citizen['pdf_link']NEWLINE item['programme5'] = citizen['programme']NEWLINE item['country5'] = citizen.COUNTRY.textNEWLINE else:NEWLINE item['id5'] = ''NEWLINE item['entity_id5'] = ''NEWLINE item['legal_basis5'] = ''NEWLINE item['reg_date5'] = ''NEWLINE item['pdf_link5'] = ''NEWLINE item['programme5'] = ''NEWLINE item['country5'] = ''NEWLINE address = entity.ADDRESSNEWLINE if address is not None:NEWLINE item['id6'] = address['Id']NEWLINE item['entity_id6'] = address['Entity_id']NEWLINE item['legal_basis6'] = address['legal_basis']NEWLINE item['reg_date6'] = address['reg_date']NEWLINE item['pdf_link6'] = address['pdf_link']NEWLINE item['programme6'] = address['programme']NEWLINE item['number6'] = address.NUMBER.textNEWLINE item['street'] = address.STREET.textNEWLINE item['zipcode'] = address.ZIPCODE.textNEWLINE item['city'] = address.CITY.textNEWLINE item['country6'] = address.COUNTRY.textNEWLINE item['other'] = address.OTHER.textNEWLINE else:NEWLINE item['id6'] = ''NEWLINE item['entity_id6'] = ''NEWLINE item['legal_basis6'] = ''NEWLINE item['reg_date6'] = ''NEWLINE item['pdf_link6'] = ''NEWLINE item['programme6'] = ''NEWLINE item['number6'] = ''NEWLINE item['street'] = ''NEWLINE item['zipcode'] = ''NEWLINE item['city'] = ''NEWLINE item['country6'] = ''NEWLINE item['other'] = ''NEWLINE yield itemNEWLINE |
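For reference, a minimal way to run this spider programmatically instead of via the `scrapy crawl BL_EUConsolidatedList` CLI. It assumes the snippet runs inside the black_list project (same module as the spider class), so the project settings and item pipeline are picked up:

from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

process = CrawlerProcess(get_project_settings())
process.crawl(BleuconsolidatedlistSpider)
process.start()  # blocks until the crawl finishes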
from flask import *NEWLINEimport hashlibNEWLINEfrom pymongo import *NEWLINEimport stringNEWLINEimport datetimeNEWLINEimport reNEWLINEfrom flask_cors import *NEWLINENEWLINEapp = Flask(__name__)NEWLINEcors = CORS(app)NEWLINENEWLINE@app.errorhandler(404)NEWLINEdef page_not_found(e):NEWLINE return "error",404NEWLINENEWLINE@app.route('/')NEWLINE@cross_origin()NEWLINEdef index():NEWLINE return render_template('form.html')NEWLINENEWLINE@app.route('/upload')NEWLINEdef upload():NEWLINE return render_template('upload.html')NEWLINENEWLINE@app.route('/cat')NEWLINEdef cat():NEWLINE return render_template('cat.html')NEWLINENEWLINEclient = MongoClient(port=27017)NEWLINEdb=client.cc_assignment.usersNEWLINEcat = client.cc_assignment.categoriesNEWLINEact = client.cc_assignment.actNEWLINENEWLINEdef getNextSequence(collection,name):NEWLINE collection.update_one( { '_id': name },{ '$inc': {'seq': 1}})NEWLINE return int(collection.find_one({'_id':name})["seq"])NEWLINENEWLINE#api 1NEWLINE@app.route('/api/v1/users', methods=['POST'])NEWLINEdef process():NEWLINE j = request.get_json()NEWLINE name = j['name']NEWLINE password = j['password']NEWLINE if( len(password) != 40 or not all(c in string.hexdigits for c in password)):NEWLINE return jsonify({'code' : 600,"text":"not in sha1"}),400NEWLINENEWLINE if name and password and password != "da39a3ee5e6b4b0d3255bfef95601890afd80709":NEWLINE if(db.count_documents({"name":name})>0):NEWLINE print("username already exist")NEWLINE return jsonify({'code' : 405,"text":"username already exist"}),400NEWLINE result=db.insert_one({'userId': getNextSequence(client.cc_assignment.orgid_counter,"userId"), 'name': name, 'password' : password })NEWLINE return jsonify({'code' : 201}),201NEWLINE print("empty fields")NEWLINE return jsonify({'code' : 400,"text":"empty fields"}),400NEWLINENEWLINE#api 2NEWLINE@app.route('/api/v1/users/<username>', methods=['DELETE'])NEWLINEdef userdelete(username):NEWLINE if(db.count_documents({"name":username})>0):NEWLINE db.delete_one({"name":username})NEWLINE return jsonify({'code':200}),200NEWLINE else:NEWLINE print("api 2 user does not exist")NEWLINE return jsonify({'code':404,"text":"user does not exist" }),400NEWLINENEWLINE#api 3NEWLINE@app.route('/api/v1/categories', methods=['GET'])NEWLINEdef categorieAdd():NEWLINE j = cat.find()NEWLINE d = dict()NEWLINE for x in j:NEWLINE d[x['catName']]=x['size']NEWLINE return jsonify(d),200NEWLINENEWLINE#api 4NEWLINE@app.route('/api/v1/categories', methods=['POST'])NEWLINEdef categorieList():NEWLINE j = re.search("[A-Za-z0-9 _]+",(request.get_data().decode('utf-8')))NEWLINE if(j is None):NEWLINE print("empty input")NEWLINE return jsonify({'code':400}),400NEWLINE j = j.group(0)NEWLINE if(cat.count_documents({"catName":j})>0):NEWLINE print("categoryName already exist")NEWLINE return jsonify({'code':404}),400NEWLINE result=cat.insert_one({'catId': getNextSequence(client.cc_assignment.orgid_counter,"catId"), 'catName':j , 'size' : 0 })NEWLINE return jsonify({'code':200}),201NEWLINENEWLINE#api 5NEWLINE@app.route('/api/v1/categories/<categories>', methods=['DELETE'])NEWLINEdef catdelete(categories):NEWLINE if(cat.count_documents({"catName":categories})>0):NEWLINE cat.delete_one({"catName":categories})NEWLINE return jsonify({'code':200}),200NEWLINE else:NEWLINE print("categoryName does not exists")NEWLINE return jsonify({'code':404}),400NEWLINENEWLINE#api 6 and 7NEWLINE@app.route('/api/v1/categories/<categoryName>/acts', methods=['GET'])NEWLINEdef catactsizeprint(categoryName):NEWLINE start = request.args.get("start")NEWLINE end = request.args.get("end")NEWLINE
if(not cat.count_documents({"catName":categoryName})>0):NEWLINE print("categoryName does not exists")NEWLINE return jsonify({"code": 400}),400NEWLINE if(start is None and end is None):NEWLINE d = dict()NEWLINE j = cat.find_one({"catName" : categoryName})NEWLINE if(j['size'] < 100):NEWLINE l = list()NEWLINE if(act.count_documents({"catName":categoryName}) == 0):NEWLINE print("empty category")NEWLINE return jsonify({'code':404}),204NEWLINE v = act.find({"catName" : categoryName},{"_id":0,"catName":0})NEWLINE for x in v:NEWLINE l.append(x)NEWLINE return jsonify(l),200NEWLINE else:NEWLINE print("more than 100 asked api6")NEWLINE return jsonify({"code":413}),413NEWLINE if(start is None or end is None):NEWLINE print("start or end missing")NEWLINE return jsonify({"code":1400}),400NEWLINE else :NEWLINE start = int(start)NEWLINE end = int(end)NEWLINE if(start > end or (start<0 or end <0)):NEWLINE print("start and end values and not correct")NEWLINE return jsonify({"code":1600}),400NEWLINE else :NEWLINE diff = end-start + 1NEWLINE k = 1NEWLINE ll = list()NEWLINE val = act.count_documents({"catName":categoryName})NEWLINE if(val < diff):NEWLINE print("start and end values and not correct")NEWLINE return jsonify({"code":1500}),400NEWLINE if(diff >100):NEWLINE print("more values than given or more than 100 values")NEWLINE return jsonify({"code" : 1400,"text":"more than 100 values "}),413NEWLINE if(val == 0):NEWLINE return jsonify({'code':1404}),204NEWLINE v = act.find({"catName" : categoryName},{"_id":0}).sort([("timestamp",-1)])NEWLINE for x in v:NEWLINE if(k <= diff):NEWLINE ll.append(x)NEWLINE k = k + 1NEWLINE return jsonify(ll),200NEWLINENEWLINE#api 8NEWLINE@app.route('/api/v1/categories/<categories>/acts/size', methods=['GET'])NEWLINEdef catactsize(categories):NEWLINE if(not cat.count_documents({"catName":categories})>0):NEWLINE return jsonify({"code": 400}),400NEWLINE else:NEWLINE j = cat.find({"catName" : categories})NEWLINE for x in j:NEWLINE l = x['size']NEWLINE if (l == 0 ):NEWLINE return jsonify({"code":411}),204NEWLINE return jsonify(x['size'])NEWLINENEWLINE#api 9NEWLINE@app.route('/api/v1/acts/upvote', methods=['POST'])NEWLINEdef upvote():NEWLINE if(request.get_data().decode('utf-8') == "[]"):NEWLINE return jsonify({"code": 410}),400 NEWLINE j = re.search("[0-9]+",(request.get_data().decode('utf-8')))NEWLINE j = j.group(0)NEWLINE print(j)NEWLINE if(not act.count_documents({"actId":int(j)})>0):NEWLINE return jsonify({"code": 400}),400NEWLINE else:NEWLINE act.update_one( { 'actId': int(j)},{ '$inc': {'upvote': 1}})NEWLINE return jsonify({"code": 200})NEWLINENEWLINE#api 10NEWLINE@app.route('/api/v1/acts/<actId>', methods=['DELETE'])NEWLINEdef actDelete(actId):NEWLINE if(not act.count_documents({"actId":int(actId)})>0):NEWLINE return jsonify({"code": 400}),400NEWLINE else:NEWLINE j = act.find({"actId":int(actId)},{"_id":0})NEWLINE for i in j:NEWLINE l=(i["catName"])NEWLINE print(l)NEWLINE cat.update_one({ 'catName':l },{ '$inc': {'size': -1}})NEWLINE act.delete_one({"actId":int(actId)})NEWLINE return jsonify({'code':200})NEWLINENEWLINENEWLINEdef validateDateTime(date_text):NEWLINE try:NEWLINE datetime.datetime.strptime(date_text, '%d-%m-%Y:%S-%M-%H')NEWLINE return TrueNEWLINE except ValueError:NEWLINE return FalseNEWLINEdef validateBase64(data_text):NEWLINE data_text = data_text.split(",")[1]NEWLINE # match the whole payload, not just a single character, so malformed strings are rejectedNEWLINE if(re.fullmatch("[A-Za-z0-9+/=]+", data_text) and len(data_text)%4==0):NEWLINE return TrueNEWLINE else:NEWLINE return FalseNEWLINE#api 11NEWLINE@app.route('/api/v1/acts', methods=['POST'])NEWLINEdef
actUpload():NEWLINE j = request.get_json()NEWLINE #to validate unique IDNEWLINE if(act.count_documents({"actId":j['actId']})>0):NEWLINE return jsonify({"code":405}),400NEWLINE #to validate timestampNEWLINE if not validateDateTime(j['timestamp']):NEWLINE return jsonify({"code":406}),400NEWLINE #to validate user existsNEWLINE if(not db.count_documents({"name":j['username']})>0):NEWLINE return jsonify({"code":407}),400NEWLINE #to validate Base64 codeNEWLINE if(not validateBase64(j['imgB64'])):NEWLINE return jsonify({"code":408}),400NEWLINE #to validate upvoteNEWLINE if("upvote" in j):NEWLINE return jsonify({"code":409}),400NEWLINE #to validate that cat existsNEWLINE if(not cat.count_documents({"catName":j['categoryName']})>0):NEWLINE return jsonify({"code":410}),400NEWLINENEWLINE result=act.insert_one({'actId':j['actId'] , 'username': j['username'], 'timestamp' : j['timestamp'], 'caption':j['caption'], 'catName':j['categoryName'], 'imgB64':j['imgB64'], 'upvote':0 })NEWLINE cat.update_one({ 'catName':j['categoryName'] },{ '$inc': {'size': 1}})NEWLINE client.cc_assignment.orgid_counter.update_one( {'_id':"actId"},{'$inc': {'seq': 1}})NEWLINE return jsonify({'code':200}),201NEWLINENEWLINE# helper api'sNEWLINE# get act idNEWLINE@app.route('/api/get/actId')NEWLINEdef actid():NEWLINE f = client.cc_assignment.orgid_counter.find_one({"_id":"actId"})NEWLINE return jsonify(f['seq'])NEWLINENEWLINE#down voteNEWLINE@app.route('/api/v1/acts/downvote', methods=['POST'])NEWLINEdef downvote():NEWLINE j = request.get_json()NEWLINE if(not act.count_documents({"actId":j['actId']})>0):NEWLINE return jsonify({"code": 400}),400NEWLINE else:NEWLINE act.update_one( { 'actId': j['actId'] },{ '$inc': {'upvote': -1}})NEWLINE return jsonify({"code": 200})NEWLINENEWLINE#loginNEWLINE@app.route('/api/v1/users/login', methods=['POST'])NEWLINEdef processes():NEWLINE j = request.get_json()NEWLINE name = j['name']NEWLINE password = j['password']NEWLINE if( len(password) != 40 or not all(c in string.hexdigits for c in password) ):NEWLINE return jsonify({'code' : 600 ,"text" :"Sha1 error"}),200NEWLINENEWLINE if name and password and password != "da39a3ee5e6b4b0d3255bfef95601890afd80709":NEWLINE if(db.count_documents({"name":name})<=0):NEWLINE return jsonify({'code' : 405 ,"text" :"login fail"}),400NEWLINENEWLINE v = db.find_one({'name': name},{"_id":0})NEWLINE return jsonify({'code' : 201,"text" :"Successful login","userId":v["userId"]}),201NEWLINE return jsonify({'code' : 400,"text" :"data missing"}),400NEWLINENEWLINE#get list of usersNEWLINE@app.route('/api/v1/userlist', methods=['GET'])NEWLINEdef listuser():NEWLINE j = db.find()NEWLINE d = dict()NEWLINE for x in j:NEWLINE d[x['name']]=x['userId']NEWLINE return jsonify(d)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE app.run(host='0.0.0.0',port=80,debug = True)NEWLINE
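A hedged client-side sketch of the registration endpoint above: the service validates that the password is a 40-character SHA-1 hex digest (and rejects the digest of the empty string), so a client hashes before posting. Host and port match the app.run() call; the user name and plaintext are illustrative:

import hashlib
import requests

payload = {
    'name': 'alice',  # illustrative user
    'password': hashlib.sha1(b's3cret').hexdigest(),  # 40 hex characters
}
r = requests.post('http://localhost:80/api/v1/users', json=payload)
print(r.status_code, r.json())  # expect 201 and {'code': 201} on success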
# https://wiki.freecadweb.org/Scripting_examplesNEWLINE# https://wiki.freecadweb.org/Scripted_objects_saving_attributesNEWLINENEWLINE# various_states.pyNEWLINEclass VariousStates:NEWLINE def __init__(self, obj):NEWLINE obj.addProperty("App::PropertyLength", "Length")NEWLINE obj.addProperty("App::PropertyArea", "Area")NEWLINE obj.Length = 15NEWLINE obj.Area = 300NEWLINE obj.Proxy = selfNEWLINENEWLINE Type = dict()NEWLINE Type["Version"] = "Custom"NEWLINE Type["Release"] = "production"NEWLINE self.Type = TypeNEWLINE self.Type = "Custom"NEWLINE self.ver = "0.18"NEWLINE self.color = (0, 0, 1)NEWLINE self.width = 2.5NEWLINENEWLINE def execute(self, obj):NEWLINE passNEWLINENEWLINEimport FreeCAD as AppNEWLINEimport various_statesNEWLINENEWLINEdoc = App.newDocument()NEWLINEdoc.FileName = "my_document.FCStd"NEWLINENEWLINEobj = doc.addObject("Part::FeaturePython", "Custom")NEWLINEvarious_states.VariousStates(obj)NEWLINENEWLINEif App.GuiUp:NEWLINE obj.ViewObject.Proxy = 1NEWLINENEWLINEdoc.recompute()NEWLINEdoc.save()NEWLINENEWLINEobj = App.ActiveDocument.CustomNEWLINENEWLINEprint(obj.Proxy)NEWLINENEWLINEprint(obj.Proxy.__dict__)NEWLINENEWLINE# various_states.pyNEWLINEclass CustomStates:NEWLINE def __init__(self, obj):NEWLINE obj.addProperty("App::PropertyLength", "Length")NEWLINE obj.addProperty("App::PropertyArea", "Area")NEWLINE obj.Length = 15NEWLINE obj.Area = 300NEWLINE obj.Proxy = selfNEWLINENEWLINE Type = dict()NEWLINE Type["Version"] = "Custom"NEWLINE Type["Release"] = "production"NEWLINE self.Type = TypeNEWLINE self.ver = "0.18"NEWLINE self.color = (0, 0, 1)NEWLINE self.width = 2.5NEWLINENEWLINE def execute(self, obj):NEWLINE passNEWLINENEWLINE def __getstate__(self):NEWLINE return self.color, self.widthNEWLINENEWLINE def __setstate__(self, state):NEWLINE self.color = state[0]NEWLINE self.width = state[1]NEWLINENEWLINE# Illustration only: the tuple __getstate__ above hands to the serializerNEWLINE# when the document is saved, i.e.NEWLINE# state = (self.color, self.width)NEWLINE# state = ((0, 0, 1), 2.5)NEWLINENEWLINE# Assumes a second object named "Custom2" was created with CustomStates as its proxyNEWLINEobj2 = App.ActiveDocument.Custom2NEWLINENEWLINEprint(obj2.Proxy)NEWLINENEWLINEprint(obj2.Proxy.__dict__)NEWLINENEWLINEclass DraftObject:NEWLINE def __init__(self, obj, _type):NEWLINE self.Type = _typeNEWLINENEWLINE def __getstate__(self):NEWLINE return self.TypeNEWLINENEWLINE def __setstate__(self, state):NEWLINE if state:NEWLINE self.Type = stateNEWLINENEWLINEclass CustomObject:NEWLINE def __init__(self, obj, _type):NEWLINE self.Type = _typeNEWLINE self.version = "0.18"NEWLINENEWLINE def __getstate__(self):NEWLINE return self.Type, self.versionNEWLINENEWLINE def __setstate__(self, state):NEWLINE if state:NEWLINE self.Type = state[0]NEWLINE self.version = state[1]NEWLINENEWLINE# Redefined for illustration: the same class with a restore-time migration hookNEWLINEclass CustomObject:NEWLINE def onDocumentRestored(self, obj):NEWLINE if hasattr(obj.Proxy, "version") and obj.Proxy.version:NEWLINE if obj.Proxy.version == "0.18":NEWLINE self.migrate_from_018(obj)NEWLINENEWLINE
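onDocumentRestored above calls migrate_from_018 without defining it; a hypothetical sketch of what such a migration hook might do (the property added and the 0.19 version bump are illustrative, not from the wiki page):

class CustomObjectMigrating:
    def onDocumentRestored(self, obj):
        if hasattr(obj.Proxy, "version") and obj.Proxy.version:
            if obj.Proxy.version == "0.18":
                self.migrate_from_018(obj)

    def migrate_from_018(self, obj):
        # Illustrative only: add a property introduced after 0.18 and
        # record the new version so the migration runs only once.
        if not hasattr(obj, "Area"):
            obj.addProperty("App::PropertyArea", "Area")
        obj.Proxy.version = "0.19"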
"""NEWLINEAbstract agent for the gym-idsgame environmentNEWLINE"""NEWLINEfrom abc import ABCNEWLINE#from gym_idsgame.envs.dao.game_config import GameConfigNEWLINENEWLINEclass Agent(ABC):NEWLINE """NEWLINE Abstract class representing an agentNEWLINE """NEWLINENEWLINE def __init__(self, game_config):NEWLINE """NEWLINE Class constructorNEWLINENEWLINE :param game_config: the game configurationNEWLINE """NEWLINE self.game_config = game_configNEWLINE # if self.game_config is None:NEWLINE # self.game_config = GameConfig()NEWLINE |
#!/usr/bin/env pythonNEWLINE# -*- coding: utf8 -*-NEWLINENEWLINE"""Initialize userNEWLINE"""NEWLINENEWLINEimport sysNEWLINEsys.path.insert(0, '../')NEWLINENEWLINEfrom app.models import UserNEWLINEfrom app.models import dbNEWLINENEWLINE_admin0 = {'name': 'uadmin', 'hash': '$6$rounds=656000$BGPNku.GTxUFp5/m$z2VoGUbOzZfjEq2TnQjyK4Ho47MYCEHEK5N/TjpgzNuLWOJHwoeIA3AUbbDSMEvQBdqtEv1Vez1OXAYtYc4r80'}NEWLINENEWLINEuser0 = User(nickname=_admin0['name'], password_hash=_admin0['hash'], NEWLINE email='admin@localhost', id=1)NEWLINENEWLINE# default admin accountNEWLINEdb.session.add(user0)NEWLINEdb.session.commit()NEWLINE |
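For context, the password_hash above is a sha512-crypt string ($6$ with 656000 rounds). One way to generate a compatible hash for a new admin password is passlib's sha512_crypt; the plaintext here is a placeholder:

from passlib.hash import sha512_crypt

new_hash = sha512_crypt.using(rounds=656000).hash('change-me')
print(new_hash)  # '$6$rounds=656000$...'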
import numpy as npNEWLINENEWLINEfrom yt.funcs import mylogNEWLINEfrom yt.utilities.io_handler import BaseIOHandlerNEWLINENEWLINEfrom .data_structures import chk23NEWLINENEWLINEfloat_size = {"float": np.dtype(">f4").itemsize, "double": np.dtype(">f8").itemsize}NEWLINENEWLINEaxis_list = ["_x", "_y", "_z"]NEWLINENEWLINENEWLINEclass IOHandlerAthena(BaseIOHandler):NEWLINE _dataset_type = "athena"NEWLINE _offset_string = "data:offsets=0"NEWLINE _data_string = "data:datatype=0"NEWLINE _read_table_offset = NoneNEWLINENEWLINE def _field_dict(self, fhandle):NEWLINE keys = fhandle["field_types"].keys()NEWLINE val = fhandle["field_types"].keys()NEWLINE return dict(zip(keys, val))NEWLINENEWLINE def _read_field_names(self, grid):NEWLINE passNEWLINENEWLINE def _read_chunk_data(self, chunk, fields):NEWLINE data = {}NEWLINE if len(chunk.objs) == 0:NEWLINE return dataNEWLINE for grid in chunk.objs:NEWLINE if grid.filename is None:NEWLINE continueNEWLINE f = open(grid.filename, "rb")NEWLINE data[grid.id] = {}NEWLINE grid_dims = grid.ActiveDimensionsNEWLINE read_dims = grid.read_dims.astype("int64")NEWLINE grid_ncells = np.prod(read_dims)NEWLINE grid0_ncells = np.prod(grid.index.grids[0].read_dims)NEWLINE read_table_offset = get_read_table_offset(f)NEWLINE for field in fields:NEWLINE ftype, offsetr, dtype = grid.index._field_map[field]NEWLINE if grid_ncells != grid0_ncells:NEWLINE offset = offsetr + (NEWLINE (grid_ncells - grid0_ncells) * (offsetr // grid0_ncells)NEWLINE )NEWLINE if grid_ncells == grid0_ncells:NEWLINE offset = offsetrNEWLINE offset = int(offset) # Casting to be certain.NEWLINE file_offset = (NEWLINE grid.file_offset[2]NEWLINE * read_dims[0]NEWLINE * read_dims[1]NEWLINE * float_size[dtype]NEWLINE )NEWLINE xread = slice(grid.file_offset[0], grid.file_offset[0] + grid_dims[0])NEWLINE yread = slice(grid.file_offset[1], grid.file_offset[1] + grid_dims[1])NEWLINE f.seek(read_table_offset + offset + file_offset)NEWLINE if dtype == "float":NEWLINE dt = ">f4"NEWLINE elif dtype == "double":NEWLINE dt = ">f8"NEWLINE if ftype == "scalar":NEWLINE f.seek(read_table_offset + offset + file_offset)NEWLINE v = np.fromfile(f, dtype=dt, count=grid_ncells).reshape(NEWLINE read_dims, order="F"NEWLINE )NEWLINE if ftype == "vector":NEWLINE vec_offset = axis_list.index(field[-1][-2:])NEWLINE f.seek(read_table_offset + offset + 3 * file_offset)NEWLINE v = np.fromfile(f, dtype=dt, count=3 * grid_ncells)NEWLINE v = v[vec_offset::3].reshape(read_dims, order="F")NEWLINE if grid.ds.field_ordering == 1:NEWLINE data[grid.id][field] = v[xread, yread, :].T.astype("float64")NEWLINE else:NEWLINE data[grid.id][field] = v[xread, yread, :].astype("float64")NEWLINE f.close()NEWLINE return dataNEWLINENEWLINE def _read_data_slice(self, grid, field, axis, coord):NEWLINE sl = [slice(None), slice(None), slice(None)]NEWLINE sl[axis] = slice(coord, coord + 1)NEWLINE if grid.ds.field_ordering == 1:NEWLINE sl.reverse()NEWLINE return self._read_data_set(grid, field)[tuple(sl)]NEWLINENEWLINE def _read_fluid_selection(self, chunks, selector, fields, size):NEWLINE chunks = list(chunks)NEWLINE if any((ftype != "athena" for ftype, fname in fields)):NEWLINE raise NotImplementedErrorNEWLINE rv = {}NEWLINE for field in fields:NEWLINE rv[field] = np.empty(size, dtype="float64")NEWLINE ng = sum(len(c.objs) for c in chunks)NEWLINE mylog.debug(NEWLINE "Reading %s cells of %s fields in %s grids",NEWLINE size,NEWLINE [f2 for f1, f2 in fields],NEWLINE ng,NEWLINE )NEWLINE ind = 0NEWLINE for chunk in chunks:NEWLINE data = self._read_chunk_data(chunk, 
fields)NEWLINE for g in chunk.objs:NEWLINE for field in fields:NEWLINE ftype, fname = fieldNEWLINE ds = data[g.id].pop(field)NEWLINE nd = g.select(selector, ds, rv[field], ind) # cachesNEWLINE ind += ndNEWLINE data.pop(g.id)NEWLINE return rvNEWLINENEWLINENEWLINEdef get_read_table_offset(f):NEWLINE line = f.readline()NEWLINE while True:NEWLINE splitup = line.strip().split()NEWLINE chkc = chk23("CELL_DATA")NEWLINE chkp = chk23("POINT_DATA")NEWLINE if chkc in splitup or chkp in splitup:NEWLINE f.readline()NEWLINE read_table_offset = f.tell()NEWLINE breakNEWLINE line = f.readline()NEWLINE return read_table_offsetNEWLINE |
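This handler is not used directly; yt invokes it when an Athena dump is loaded and fields are read through a data object. A hedged usage sketch (the filename and field name are illustrative, not taken from this file):

import yt

ds = yt.load('kh.0010.vtk')           # dispatches to the athena frontend
ad = ds.all_data()
print(ad['athena', 'density'].min())  # reads go through IOHandlerAthena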
'''cron function.NEWLINENEWLINEScans the user table and, for each user, invokes one lambda checker.NEWLINEIf the check generates an alert, invokes mailer lambda to notify the user.'''NEWLINEfrom sslnotifyme import (lambda_db, lambda_checker, lambda_main_wrapper, LOGGER)NEWLINENEWLINENEWLINEclass Cron(object):NEWLINE '''Cron object class.'''NEWLINENEWLINE @staticmethodNEWLINE def scan_and_notify_alerts_queue():NEWLINE '''Trigger a lambda checker for each validated user.'''NEWLINE counter = 0NEWLINE for record in lambda_db('get_validated_users').get('response'):NEWLINE lambda_checker('check_and_send_alert', record)NEWLINE counter += 1NEWLINE msg = '%d record(s) processed successfully' % counterNEWLINE LOGGER.info(msg)NEWLINE return {'response': msg}NEWLINENEWLINENEWLINE# pylint: disable=unused-argumentNEWLINEdef lambda_main(event, context):NEWLINE '''Lambda entry point.'''NEWLINE return lambda_main_wrapper(event, Cron,NEWLINE default=['scan_and_notify_alerts_queue'])NEWLINE |
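A hedged note on invocation: in AWS Lambda, lambda_main is wired as the function entry point; locally one might smoke-test it with an empty event, on the assumption that lambda_main_wrapper falls back to the default action list when the event names none:

if __name__ == '__main__':
    print(lambda_main({}, None))  # expected to run scan_and_notify_alerts_queue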
#Ben KotrcNEWLINE#1/26/2016NEWLINE#https://github.com/kotrc/GTFS-route-shapesNEWLINE#This script takes an expanded GTFS file and generates a new file,NEWLINE#route_shapes1.geojson, that contains one geojson MultiLineString for eachNEWLINE#entry in the GTFS routes.txt table. This represents the map shape forNEWLINE#each route, including possible variations/line branches/etc, simplifiedNEWLINE#using the Douglas-Peucker algorithm for a minimal resulting file size.NEWLINENEWLINE#Approach:NEWLINE#For each route,NEWLINE#1 - Get the set of shapes corresponding to the route;NEWLINE#2 - Select the longest shape (with the most coordinate pairs);NEWLINE#3 - Draw a buffer around this longest shape;NEWLINE#4 - For each remaining shape,NEWLINE#a - Remove points from the shape within the buffered area,NEWLINE#b - Add the remaining shape to the longest shape as an additionalNEWLINE# LineString in the MultiLineStringNEWLINENEWLINE#Run this from within a directory containing the GTFS csv files.NEWLINENEWLINE#pandas lets us use data frames to load and store the GTFS tablesNEWLINEimport pandas as pdNEWLINE#geojson lets us construct and dump geojson objectsNEWLINEimport geojson as gjNEWLINE#shapely lets us manipulate geometric objectsNEWLINEimport shapely.geometry as shNEWLINENEWLINENEWLINE#Read relevant GTFS tables to pandas dataframesNEWLINEstops = pd.read_csv('stops.txt')NEWLINEshapes = pd.read_csv('shapes.txt')NEWLINEroutes = pd.read_csv('routes.txt')NEWLINEstop_times = pd.read_csv('stop_times.txt')NEWLINEtrips = pd.read_csv('trips.txt')NEWLINENEWLINE#Join routes table to trips table on route_idNEWLINEroutes_trips = pd.merge(routes, trips, on='route_id', how='inner')NEWLINE#Join this table to shapes on shape_idNEWLINEroutes_trips_shapes = pd.merge(routes_trips, shapes, on='shape_id',NEWLINE how='inner')NEWLINENEWLINE#Now we want to get rid of all the columns we don't needNEWLINE#These are the ones we want:NEWLINEcolsretain = ['route_id',NEWLINE 'agency_id',NEWLINE 'route_short_name',NEWLINE 'route_long_name',NEWLINE 'shape_id',NEWLINE 'shape_pt_lat',NEWLINE 'shape_pt_lon',NEWLINE 'shape_pt_sequence',NEWLINE 'shape_dist_traveled']NEWLINE#These are the ones we have:NEWLINEcolshave = routes_trips_shapes.columns.valuesNEWLINE#These are the ones we no longer wantNEWLINEto_drop = list(set(colshave) - set(colsretain))NEWLINE#Drop them from the dataFrameNEWLINEroutes_trips_shapes = routes_trips_shapes.drop(to_drop, axis=1)NEWLINENEWLINE#Since we've thrown out all the columns dealing with trips, there will be a lotNEWLINE#of duplicate rows. 
Let's get rid of those.NEWLINEroutes_trips_shapes = routes_trips_shapes.drop_duplicates()NEWLINENEWLINE#Create a list to hold each route's shape to write them to file at the end:NEWLINEroute_shape_list = list()NEWLINENEWLINE#Go through each routeNEWLINEfor route_id in routes_trips_shapes['route_id'].unique():NEWLINE #Get the set of shapes corresponding to this route_idNEWLINE shape_ids = set(routes_trips_shapes[routes_trips_shapes['route_id']NEWLINE == route_id]['shape_id'])NEWLINE #First, find the longest shape for this routeNEWLINE #Call the first shape in this route the longest to start withNEWLINE longest = shape_ids.pop()NEWLINE shape_ids.add(longest)NEWLINE #Keep track of how many points the longest shape hasNEWLINE longest_pt_num = shapes[shapes['shape_id']NEWLINE == longest]['shape_pt_sequence'].count()NEWLINE #Go through each shape in this routeNEWLINE for shape_id in shape_ids:NEWLINE #If this shape has more points than the longest shape so farNEWLINE if shapes[shapes['shape_id']NEWLINE == shape_id]['shape_pt_sequence'].count() > longest_pt_num:NEWLINE #Designate this shape as the longestNEWLINE longest = shape_idNEWLINE #And keep track of the number of pointsNEWLINE longest_pt_num = shapes[shapes['shape_id']NEWLINE == shape_id]['shape_pt_sequence'].count()NEWLINE #End loop through each shape in this routeNEWLINE #Now that we have the longest shape for the route, create a shapelyNEWLINE #LineString for this route IDNEWLINE multiline = sh.LineString(zip(shapes[shapes['shape_id']NEWLINE == longest]['shape_pt_lat'].tolist(),NEWLINE shapes[shapes['shape_id'] == longest]['shape_pt_lon'].tolist()))NEWLINE #Now let's add the parts of the other shapes that don't overlap with thisNEWLINE #longest shape to create a MultiLineString collectionNEWLINE #First create an area within which we'll reject additional pointsNEWLINE #(this buffer--0.0001 deg--is about 30m, or about the width of Mass Ave)NEWLINE area = multiline.buffer(0.0001)NEWLINE #Get the set of shapes (other than the longest one) to loop overNEWLINE shorter_shape_ids = shape_idsNEWLINE shorter_shape_ids.remove(longest)NEWLINE #Now to go through them, and add only the points from each shape thatNEWLINE #aren't in the area.NEWLINE for shape_id in shorter_shape_ids:NEWLINE #Get the current shape as a shapely shapeNEWLINE this_shape = sh.LineString(zip(shapes[shapes['shape_id']NEWLINE == shape_id]['shape_pt_lat'].tolist(),NEWLINE shapes[shapes['shape_id'] == shape_id]['shape_pt_lon'].tolist()))NEWLINE #Is this shape entirely within the existing area?NEWLINE if not this_shape.within(area):NEWLINE #If there are points outside the area, add to the MultiLineStringNEWLINE new_part = this_shape.difference(area)NEWLINE #Now add this new bit to the MultiLineStringNEWLINE multiline = multiline.union(new_part)NEWLINE #Now update the testing area to include this new lineNEWLINE area = multiline.buffer(0.0001)NEWLINE #Now we have a shapely MultiLineString object with the lines makingNEWLINE #up the shape of this route. 
Next, simplify that object:NEWLINE tolerance = 0.00005NEWLINE simplified_multiline = multiline.simplify(tolerance, preserve_topology=False)NEWLINE #Turn the MultiLine into a geoJSON feature object, and add it to the listNEWLINE #of features that'll be written to file as a featurecollection at the endNEWLINE route_shape_list.append(gj.Feature(geometry=simplified_multiline,NEWLINE properties={"route_id": route_id}))NEWLINE #End of loop through all routesNEWLINENEWLINE#Finally, write our collection of Features (one for each route) to file inNEWLINE#geoJSON format, as a FeatureCollection:NEWLINEwith open('route_shapes1.geojson', 'w') as outfile:NEWLINE gj.dump(gj.FeatureCollection(route_shape_list), outfile) |
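A toy, self-contained illustration (separate from the pipeline above) of the buffer/difference/union step: points of a second shape that fall inside the buffered corridor are discarded, and only the genuinely new branch is merged in.

from shapely.geometry import LineString

main = LineString([(0, 0), (0, 1), (0, 2)])
branch = LineString([(0, 0), (0, 1), (1, 1)])  # shares the first segment

area = main.buffer(0.0001)
new_part = branch.difference(area)  # keeps only the spur towards (1, 1)
merged = main.union(new_part)       # becomes a MultiLineString
print(merged.geom_type)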
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE# ==============================================================================NEWLINE"""Functions for computing statistics of samples."""NEWLINENEWLINEfrom __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom tensorflow.python.framework import dtypesNEWLINEfrom tensorflow.python.framework import opsNEWLINEfrom tensorflow.python.framework import tensor_utilNEWLINEfrom tensorflow.python.ops import array_opsNEWLINEfrom tensorflow.python.ops import check_opsNEWLINEfrom tensorflow.python.ops import control_flow_opsNEWLINEfrom tensorflow.python.ops import math_opsNEWLINEfrom tensorflow.python.ops import nn_opsNEWLINEfrom tensorflow.python.ops import spectral_opsNEWLINEfrom tensorflow.python.ops.distributions import utilNEWLINENEWLINE__all__ = [NEWLINE "auto_correlation",NEWLINE "percentile",NEWLINE]NEWLINENEWLINENEWLINE# TODO(langmore) Write separate versions of this for real/complex dtype, takingNEWLINE# advantage of optimized real-fft ops.NEWLINEdef auto_correlation(NEWLINE x,NEWLINE axis=-1,NEWLINE max_lags=None,NEWLINE center=True,NEWLINE normalize=True,NEWLINE name="auto_correlation"):NEWLINE """Auto correlation along one axis.NEWLINENEWLINE Given a `1-D` wide sense stationary (WSS) sequence `X`, the auto correlationNEWLINE `RXX` may be defined as (with `E` expectation and `Conj` complex conjugate)NEWLINENEWLINE ```NEWLINE RXX[m] := E{ W[m] Conj(W[0]) } = E{ W[0] Conj(W[-m]) },NEWLINE W[n] := (X[n] - MU) / S,NEWLINE MU := E{ X[0] },NEWLINE S**2 := E{ (X[0] - MU) Conj(X[0] - MU) }.NEWLINE ```NEWLINENEWLINE This function takes the viewpoint that `x` is (along one axis) a finiteNEWLINE sub-sequence of a realization of (WSS) `X`, and then uses `x` to produce anNEWLINE estimate of `RXX[m]` as follows:NEWLINENEWLINE After extending `x` from length `L` to `inf` by zero padding, the autoNEWLINE correlation estimate `rxx[m]` is computed for `m = 0, 1, ..., max_lags` asNEWLINENEWLINE ```NEWLINE rxx[m] := (L - m)**-1 sum_n w[n + m] Conj(w[n]),NEWLINE w[n] := (x[n] - mu) / s,NEWLINE mu := L**-1 sum_n x[n],NEWLINE s**2 := L**-1 sum_n (x[n] - mu) Conj(x[n] - mu)NEWLINE ```NEWLINENEWLINE The error in this estimate is proportional to `1 / sqrt(len(x) - m)`, so usersNEWLINE often set `max_lags` small enough so that the entire output is meaningful.NEWLINENEWLINE Note that since `mu` is an imperfect estimate of `E{ X[0] }`, and we divide byNEWLINE `len(x) - m` rather than `len(x) - m - 1`, our estimate of auto correlationNEWLINE contains a slight bias, which goes to zero as `len(x) - m --> infinity`.NEWLINENEWLINE Args:NEWLINE x: `float32` or `complex64` `Tensor`.NEWLINE axis: Python `int`. 
The axis number along which to compute correlation.NEWLINE Other dimensions index different batch members.NEWLINE max_lags: Positive `int` tensor. The maximum value of `m` to considerNEWLINE (in equation above). If `max_lags >= x.shape[axis]`, we effectivelyNEWLINE re-set `max_lags` to `x.shape[axis] - 1`.NEWLINE center: Python `bool`. If `False`, do not subtract the mean estimate `mu`NEWLINE from `x[n]` when forming `w[n]`.NEWLINE normalize: Python `bool`. If `False`, do not divide by the varianceNEWLINE estimate `s**2` when forming `w[n]`.NEWLINE name: `String` name to prepend to created ops.NEWLINENEWLINE Returns:NEWLINE `rxx`: `Tensor` of same `dtype` as `x`. `rxx.shape[i] = x.shape[i]` forNEWLINE `i != axis`, and `rxx.shape[axis] = max_lags + 1`.NEWLINENEWLINE Raises:NEWLINE TypeError: If `x` is not a supported type.NEWLINE """NEWLINE # Implementation details:NEWLINE # Extend length N / 2 1-D array x to length N by zero padding onto the end.NEWLINE # Then, setNEWLINE # F[x]_k := sum_n x_n exp{-i 2 pi k n / N }.NEWLINE # It is not hard to see thatNEWLINE # F[x]_k Conj(F[x]_k) = F[R]_k, whereNEWLINE # R_m := sum_n x_n Conj(x_{(n - m) mod N}).NEWLINE # One can also check that R_m / (N / 2 - m) is an unbiased estimate of RXX[m].NEWLINENEWLINE # Since F[x] is the DFT of x, this leads us to a zero-padding and FFT/IFFTNEWLINE # based version of estimating RXX.NEWLINE # Note that this is a special case of the Wiener-Khinchin Theorem.NEWLINE with ops.name_scope(name, values=[x]):NEWLINE x = ops.convert_to_tensor(x, name="x")NEWLINENEWLINE # Rotate dimensions of x in order to put axis at the rightmost dim.NEWLINE # FFT op requires this.NEWLINE rank = util.prefer_static_rank(x)NEWLINE if axis < 0:NEWLINE axis = rank + axisNEWLINE shift = rank - 1 - axisNEWLINE # Suppose x.shape[axis] = T, so there are T "time" steps.NEWLINE # ==> x_rotated.shape = B + [T],NEWLINE # where B is x_rotated's batch shape.NEWLINE x_rotated = util.rotate_transpose(x, shift)NEWLINENEWLINE if center:NEWLINE x_rotated -= math_ops.reduce_mean(x_rotated, axis=-1, keepdims=True)NEWLINENEWLINE # x_len = N / 2 from above explanation. The length of x along axis.NEWLINE # Get a value for x_len that works in all cases.NEWLINE x_len = util.prefer_static_shape(x_rotated)[-1]NEWLINENEWLINE # TODO(langmore) Investigate whether this zero padding helps or hurts. AtNEWLINE # the moment it is necessary so that all FFT implementations work.NEWLINE # Zero pad to the next power of 2 greater than 2 * x_len, which equalsNEWLINE # 2**(ceil(Log_2(2 * x_len))). 
Note: Log_2(X) = Log_e(X) / Log_e(2).NEWLINE x_len_float64 = math_ops.cast(x_len, np.float64)NEWLINE target_length = math_ops.pow(NEWLINE np.float64(2.),NEWLINE math_ops.ceil(math_ops.log(x_len_float64 * 2) / np.log(2.)))NEWLINE pad_length = math_ops.cast(target_length - x_len_float64, np.int32)NEWLINENEWLINE # We should have:NEWLINE # x_rotated_pad.shape = x_rotated.shape[:-1] + [T + pad_length]NEWLINE # = B + [T + pad_length]NEWLINE x_rotated_pad = util.pad(x_rotated, axis=-1, back=True, count=pad_length)NEWLINENEWLINE dtype = x.dtypeNEWLINE if not dtype.is_complex:NEWLINE if not dtype.is_floating:NEWLINE raise TypeError("Argument x must have either float or complex dtype"NEWLINE " found: {}".format(dtype))NEWLINE x_rotated_pad = math_ops.complex(x_rotated_pad,NEWLINE dtype.real_dtype.as_numpy_dtype(0.))NEWLINENEWLINE # Autocorrelation is IFFT of power-spectral density (up to some scaling).NEWLINE fft_x_rotated_pad = spectral_ops.fft(x_rotated_pad)NEWLINE spectral_density = fft_x_rotated_pad * math_ops.conj(fft_x_rotated_pad)NEWLINE # shifted_product is R[m] from above detailed explanation.NEWLINE # It is the inner product sum_n X[n] * Conj(X[n - m]).NEWLINE shifted_product = spectral_ops.ifft(spectral_density)NEWLINENEWLINE # Cast back to real-valued if x was real to begin with.NEWLINE shifted_product = math_ops.cast(shifted_product, dtype)NEWLINENEWLINE # Figure out if we can deduce the final static shape, and set max_lags.NEWLINE # Use x_rotated as a reference, because it has the time dimension in the farNEWLINE # right, and was created before we performed all sorts of crazy shapeNEWLINE # manipulations.NEWLINE know_static_shape = TrueNEWLINE if not x_rotated.shape.is_fully_defined():NEWLINE know_static_shape = FalseNEWLINE if max_lags is None:NEWLINE max_lags = x_len - 1NEWLINE else:NEWLINE max_lags = ops.convert_to_tensor(max_lags, name="max_lags")NEWLINE max_lags_ = tensor_util.constant_value(max_lags)NEWLINE if max_lags_ is None or not know_static_shape:NEWLINE know_static_shape = FalseNEWLINE max_lags = math_ops.minimum(x_len - 1, max_lags)NEWLINE else:NEWLINE max_lags = min(x_len - 1, max_lags_)NEWLINENEWLINE # Chop off the padding.NEWLINE # We allow users to provide a huge max_lags, but cut it off here.NEWLINE # shifted_product_chopped.shape = x_rotated.shape[:-1] + [max_lags]NEWLINE shifted_product_chopped = shifted_product[..., :max_lags + 1]NEWLINENEWLINE # If possible, set shape.NEWLINE if know_static_shape:NEWLINE chopped_shape = x_rotated.shape.as_list()NEWLINE chopped_shape[-1] = min(x_len, max_lags + 1)NEWLINE shifted_product_chopped.set_shape(chopped_shape)NEWLINENEWLINE # Recall R[m] is a sum of N / 2 - m nonzero terms x[n] Conj(x[n - m]). 
TheNEWLINE # other terms were zeros arising only due to zero padding.NEWLINE # `denominator = (N / 2 - m)` (defined below) is the proper term toNEWLINE # divide by to make this an unbiased estimate of the expectationNEWLINE # E[X[n] Conj(X[n - m])].NEWLINE x_len = math_ops.cast(x_len, dtype.real_dtype)NEWLINE max_lags = math_ops.cast(max_lags, dtype.real_dtype)NEWLINE denominator = x_len - math_ops.range(0., max_lags + 1.)NEWLINE denominator = math_ops.cast(denominator, dtype)NEWLINE shifted_product_rotated = shifted_product_chopped / denominatorNEWLINENEWLINE if normalize:NEWLINE shifted_product_rotated /= shifted_product_rotated[..., :1]NEWLINENEWLINE # Transpose dimensions back to those of x.NEWLINE return util.rotate_transpose(shifted_product_rotated, -shift)NEWLINENEWLINENEWLINE# TODO(langmore) To make equivalent to numpy.percentile:NEWLINE# Make work with a sequence of floats or single float for 'q'.NEWLINE# Make work with "linear", "midpoint" interpolation. (linear should be default)NEWLINEdef percentile(x,NEWLINE q,NEWLINE axis=None,NEWLINE interpolation=None,NEWLINE keep_dims=False,NEWLINE validate_args=False,NEWLINE name=None):NEWLINE """Compute the `q`-th percentile of `x`.NEWLINENEWLINE Given a vector `x`, the `q`-th percentile of `x` is the value `q / 100` of theNEWLINE way from the minimum to the maximum in a sorted copy of `x`.NEWLINENEWLINE The values and distances of the two nearest neighbors as well as theNEWLINE `interpolation` parameter will determine the percentile if the normalizedNEWLINE ranking does not match the location of `q` exactly.NEWLINENEWLINE This function is the same as the median if `q = 50`, the same as the minimumNEWLINE if `q = 0` and the same as the maximum if `q = 100`.NEWLINENEWLINENEWLINE ```pythonNEWLINE # Get 30th percentile with default ('nearest') interpolation.NEWLINE x = [1., 2., 3., 4.]NEWLINE percentile(x, q=30.)NEWLINE ==> 2.0NEWLINENEWLINE # Get 30th percentile with 'lower' interpolationNEWLINE x = [1., 2., 3., 4.]NEWLINE percentile(x, q=30., interpolation='lower')NEWLINE ==> 1.0NEWLINENEWLINE # Get 100th percentile (maximum). By default, this is computed over every dimNEWLINE x = [[1., 2.]NEWLINE [3., 4.]]NEWLINE percentile(x, q=100.)NEWLINE ==> 4.0NEWLINENEWLINE # Treat the leading dim as indexing samples, and find the 100th quantile (max)NEWLINE # over all such samples.NEWLINE x = [[1., 2.]NEWLINE [3., 4.]]NEWLINE percentile(x, q=100., axis=[0])NEWLINE ==> [3., 4.]NEWLINE ```NEWLINENEWLINE Compare to `numpy.percentile`.NEWLINENEWLINE Args:NEWLINE x: Floating point `N-D` `Tensor` with `N > 0`. If `axis` is not `None`,NEWLINE `x` must have statically known number of dimensions.NEWLINE q: Scalar `Tensor` in `[0, 100]`. The percentile.NEWLINE axis: Optional `0-D` or `1-D` integer `Tensor` with constant values.NEWLINE The axes that hold independent samples over which to return the desiredNEWLINE percentile. If `None` (the default), treat every dimension as a sampleNEWLINE dimension, returning a scalar.NEWLINE interpolation : {"lower", "higher", "nearest"}. Default: "nearest"NEWLINE This optional parameter specifies the interpolation method toNEWLINE use when the desired quantile lies between two data points `i < j`:NEWLINE * lower: `i`.NEWLINE * higher: `j`.NEWLINE * nearest: `i` or `j`, whichever is nearest.NEWLINE keep_dims: Python `bool`.
If `True`, the last dimension is kept with size 1.NEWLINE If `False`, the last dimension is removed from the output shape.NEWLINE validate_args: Whether to add runtime checks of argument validity.NEWLINE If False, and arguments are incorrect, correct behavior is not guaranteed.NEWLINE name: A Python string name to give this `Op`. Default is "percentile".NEWLINENEWLINE Returns:NEWLINE A `(N - len(axis))` dimensional `Tensor` of same dtype as `x`, or, ifNEWLINE `axis` is `None`, a scalar.NEWLINENEWLINE Raises:NEWLINE ValueError: If argument 'interpolation' is not an allowed type.NEWLINE """NEWLINE name = name or "percentile"NEWLINE allowed_interpolations = {"lower", "higher", "nearest"}NEWLINENEWLINE if interpolation is None:NEWLINE interpolation = "nearest"NEWLINE else:NEWLINE if interpolation not in allowed_interpolations:NEWLINE raise ValueError("Argument 'interpolation' must be in %s. Found %s" %NEWLINE (allowed_interpolations, interpolation))NEWLINENEWLINE with ops.name_scope(name, [x, q]):NEWLINE x = ops.convert_to_tensor(x, name="x")NEWLINE q = math_ops.to_float(q, name="q")NEWLINE _get_static_ndims(q, expect_ndims=0)NEWLINENEWLINE if validate_args:NEWLINE q = control_flow_ops.with_dependencies([NEWLINE check_ops.assert_rank(q, 0), check_ops.assert_greater_equal(q, 0.),NEWLINE check_ops.assert_less_equal(q, 100.)NEWLINE ], q)NEWLINENEWLINE if axis is None:NEWLINE y = array_ops.reshape(x, [-1])NEWLINE else:NEWLINE axis = ops.convert_to_tensor(axis, name="axis")NEWLINE check_ops.assert_integer(axis)NEWLINE axis_ndims = _get_static_ndims(NEWLINE axis, expect_static=True, expect_ndims_no_more_than=1)NEWLINE axis_const = tensor_util.constant_value(axis)NEWLINE if axis_const is None:NEWLINE raise ValueError(NEWLINE "Expected argument 'axis' to be statically available. Found: %s" %NEWLINE axis)NEWLINE axis = axis_constNEWLINE if axis_ndims == 0:NEWLINE axis = [axis]NEWLINE axis = [int(a) for a in axis]NEWLINE x_ndims = _get_static_ndims(NEWLINE x, expect_static=True, expect_ndims_at_least=1)NEWLINE axis = _make_static_axis_non_negative(axis, x_ndims)NEWLINE y = _move_dims_to_flat_end(x, axis, x_ndims)NEWLINENEWLINE frac_at_q_or_above = 1.
- q / 100.NEWLINE d = math_ops.to_float(array_ops.shape(y)[-1])NEWLINENEWLINE if interpolation == "lower":NEWLINE index = math_ops.ceil((d - 1) * frac_at_q_or_above)NEWLINE elif interpolation == "higher":NEWLINE index = math_ops.floor((d - 1) * frac_at_q_or_above)NEWLINE elif interpolation == "nearest":NEWLINE index = math_ops.round((d - 1) * frac_at_q_or_above)NEWLINENEWLINE # Sort everything, not just the top 'k' entries, which allows multiple callsNEWLINE # to sort only once (under the hood) and use CSE.NEWLINE sorted_y = _sort_tensor(y)NEWLINENEWLINE # result.shape = BNEWLINE result = sorted_y[..., math_ops.to_int32(index)]NEWLINE result.set_shape(y.get_shape()[:-1])NEWLINENEWLINE if keep_dims:NEWLINE if axis is None:NEWLINE # ones_vec = [1, 1,..., 1], total length = len(S) + len(B).NEWLINE ones_vec = array_ops.ones(NEWLINE shape=[_get_best_effort_ndims(x)], dtype=dtypes.int32)NEWLINE result *= array_ops.ones(ones_vec, dtype=x.dtype)NEWLINE else:NEWLINE result = _insert_back_keep_dims(result, axis)NEWLINENEWLINE return resultNEWLINENEWLINENEWLINEdef _get_static_ndims(x,NEWLINE expect_static=False,NEWLINE expect_ndims=None,NEWLINE expect_ndims_no_more_than=None,NEWLINE expect_ndims_at_least=None):NEWLINE """Get static number of dimensions and assert that some expectations are met.NEWLINENEWLINE This function returns the number of dimensions "ndims" of x, as a Python int.NEWLINENEWLINE The optional expect arguments are used to check the ndims of x, but this isNEWLINE only done if the static ndims of x is not None.NEWLINENEWLINE Args:NEWLINE x: A Tensor.NEWLINE expect_static: Expect `x` to have statically defined `ndims`.NEWLINE expect_ndims: Optional Python integer. If provided, assert that x hasNEWLINE number of dimensions equal to this.NEWLINE expect_ndims_no_more_than: Optional Python integer. If provided, assertNEWLINE that x has no more than this many dimensions.NEWLINE expect_ndims_at_least: Optional Python integer. If provided, assert thatNEWLINE x has at least this many dimensions.NEWLINENEWLINE Returns:NEWLINE ndims: A Python integer.NEWLINENEWLINE Raises:NEWLINE ValueError: If any of the expectations above are violated.NEWLINE """NEWLINE ndims = x.get_shape().ndimsNEWLINE if ndims is None:NEWLINE shape_const = tensor_util.constant_value(array_ops.shape(x))NEWLINE if shape_const is not None:NEWLINE ndims = len(shape_const)NEWLINENEWLINE if ndims is None:NEWLINE if expect_static:NEWLINE raise ValueError(NEWLINE "Expected argument 'x' to have statically defined 'ndims'. Found: %s" %NEWLINE x)NEWLINE returnNEWLINENEWLINE if expect_ndims is not None:NEWLINE ndims_message = ("Expected argument 'x' to have ndims %s. Found tensor %s"NEWLINE % (expect_ndims, x))NEWLINE if ndims != expect_ndims:NEWLINE raise ValueError(ndims_message)NEWLINENEWLINE if expect_ndims_at_least is not None:NEWLINE ndims_at_least_message = (NEWLINE "Expected argument 'x' to have ndims >= %d. Found tensor %s" % (NEWLINE expect_ndims_at_least, x))NEWLINE if ndims < expect_ndims_at_least:NEWLINE raise ValueError(ndims_at_least_message)NEWLINENEWLINE if expect_ndims_no_more_than is not None:NEWLINE ndims_no_more_than_message = (NEWLINE "Expected argument 'x' to have ndims <= %d.
Found tensor %s" % (NEWLINE expect_ndims_no_more_than, x))NEWLINE if ndims > expect_ndims_no_more_than:NEWLINE raise ValueError(ndims_no_more_than_message)NEWLINENEWLINE return ndimsNEWLINENEWLINENEWLINEdef _get_best_effort_ndims(x,NEWLINE expect_ndims=None,NEWLINE expect_ndims_at_least=None,NEWLINE expect_ndims_no_more_than=None):NEWLINE """Get static ndims if possible. Fallback on `tf.rank(x)`."""NEWLINE ndims_static = _get_static_ndims(NEWLINE x,NEWLINE expect_ndims=expect_ndims,NEWLINE expect_ndims_at_least=expect_ndims_at_least,NEWLINE expect_ndims_no_more_than=expect_ndims_no_more_than)NEWLINE if ndims_static is not None:NEWLINE return ndims_staticNEWLINE return array_ops.rank(x)NEWLINENEWLINENEWLINEdef _insert_back_keep_dims(x, axis):NEWLINE """Insert the dims in `axis` back as singletons after being removed.NEWLINENEWLINE Args:NEWLINE x: `Tensor`.NEWLINE axis: Python list of integers.NEWLINENEWLINE Returns:NEWLINE `Tensor` with same values as `x`, but additional singleton dimensions.NEWLINE """NEWLINE for i in sorted(axis):NEWLINE x = array_ops.expand_dims(x, axis=i)NEWLINE return xNEWLINENEWLINENEWLINEdef _make_static_axis_non_negative(axis, ndims):NEWLINE """Convert possibly negatively indexed axis to non-negative.NEWLINENEWLINE Args:NEWLINE axis: Iterable over Python integers.NEWLINE ndims: Number of dimensions into which axis indexes.NEWLINENEWLINE Returns:NEWLINE A list of non-negative Python integers.NEWLINENEWLINE Raises:NEWLINE ValueError: If values in `axis` are too big/small to index into `ndims`.NEWLINE """NEWLINE non_negative_axis = []NEWLINE for d in axis:NEWLINE if d >= 0:NEWLINE if d >= ndims:NEWLINE raise ValueError("dim %d not in the interval [0, %d]." % (d, ndims - 1))NEWLINE non_negative_axis.append(d)NEWLINE else:NEWLINE if d < -1 * ndims:NEWLINE raise ValueError(NEWLINE "Negatively indexed dim %d not in the interval [-%d, -1]" % (d,NEWLINE ndims))NEWLINE non_negative_axis.append(ndims + d)NEWLINE return non_negative_axisNEWLINENEWLINENEWLINEdef _move_dims_to_flat_end(x, axis, x_ndims):NEWLINE """Move dims corresponding to `axis` in `x` to the end, then flatten.NEWLINENEWLINE Args:NEWLINE x: `Tensor` with shape `[B0,B1,...,Bb]`.NEWLINE axis: Python list of indices into dimensions of `x`.NEWLINE x_ndims: Python integer holding number of dimensions in `x`.NEWLINENEWLINE Returns:NEWLINE `Tensor` with value from `x` and dims in `axis` moved to end into one singleNEWLINE dimension.NEWLINE """NEWLINE # Suppose x.shape = [a, b, c, d]NEWLINE # Suppose axis = [1, 3]NEWLINENEWLINE # front_dims = [0, 2] in example above.NEWLINE front_dims = sorted(set(range(x_ndims)).difference(axis))NEWLINE # x_permed.shape = [a, c, b, d]NEWLINE x_permed = array_ops.transpose(x, perm=front_dims + list(axis))NEWLINENEWLINE if x.get_shape().is_fully_defined():NEWLINE x_shape = x.get_shape().as_list()NEWLINE # front_shape = [a, c], end_shape = [b * d]NEWLINE front_shape = [x_shape[i] for i in front_dims]NEWLINE end_shape = [np.prod([x_shape[i] for i in axis])]NEWLINE full_shape = front_shape + end_shapeNEWLINE else:NEWLINE front_shape = array_ops.shape(x_permed)[:x_ndims - len(axis)]NEWLINE end_shape = [-1]NEWLINE full_shape = array_ops.concat([front_shape, end_shape], axis=0)NEWLINE return array_ops.reshape(x_permed, shape=full_shape)NEWLINENEWLINENEWLINEdef _sort_tensor(tensor):NEWLINE """Use `top_k` to sort a `Tensor` along the last dimension."""NEWLINE sorted_, _ = nn_ops.top_k(tensor, k=array_ops.shape(tensor)[-1])NEWLINE return sorted_NEWLINE |
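As a cross-check on the FFT-based path in `auto_correlation` above, the `rxx[m]` estimator from its docstring can be written directly in NumPy. This is an illustrative sketch (the helper name is mine, not part of the module); with the defaults `center=True, normalize=True` the two should agree up to floating-point error, and `rxx[0]` is 1 by construction:

```python
import numpy as np

def auto_correlation_reference(x, max_lags=None):
    # Direct O(L * max_lags) translation of the docstring estimator:
    #   rxx[m] = (L - m)**-1 sum_n w[n + m] Conj(w[n]),  w = (x - mu) / s
    x = np.asarray(x)
    L = len(x)
    max_lags = L - 1 if max_lags is None else min(max_lags, L - 1)
    mu = x.mean()
    s = np.sqrt(np.mean(np.abs(x - mu) ** 2))
    w = (x - mu) / s
    return np.array([np.sum(w[m:] * np.conj(w[:L - m])) / (L - m)
                     for m in range(max_lags + 1)])
```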
#!/usr/bin/env python3NEWLINE# Copyright (c) Meta Platforms, Inc. and affiliates.NEWLINE# All rights reserved.NEWLINE#NEWLINE# This source code is licensed under the BSD-style license found in theNEWLINE# LICENSE file in the root directory of this source tree.NEWLINENEWLINEfrom typing import List, OptionalNEWLINENEWLINEimport torchNEWLINEfrom torch import nnNEWLINEfrom torchrec.modules.embedding_modules import EmbeddingBagCollectionNEWLINEfrom torchrec.modules.mlp import MLPNEWLINEfrom torchrec.sparse.jagged_tensor import (NEWLINE KeyedJaggedTensor,NEWLINE KeyedTensor,NEWLINE)NEWLINENEWLINE# Sphinx Documentation Text (for user-facing classes only)NEWLINENEWLINE"""NEWLINE.. fb:display_title::NEWLINE DLRM APINEWLINE=====NEWLINENotation used throughout:NEWLINENEWLINEF: number of sparseFeaturesNEWLINED: embedding_dimension of sparse featuresNEWLINEB: batch_sizeNEWLINEnum_features: number of dense featuresNEWLINENEWLINE"""NEWLINENEWLINENEWLINEdef choose(n: int, k: int) -> int:NEWLINE """NEWLINE Simple implementation of math.comb for Python 3.7 compatibilityNEWLINE """NEWLINE if 0 <= k <= n:NEWLINE ntok = 1NEWLINE ktok = 1NEWLINE for t in range(1, min(k, n - k) + 1):NEWLINE ntok *= nNEWLINE ktok *= tNEWLINE n -= 1NEWLINE return ntok // ktokNEWLINE else:NEWLINE return 0NEWLINENEWLINENEWLINEclass SparseArch(nn.Module):NEWLINE """NEWLINE Processes the Sparse Features of DLRM. Does Embedding Lookup for allNEWLINE EmbeddingBag and Embedding features of each collection.NEWLINENEWLINE Constructor Args:NEWLINE embedding_bag_collection: EmbeddingBagCollection,NEWLINENEWLINE Call Args:NEWLINE features: KeyedJaggedTensor,NEWLINENEWLINE Returns:NEWLINE KeyedTensor - size F * D X BNEWLINENEWLINE Example:NEWLINE >>> eb1_config = EmbeddingBagConfig(NEWLINE name="t1", embedding_dim=3, num_embeddings=10, feature_names=["f1"]NEWLINE )NEWLINE eb2_config = EmbeddingBagConfig(NEWLINE name="t2", embedding_dim=4, num_embeddings=10, feature_names=["f2"]NEWLINE )NEWLINE ebc_config = EmbeddingBagCollectionConfig(tables=[eb1_config, eb2_config])NEWLINENEWLINE ebc = EmbeddingBagCollection(config=ebc_config)NEWLINENEWLINE # 0 1 2 <-- batchNEWLINE # 0 [0,1] None [2]NEWLINE # 1 [3] [4] [5,6,7]NEWLINE # ^NEWLINE # featureNEWLINE features = KeyedJaggedTensor.from_offsets_sync(NEWLINE keys=["f1", "f2"],NEWLINE values=torch.tensor([0, 1, 2, 3, 4, 5, 6, 7]),NEWLINE offsets=torch.tensor([0, 2, 2, 3, 4, 5, 8]),NEWLINE )NEWLINENEWLINE sparse_arch(features)NEWLINE """NEWLINENEWLINE def __init__(self, embedding_bag_collection: EmbeddingBagCollection) -> None:NEWLINE super().__init__()NEWLINE self.embedding_bag_collection: EmbeddingBagCollection = embedding_bag_collectionNEWLINENEWLINE def forward(NEWLINE self,NEWLINE features: KeyedJaggedTensor,NEWLINE ) -> KeyedTensor:NEWLINE return self.embedding_bag_collection(features)NEWLINENEWLINENEWLINEclass DenseArch(nn.Module):NEWLINE """NEWLINE Processes the dense features of DLRM model.NEWLINENEWLINE Constructor Args:NEWLINE in_features: int - size of the input.NEWLINE layer_sizes: List[int] - list of layer sizes.NEWLINE device: (Optional[torch.device]).NEWLINENEWLINE Call Args:NEWLINE features: torch.Tensor - size B X num_featuresNEWLINENEWLINE Returns:NEWLINE torch.Tensor - size B X DNEWLINENEWLINE Example:NEWLINE >>> B = 20NEWLINE D = 3NEWLINE dense_arch = DenseArch(10, layer_sizes=[15, D])NEWLINE dense_embedded = dense_arch(torch.rand((B, 10)))NEWLINE """NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE in_features: int,NEWLINE layer_sizes: List[int],NEWLINE device:
Optional[torch.device] = None,NEWLINE ) -> None:NEWLINE super().__init__()NEWLINE self.model: nn.Module = MLP(NEWLINE in_features, layer_sizes, bias=True, activation="relu", device=deviceNEWLINE )NEWLINENEWLINE def forward(self, features: torch.Tensor) -> torch.Tensor:NEWLINE return self.model(features)NEWLINENEWLINENEWLINEclass InteractionArch(nn.Module):NEWLINE """NEWLINE Processes the output of both SparseArch (sparse_features) and DenseArchNEWLINE (dense_features). Returns the pairwise dot product of each sparse feature pair,NEWLINE the dot product of each sparse feature with the output of the dense layer,NEWLINE and the dense layer itself (all concatenated).NEWLINENEWLINE NOTE: The dimensionality of the dense_features (D) is expected to match theNEWLINE dimensionality of the sparse_features so that the dot products between them can beNEWLINE computed.NEWLINENEWLINE Constructor Args:NEWLINE num_sparse_features: int - size FNEWLINENEWLINE Call Args:NEWLINE dense_features: torch.Tensor - size B X DNEWLINE sparse_features: KeyedTensor - size F * D X BNEWLINENEWLINE Returns:NEWLINE torch.Tensor - B X (D + F + F choose 2)NEWLINENEWLINE Example:NEWLINE >>> D = 3NEWLINE B = 10NEWLINE keys = ["f1", "f2"]NEWLINE F = len(keys)NEWLINE inter_arch = InteractionArch(num_sparse_features=F)NEWLINENEWLINE dense_features = torch.rand((B, D))NEWLINENEWLINE sparse_features = KeyedTensor(NEWLINE keys=keys,NEWLINE length_per_key=[D, D],NEWLINE values=torch.rand((B, D * F)),NEWLINE )NEWLINENEWLINE # B X (D + F + F choose 2)NEWLINE concat_dense = inter_arch(dense_features, sparse_features)NEWLINE """NEWLINENEWLINE def __init__(self, num_sparse_features: int) -> None:NEWLINE super().__init__()NEWLINE self.F = num_sparse_featuresNEWLINE self.triu_indices: torch.Tensor = torch.triu_indices(NEWLINE self.F + 1, self.F + 1, offset=1NEWLINE )NEWLINENEWLINE def forward(NEWLINE self, dense_features: torch.Tensor, sparse_features: KeyedTensorNEWLINE ) -> torch.Tensor:NEWLINE if self.F <= 0:NEWLINE return dense_featuresNEWLINE (B, D) = dense_features.shapeNEWLINENEWLINE sparse_values = sparse_features.values().reshape(B, self.F, D)NEWLINE combined_values = torch.cat((dense_features.unsqueeze(1), sparse_values), dim=1)NEWLINENEWLINE # dense/sparse + sparse/sparse interactionNEWLINE # size B X (F + F choose 2)NEWLINE interactions = torch.bmm(NEWLINE combined_values, torch.transpose(combined_values, 1, 2)NEWLINE )NEWLINE interactions_flat = interactions[:, self.triu_indices[0], self.triu_indices[1]]NEWLINENEWLINE return torch.cat((dense_features, interactions_flat), dim=1)NEWLINENEWLINENEWLINEclass OverArch(nn.Module):NEWLINE """NEWLINE Final Arch of DLRM - a simple MLP over the output of the InteractionArch.NEWLINENEWLINE Constructor Args:NEWLINE in_features: intNEWLINE layer_sizes: list[int]NEWLINE device: (Optional[torch.device]).NEWLINENEWLINE Call Args:NEWLINE features: torch.TensorNEWLINENEWLINE Returns:NEWLINE torch.Tensor - size B X layer_sizes[-1]NEWLINENEWLINE Example:NEWLINE >>> B = 20NEWLINE D = 3NEWLINE over_arch = OverArch(10, [5, 1])NEWLINE logits = over_arch(torch.rand((B, 10)))NEWLINE """NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE in_features: int,NEWLINE layer_sizes: List[int],NEWLINE device: Optional[torch.device] = None,NEWLINE ) -> None:NEWLINE super().__init__()NEWLINE if len(layer_sizes) <= 1:NEWLINE raise ValueError("OverArch must have multiple layers.")NEWLINE self.model: nn.Module = nn.Sequential(NEWLINE MLP(NEWLINE in_features,NEWLINE layer_sizes[:-1],NEWLINE bias=True,NEWLINE activation="relu",NEWLINE
device=device,NEWLINE ),NEWLINE nn.Linear(layer_sizes[-2], layer_sizes[-1], bias=True, device=device),NEWLINE )NEWLINENEWLINE def forward(self, features: torch.Tensor) -> torch.Tensor:NEWLINE return self.model(features)NEWLINENEWLINENEWLINEclass DLRM(nn.Module):NEWLINE """NEWLINE Recsys model from "Deep Learning Recommendation Model for Personalization andNEWLINE Recommendation Systems" (https://arxiv.org/abs/1906.00091). Processes sparseNEWLINE features by learning pooled embeddings for each feature. Learns the relationshipNEWLINE between dense features and sparse features by projecting dense features into theNEWLINE same embedding space. Also, learns the pairwise relationships between sparseNEWLINE features.NEWLINENEWLINE The module assumes all sparse features have the same embedding dimensionNEWLINE (i.e., each EmbeddingBagConfig uses the same embedding_dim)NEWLINENEWLINE Constructor Args:NEWLINE embedding_bag_collection (EmbeddingBagCollection): collection of embedding bagsNEWLINE used to define SparseArch.NEWLINE dense_in_features (int): the dimensionality of the dense input features.NEWLINE dense_arch_layer_sizes (list[int]): the layer sizes for the DenseArch.NEWLINE over_arch_layer_sizes (list[int]): the layer sizes for the OverArch. NOTE: TheNEWLINE output dimension of the InteractionArch should not be manually specifiedNEWLINE here.NEWLINE dense_device: (Optional[torch.device]).NEWLINENEWLINE Call Args:NEWLINE dense_features: torch.Tensor,NEWLINE sparse_features: KeyedJaggedTensor,NEWLINENEWLINE Returns:NEWLINE torch.Tensor - logits with size B X 1NEWLINENEWLINE Example:NEWLINE >>> B = 2NEWLINE D = 8NEWLINENEWLINE eb1_config = EmbeddingBagConfig(NEWLINE name="t1", embedding_dim=D, num_embeddings=100, feature_names=["f1", "f3"]NEWLINE )NEWLINE eb2_config = EmbeddingBagConfig(NEWLINE name="t2",NEWLINE embedding_dim=D,NEWLINE num_embeddings=100,NEWLINE feature_names=["f2"],NEWLINE )NEWLINE ebc_config = EmbeddingBagCollectionConfig(tables=[eb1_config, eb2_config])NEWLINENEWLINE ebc = EmbeddingBagCollection(config=ebc_config)NEWLINE model = DLRM(NEWLINE embedding_bag_collection=ebc,NEWLINE dense_in_features=100,NEWLINE dense_arch_layer_sizes=[20, D],NEWLINE over_arch_layer_sizes=[5, 1],NEWLINE )NEWLINENEWLINE features = torch.rand((B, 100))NEWLINENEWLINE # 0 1NEWLINE # 0 [1,2] [4,5]NEWLINE # 1 [4,3] [2,9]NEWLINE # ^NEWLINE # featureNEWLINE sparse_features = KeyedJaggedTensor.from_offsets_sync(NEWLINE keys=["f1", "f3"],NEWLINE values=torch.tensor([1, 2, 4, 5, 4, 3, 2, 9]),NEWLINE offsets=torch.tensor([0, 2, 4, 6, 8]),NEWLINE )NEWLINENEWLINE logits = model(NEWLINE dense_features=features,NEWLINE sparse_features=sparse_features,NEWLINE )NEWLINE """NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE embedding_bag_collection: EmbeddingBagCollection,NEWLINE dense_in_features: int,NEWLINE dense_arch_layer_sizes: List[int],NEWLINE over_arch_layer_sizes: List[int],NEWLINE dense_device: Optional[torch.device] = None,NEWLINE ) -> None:NEWLINE super().__init__()NEWLINE assert (NEWLINE len(embedding_bag_collection.embedding_bag_configs) > 0NEWLINE ), "At least one embedding bag is required"NEWLINE for i in range(1, len(embedding_bag_collection.embedding_bag_configs)):NEWLINE conf_prev = embedding_bag_collection.embedding_bag_configs[i - 1]NEWLINE conf = embedding_bag_collection.embedding_bag_configs[i]NEWLINE assert (NEWLINE conf_prev.embedding_dim == conf.embedding_dimNEWLINE ), "All EmbeddingBagConfigs must have the same dimension"NEWLINE embedding_dim: int =
embedding_bag_collection.embedding_bag_configs[NEWLINE 0NEWLINE ].embedding_dimNEWLINE if dense_arch_layer_sizes[-1] != embedding_dim:NEWLINE raise ValueError(NEWLINE f"embedding_bag_collection dimension ({embedding_dim}) and final dense "NEWLINE f"arch layer size ({dense_arch_layer_sizes[-1]}) must match."NEWLINE )NEWLINENEWLINE num_feature_names = sum(NEWLINE [NEWLINE len(conf.feature_names)NEWLINE for conf in embedding_bag_collection.embedding_bag_configsNEWLINE ]NEWLINE )NEWLINENEWLINE over_in_features = (NEWLINE embedding_dim + choose(num_feature_names, 2) + num_feature_namesNEWLINE )NEWLINENEWLINE self.sparse_arch = SparseArch(embedding_bag_collection)NEWLINE self.dense_arch = DenseArch(NEWLINE in_features=dense_in_features,NEWLINE layer_sizes=dense_arch_layer_sizes,NEWLINE device=dense_device,NEWLINE )NEWLINE self.inter_arch = InteractionArch(num_sparse_features=num_feature_names)NEWLINE self.over_arch = OverArch(NEWLINE in_features=over_in_features,NEWLINE layer_sizes=over_arch_layer_sizes,NEWLINE device=dense_device,NEWLINE )NEWLINENEWLINE def forward(NEWLINE self,NEWLINE dense_features: torch.Tensor,NEWLINE sparse_features: KeyedJaggedTensor,NEWLINE ) -> torch.Tensor:NEWLINE embedded_dense = self.dense_arch(dense_features)NEWLINE embedded_sparse = self.sparse_arch(sparse_features)NEWLINE concatenated_dense = self.inter_arch(NEWLINE dense_features=embedded_dense, sparse_features=embedded_sparseNEWLINE )NEWLINE logits = self.over_arch(concatenated_dense)NEWLINE return logitsNEWLINE |
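The shape arithmetic behind `over_in_features` can be checked with a few lines of plain torch that mirror `InteractionArch.forward` (an illustrative sketch with made-up sizes, reusing the `choose` helper defined above):

```python
import torch

B, D, F = 4, 8, 3                # batch size, embedding dim, num sparse features
dense = torch.rand(B, D)
sparse = torch.rand(B, F, D)

combined = torch.cat((dense.unsqueeze(1), sparse), dim=1)  # B x (F + 1) x D
dots = torch.bmm(combined, combined.transpose(1, 2))       # B x (F + 1) x (F + 1)
triu = torch.triu_indices(F + 1, F + 1, offset=1)          # strict upper triangle
flat = dots[:, triu[0], triu[1]]                           # B x (F + choose(F, 2))
out = torch.cat((dense, flat), dim=1)

# C(F + 1, 2) = F + C(F, 2), so the output has D + F + choose(F, 2) columns,
# which is exactly what over_in_features feeds to OverArch.
assert out.shape == (B, D + F + choose(F, 2))  # choose() as defined above
```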
from dataclasses import dataclassNEWLINEfrom math import asin, cos, radians, sin, sqrtNEWLINENEWLINENEWLINE@dataclassNEWLINEclass Position:NEWLINE name: strNEWLINE lon: float = 0.0NEWLINE lat: float = 0.0NEWLINENEWLINE def distance_to(self, other):NEWLINE r = 6371 # Earth radius in kilometersNEWLINE lam_1, lam_2 = radians(self.lon), radians(other.lon)NEWLINE phi_1, phi_2 = radians(self.lat), radians(other.lat)NEWLINE h = (sin((phi_2 - phi_1) / 2)**2NEWLINE + cos(phi_1) * cos(phi_2) * sin((lam_2 - lam_1) / 2)**2)NEWLINE return 2 * r * asin(sqrt(h))NEWLINENEWLINENEWLINEoslo = Position('Oslo', 10.8, 59.9)NEWLINEvancouver = Position('Vancouver', -123.1, 49.3)NEWLINEoslo.distance_to(vancouver)NEWLINE |
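Two quick sanity checks on `distance_to` (a sketch; the printed figure assumes the spherical-Earth model with r = 6371 km used above):

```python
# Distance from a point to itself is zero, and haversine distance is symmetric.
assert oslo.distance_to(oslo) == 0.0
assert abs(oslo.distance_to(vancouver) - vancouver.distance_to(oslo)) < 1e-9
print(round(oslo.distance_to(vancouver)))  # roughly 7182 km
```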
from django.contrib.contenttypes.models import ContentTypeNEWLINEfrom django.core.exceptions import (ObjectDoesNotExist, PermissionDenied,NEWLINE ImproperlyConfigured)NEWLINEfrom django.shortcuts import get_object_or_404NEWLINEfrom django.http import Http404NEWLINEfrom django.conf import settingsNEWLINENEWLINEfrom . import models, plugins, serializersNEWLINENEWLINEfrom rest_framework import generics, mixins, statusNEWLINEfrom rest_framework.response import ResponseNEWLINENEWLINENEWLINE# TODO: create an endpoint that provides a list of valid targetNEWLINE# addresses for a particular agent to mail to (/api/starsrace/42/addresses/)NEWLINENEWLINE# TODO: add a method to the plugin that provides the list of targetNEWLINE# agents for a given agentNEWLINENEWLINE# TODO: create an endpoint for a realm administrator to create addresses?NEWLINENEWLINENEWLINE# /api/starsrace/42/messages/7/ (read a particular message)NEWLINE# /api/starsrace/42/messages/7/read/ (mark message as read)NEWLINE# /api/starsrace/42/messages/7/unread/ (mark message as unread)NEWLINE# /api/starsrace/42/messages/7/archive/ (mark message as archived)NEWLINE# /api/starsrace/42/messages/7/unarchive/ (mark message as unarchived)NEWLINENEWLINENEWLINE# TODO: replace views with Django REST FrameworkNEWLINENEWLINEclass AddressQuerysetMixin(object):NEWLINE serializer_class = serializers.AddressSerializerNEWLINE queryset = models.Address.objects.all()NEWLINENEWLINE def get_queryset(self):NEWLINE alias = self.kwargs.get('agent_alias')NEWLINE ct = plugins.agent_type(alias)NEWLINENEWLINE return self.queryset.filter(content_type=ct, users=self.request.user)NEWLINENEWLINE def get_object(self):NEWLINE queryset = self.get_queryset()NEWLINENEWLINE kwargs = {'object_id': self.kwargs.get('agent_pk')}NEWLINE obj = get_object_or_404(queryset, **kwargs)NEWLINENEWLINE self.check_object_permissions(self.request, obj)NEWLINE return objNEWLINENEWLINENEWLINEclass AddressListView(AddressQuerysetMixin, generics.ListAPIView):NEWLINE # /api/starsrace/NEWLINE #permission_classes = (PluginPermissions,)NEWLINE passNEWLINENEWLINENEWLINEclass AddressRetrieveView(AddressQuerysetMixin, generics.RetrieveAPIView):NEWLINE # /api/starsrace/42/NEWLINE #permission_classes = (PluginPermissions,)NEWLINE passNEWLINENEWLINENEWLINEclass AddressMixin(object):NEWLINE def get_address(self):NEWLINE alias = self.kwargs.get('agent_alias')NEWLINE ct = plugins.agent_type(alias)NEWLINE pk = self.kwargs.get('agent_pk')NEWLINENEWLINE address = get_object_or_404(NEWLINE models.Address, content_type=ct, object_id=pk)NEWLINENEWLINE if not address.users.filter(id=self.request.user.pk).exists():NEWLINE raise PermissionDeniedNEWLINENEWLINE return addressNEWLINENEWLINENEWLINEclass MessageCreateView(AddressMixin, generics.CreateAPIView):NEWLINE # /api/starsrace/42/post/NEWLINE serializer_class = serializers.MessageSerializerNEWLINE queryset = models.Message.objects.all()NEWLINENEWLINE def create(self, request, *args, **kwargs):NEWLINE address = self.get_address()NEWLINENEWLINE instance = models.Message(author=self.request.user,NEWLINE author_address=address)NEWLINE serializer = self.get_serializer(instance=instance, data=request.data)NEWLINE serializer.is_valid(raise_exception=True)NEWLINE self.perform_create(serializer)NEWLINE headers = self.get_success_headers(serializer.data)NEWLINE return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)NEWLINENEWLINENEWLINEclass MessageListView(AddressMixin, generics.ListAPIView):NEWLINE # /api/starsrace/42/messages/NEWLINE 
serializer_class = serializers.MessageUserSerializerNEWLINE queryset = models.MessageUser.objects.all()NEWLINENEWLINE def get_queryset(self):NEWLINE address = self.get_address()NEWLINENEWLINE return self.queryset.filter(NEWLINE user=self.request.user,NEWLINE message__addresses=addressNEWLINE )NEWLINE |
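For orientation, the endpoint comments above map onto URL patterns roughly as follows (a hypothetical `urls.py` sketch; the route prefix and module layout are assumptions, since the actual routing is not shown here). The converter names match the `agent_alias` and `agent_pk` kwargs the views read:

```python
from django.urls import path

from . import views

urlpatterns = [
    # e.g. /api/starsrace/ and /api/starsrace/42/
    path('api/<slug:agent_alias>/', views.AddressListView.as_view()),
    path('api/<slug:agent_alias>/<int:agent_pk>/', views.AddressRetrieveView.as_view()),
    # e.g. /api/starsrace/42/post/ and /api/starsrace/42/messages/
    path('api/<slug:agent_alias>/<int:agent_pk>/post/', views.MessageCreateView.as_view()),
    path('api/<slug:agent_alias>/<int:agent_pk>/messages/', views.MessageListView.as_view()),
]
```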
"""NEWLINE Copyright (c) 2015-2019 Ad Schellevis <[email protected]>NEWLINE All rights reserved.NEWLINENEWLINE Redistribution and use in source and binary forms, with or withoutNEWLINE modification, are permitted provided that the following conditions are met:NEWLINENEWLINE 1. Redistributions of source code must retain the above copyright notice,NEWLINE this list of conditions and the following disclaimer.NEWLINENEWLINE 2. Redistributions in binary form must reproduce the above copyrightNEWLINE notice, this list of conditions and the following disclaimer in theNEWLINE documentation and/or other materials provided with the distribution.NEWLINENEWLINE THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,NEWLINE INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITYNEWLINE AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THENEWLINE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,NEWLINE OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OFNEWLINE SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESSNEWLINE INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER INNEWLINE CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)NEWLINE ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THENEWLINE POSSIBILITY OF SUCH DAMAGE.NEWLINENEWLINE --------------------------------------------------------------------------------------NEWLINENEWLINE package : configdNEWLINE function: make standard config parser case sensitiveNEWLINE"""NEWLINENEWLINEfrom configparser import ConfigParserNEWLINENEWLINENEWLINEclass CSConfigParser(ConfigParser):NEWLINE def __init__(self):NEWLINE ConfigParser.__init__(self)NEWLINE self.optionxform = strNEWLINE |
# -*- coding: utf-8 -*-NEWLINE#----------------------------------------------------------------------------NEWLINE# Name: art_msw.pyNEWLINE# Purpose:NEWLINE#NEWLINE# Author: Andrea Gavana <[email protected]>NEWLINE#NEWLINE# Created:NEWLINE# Version:NEWLINE# Date:NEWLINE# Licence: wxWindows licenseNEWLINE# Tags: phoenix-port, unittest, documented, py3-portNEWLINE#----------------------------------------------------------------------------NEWLINE"""NEWLINE`art_msw` is responsible for drawing all the components of the ribbonNEWLINEinterface using a Windows appearance.NEWLINENEWLINENEWLINEDescriptionNEWLINE===========NEWLINENEWLINEThis allows a ribbon bar to have a pluggable look-and-feel, while retaining the sameNEWLINEunderlying behaviour. As a single art provider is used for all ribbon components, aNEWLINEribbon bar usually has a consistent (though unique) appearance.NEWLINENEWLINEBy default, a :class:`~wx.lib.agw.ribbon.bar.RibbonBar` uses an instance of a class calledNEWLINE:class:`~wx.lib.agw.ribbon.art_default.RibbonDefaultArtProvider`,NEWLINEwhich resolves to :class:`~wx.lib.agw.ribbon.art_aui.RibbonAUIArtProvider`,NEWLINE:class:`~wx.lib.agw.ribbon.art_msw.RibbonMSWArtProvider`, orNEWLINE:class:`~wx.lib.agw.ribbon.art_osx.RibbonOSXArtProvider` - whichever is most appropriateNEWLINEto the current platform. These art providers are allNEWLINEslightly configurable with regard to colours and fonts, but for larger modifications,NEWLINEyou can derive from one of these classes, or write a completely new art provider class.NEWLINENEWLINECall :meth:`RibbonBar.SetArtProvider() <lib.agw.ribbon.bar.RibbonBar.SetArtProvider>` to change the art provider being used.NEWLINENEWLINENEWLINESee AlsoNEWLINE========NEWLINENEWLINE:class:`~wx.lib.agw.ribbon.bar.RibbonBar`NEWLINE"""NEWLINENEWLINEimport wxNEWLINENEWLINEfrom math import cosNEWLINEfrom math import pi as M_PINEWLINENEWLINEfrom . import panel as PANELNEWLINEfrom . 
import page as PAGENEWLINENEWLINEfrom .art_internal import RibbonLoadPixmap, RibbonInterpolateColour, RibbonDrawParallelGradientLinesNEWLINEfrom .art_internal import RibbonCanLabelBreakAtPositionNEWLINEfrom .art_internal import RibbonHSLColourNEWLINENEWLINEfrom .art import *NEWLINENEWLINENEWLINEgallery_up_xpm = [b"5 5 2 1", b" c None", b"x c #FF00FF", b" ", b" x ", b" xxx ", b"xxxxx", b" "]NEWLINEgallery_down_xpm = [b"5 5 2 1", b" c None", b"x c #FF00FF", b" ", b"xxxxx", b" xxx ", b" x ", b" "]NEWLINEgallery_left_xpm = [b"5 5 2 1", b" c None", b"x c #FF00FF", b" x ", b" xx ", b" xxx ", b" xx ", b" x "]NEWLINEgallery_right_xpm = [b"5 5 2 1", b" c None", b"x c #FF00FF", b" x ", b" xx ", b" xxx ", b" xx ", b" x "]NEWLINEgallery_extension_xpm = [b"5 5 2 1", b" c None", b"x c #FF00FF", b"xxxxx", b" ", b"xxxxx", b" xxx ", b" x "]NEWLINEpanel_extension_xpm = [b"7 7 2 1", b" c None", b"x c #FF00FF", b"xxxxxx ", b"x ", b"x ",NEWLINE b"x x x", b"x xxx", b"x xxx", b" xxxx"]NEWLINENEWLINENEWLINEdef LikePrimary(primary_hsl, is_gray, h, s, l):NEWLINENEWLINE return primary_hsl.ShiftHue(h).Saturated((is_gray and [0] or [s])[0]).Lighter(l).ToRGB()NEWLINENEWLINENEWLINEdef LikeSecondary(secondary_hsl, is_gray, h, s, l):NEWLINENEWLINE return secondary_hsl.ShiftHue(h).Saturated((is_gray and [0] or [s])[0]).Lighter(l).ToRGB()NEWLINENEWLINENEWLINEdef SingleLine(dc, rect, start, finish):NEWLINENEWLINE dc.DrawLine(start.x + rect.x, start.y + rect.y, finish.x + rect.x, finish.y + rect.y)NEWLINENEWLINENEWLINEclass RibbonMSWArtProvider(object):NEWLINENEWLINE def __init__(self, set_colour_scheme=True):NEWLINENEWLINE self._flags = 0NEWLINE self._tab_label_font = wx.NORMAL_FONTNEWLINE self._button_bar_label_font = wx.NORMAL_FONTNEWLINE self._panel_label_font = wx.NORMAL_FONTNEWLINENEWLINE self._gallery_up_bitmap = [wx.NullBitmap for i in range(4)]NEWLINE self._gallery_down_bitmap = [wx.NullBitmap for i in range(4)]NEWLINE self._gallery_extension_bitmap = [wx.NullBitmap for i in range(4)]NEWLINE self._panel_extension_bitmap = [wx.NullBitmap for i in range(2)]NEWLINENEWLINE if set_colour_scheme:NEWLINE self.SetColourScheme(wx.Colour(194, 216, 241), wx.Colour(255, 223, 114), wx.Colour(0, 0, 0))NEWLINENEWLINE self._cached_tab_separator_visibility = -10.0 # valid visibilities are in range [0, 1]NEWLINE self._tab_separation_size = 3NEWLINE self._page_border_left = 2NEWLINE self._page_border_top = 1NEWLINE self._page_border_right = 2NEWLINE self._page_border_bottom = 3NEWLINE self._panel_x_separation_size = 1NEWLINE self._panel_y_separation_size = 1NEWLINE self._tool_group_separation_size = 3NEWLINE self._gallery_bitmap_padding_left_size = 4NEWLINE self._gallery_bitmap_padding_right_size = 4NEWLINE self._gallery_bitmap_padding_top_size = 4NEWLINE self._gallery_bitmap_padding_bottom__size = 4NEWLINE self._cached_tab_separator = wx.NullBitmapNEWLINENEWLINENEWLINE def GetColourScheme(self, primary, secondary, tertiary):NEWLINE """NEWLINE Get the current colour scheme.NEWLINENEWLINE Returns three colours such that if :meth:`~RibbonMSWArtProvider.SetColourScheme` were called with them, theNEWLINE colour scheme would be restored to what it was when :meth:`~RibbonMSWArtProvider.SetColourScheme` was lastNEWLINE called. 
In practice, this usually means that the returned values are the threeNEWLINE colours given in the last call to :meth:`~RibbonMSWArtProvider.SetColourScheme`, however ifNEWLINE :meth:`~RibbonMSWArtProvider.SetColourScheme` performs an idempotent operation upon the colours it is givenNEWLINE (like clamping a component of the colour), then the returned values may not beNEWLINE the three colours given in the last call to :meth:`~RibbonMSWArtProvider.SetColourScheme`.NEWLINENEWLINE If :meth:`~RibbonMSWArtProvider.SetColourScheme` has not been called, then the returned values should resultNEWLINE in a colour scheme similar to, if not identical to, the default colours of theNEWLINE art provider. Note that if :meth:`~RibbonMSWArtProvider.SetColour` is called, then :meth:`~RibbonMSWArtProvider.GetColourScheme` doesNEWLINE not try to return a colour scheme similar to the colours being used - itsNEWLINE return values are dependent upon the last values given to :meth:`~RibbonMSWArtProvider.SetColourScheme`, asNEWLINE described above.NEWLINENEWLINE :param `primary`: Pointer to a location to store the primary colour, or ``None``;NEWLINE :param `secondary`: Pointer to a location to store the secondary colour, or ``None``;NEWLINE :param `tertiary`: Pointer to a location to store the tertiary colour, or ``None``.NEWLINENEWLINE """NEWLINENEWLINE if primary is not None:NEWLINE primary = self._primary_scheme_colourNEWLINE if secondary is not None:NEWLINE secondary = self._secondary_scheme_colourNEWLINE if tertiary is not None:NEWLINE tertiary = self._tertiary_scheme_colourNEWLINENEWLINE return primary, secondary, tertiaryNEWLINENEWLINENEWLINE def SetColourScheme(self, primary, secondary, tertiary):NEWLINE """NEWLINE Set all applicable colour settings from a few base colours.NEWLINENEWLINE Uses any or all of the three given colours to create a colour scheme, and thenNEWLINE sets all colour settings which are relevant to the art provider using thatNEWLINE scheme.
Note that some art providers may not use the tertiary colour forNEWLINE anything, and some may not use the secondary colour either.NEWLINENEWLINE :param `primary`: the primary base colour;NEWLINE :param `secondary`: the secondary base colour;NEWLINE :param `tertiary`: the tertiary base colour.NEWLINENEWLINE :see: :meth:`~RibbonMSWArtProvider.SetColour`, :meth:`~RibbonMSWArtProvider.GetColourScheme`NEWLINE """NEWLINENEWLINE self._primary_scheme_colour = primaryNEWLINE self._secondary_scheme_colour = secondaryNEWLINE self._tertiary_scheme_colour = tertiaryNEWLINENEWLINE primary_hsl = RibbonHSLColour(primary)NEWLINE secondary_hsl = RibbonHSLColour(secondary)NEWLINE # tertiary not used for anythingNEWLINENEWLINE # Map primary saturation from [0, 1] to [.25, .75]NEWLINE primary_is_gray = FalseNEWLINE gray_saturation_threshold = 0.01NEWLINENEWLINE if primary_hsl.saturation <= gray_saturation_threshold:NEWLINE primary_is_gray = TrueNEWLINE else:NEWLINE primary_hsl.saturation = cos(primary_hsl.saturation * M_PI) * -0.25 + 0.5NEWLINENEWLINE # Map primary luminance from [0, 1] to [.23, .83]NEWLINE primary_hsl.luminance = cos(primary_hsl.luminance * M_PI) * -0.3 + 0.53NEWLINENEWLINE # Map secondary saturation from [0, 1] to [0.16, 0.84]NEWLINE secondary_is_gray = FalseNEWLINENEWLINE if secondary_hsl.saturation <= gray_saturation_threshold:NEWLINE secondary_is_gray = TrueNEWLINE else:NEWLINE secondary_hsl.saturation = cos(secondary_hsl.saturation * M_PI) * -0.34 + 0.5NEWLINENEWLINE # Map secondary luminance from [0, 1] to [0.1, 0.9]NEWLINE secondary_hsl.luminance = cos(secondary_hsl.luminance * M_PI) * -0.4 + 0.5NEWLINENEWLINE self._page_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, 1.4, 0.00, -0.08))NEWLINE self._page_background_top_colour = LikePrimary(primary_hsl, primary_is_gray, -0.1, -0.03, 0.12)NEWLINE self._page_hover_background_top_colour = LikePrimary(primary_hsl, primary_is_gray, -2.8, 0.27, 0.17)NEWLINE self._page_background_top_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 0.1, -0.10, 0.08)NEWLINE self._page_hover_background_top_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 3.2, 0.16, 0.13)NEWLINE self._page_background_colour = LikePrimary(primary_hsl, primary_is_gray, 0.4, -0.09, 0.05)NEWLINE self._page_hover_background_colour = LikePrimary(primary_hsl, primary_is_gray, 0.1, 0.19, 0.10)NEWLINE self._page_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, -3.2, 0.27, 0.10)NEWLINE self._page_hover_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.8, 0.01, 0.15)NEWLINENEWLINE self._tab_active_background_colour = LikePrimary(primary_hsl, primary_is_gray, -0.1, -0.31, 0.16)NEWLINE self._tab_active_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, -0.1, -0.03, 0.12)NEWLINE self._tab_separator_colour = LikePrimary(primary_hsl, primary_is_gray, 0.9, 0.24, 0.05)NEWLINE self._tab_ctrl_background_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, 1.0, 0.39, 0.07))NEWLINE self._tab_hover_background_colour = LikePrimary(primary_hsl, primary_is_gray, 1.3, 0.15, 0.10)NEWLINE self._tab_hover_background_top_colour = LikePrimary(primary_hsl, primary_is_gray, 1.4, 0.36, 0.08)NEWLINE self._tab_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, 1.4, 0.03, -0.05) )NEWLINE self._tab_separator_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.7, -0.15, -0.18)NEWLINE self._tab_hover_background_top_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.8, 0.34, 0.13)NEWLINE
self._tab_label_colour = LikePrimary(primary_hsl, primary_is_gray, 4.3, 0.13, -0.49)NEWLINE self._tab_hover_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -1.5, -0.34, 0.01)NEWLINENEWLINE self._panel_minimised_border_gradient_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, -6.9, -0.17, -0.09))NEWLINE self._panel_minimised_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, -5.3, -0.24, -0.06))NEWLINE self._panel_border_gradient_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, -5.2, -0.15, -0.06))NEWLINE self._panel_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, -2.8, -0.32, 0.02))NEWLINE self._panel_label_background_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, -1.5, 0.03, 0.05))NEWLINE self._panel_active_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 0.5, 0.34, 0.05)NEWLINE self._panel_hover_label_background_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, 1.0, 0.30, 0.09))NEWLINE self._panel_active_background_top_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.17, -0.13)NEWLINE self._panel_active_background_colour = LikePrimary(primary_hsl, primary_is_gray, 1.6, -0.18, -0.18)NEWLINE self._panel_active_background_top_colour = LikePrimary(primary_hsl, primary_is_gray, 1.7, -0.20, -0.03)NEWLINE self._panel_label_colour = LikePrimary(primary_hsl, primary_is_gray, 2.8, -0.14, -0.35)NEWLINE self._panel_hover_label_colour = self._panel_label_colourNEWLINE self._panel_minimised_label_colour = self._tab_label_colourNEWLINENEWLINE self._panel_hover_button_background_brush = wx.Brush(LikeSecondary(secondary_hsl, secondary_is_gray, -0.9, 0.16, -0.07))NEWLINE self._panel_hover_button_border_pen = wx.Pen(LikeSecondary(secondary_hsl, secondary_is_gray, -3.9, -0.16, -0.14))NEWLINE self.SetColour(RIBBON_ART_PANEL_BUTTON_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.21, -0.23))NEWLINE self.SetColour(RIBBON_ART_PANEL_BUTTON_HOVER_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.5, -0.24, -0.29))NEWLINENEWLINE self._gallery_button_disabled_background_colour = LikePrimary(primary_hsl, primary_is_gray, -2.8, -0.46, 0.09)NEWLINE self._gallery_button_disabled_background_top_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, -2.8, -0.36, 0.15))NEWLINE self._gallery_hover_background_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, -0.8, 0.05, 0.15))NEWLINE self._gallery_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, 0.7, -0.02, 0.03))NEWLINE self._gallery_button_background_top_brush = wx.Brush(LikePrimary(primary_hsl, primary_is_gray, 0.8, 0.34, 0.13))NEWLINE self._gallery_button_background_colour = LikePrimary(primary_hsl, primary_is_gray, 1.3, 0.10, 0.08)NEWLINENEWLINE # SetColour used so that the relevant bitmaps are generatedNEWLINE self.SetColour(RIBBON_ART_GALLERY_BUTTON_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.21, -0.23))NEWLINE self.SetColour(RIBBON_ART_GALLERY_BUTTON_HOVER_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.5, -0.24, -0.29))NEWLINE self.SetColour(RIBBON_ART_GALLERY_BUTTON_ACTIVE_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.5, -0.24, -0.29))NEWLINE self.SetColour(RIBBON_ART_GALLERY_BUTTON_DISABLED_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 0.0, -1.0, 0.0))NEWLINE self._gallery_button_disabled_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.5, -0.43, 0.12)NEWLINE self._gallery_button_background_gradient_colour =
LikePrimary(primary_hsl, primary_is_gray, 1.7, 0.11, 0.09)NEWLINE self._gallery_item_border_pen = wx.Pen(LikeSecondary(secondary_hsl, secondary_is_gray, -3.9, -0.16, -0.14))NEWLINE self._gallery_button_hover_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -0.9, 0.16, -0.07)NEWLINE self._gallery_button_hover_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, 0.1, 0.12, 0.03)NEWLINE self._gallery_button_hover_background_top_brush = wx.Brush(LikeSecondary(secondary_hsl, secondary_is_gray, 4.3, 0.16, 0.17))NEWLINENEWLINE self._gallery_button_active_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -9.9, 0.03, -0.22)NEWLINE self._gallery_button_active_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -9.5, 0.14, -0.11)NEWLINE self._gallery_button_active_background_top_brush = wx.Brush(LikeSecondary(secondary_hsl, secondary_is_gray, -9.0, 0.15, -0.08))NEWLINENEWLINE self._button_bar_label_colour = self._tab_label_colourNEWLINE self._button_bar_hover_border_pen = wx.Pen(LikeSecondary(secondary_hsl, secondary_is_gray, -6.2, -0.47, -0.14))NEWLINE self._button_bar_hover_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -0.6, 0.16, 0.04)NEWLINE self._button_bar_hover_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -0.2, 0.16, -0.10)NEWLINE self._button_bar_hover_background_top_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, 0.2, 0.16, 0.03)NEWLINE self._button_bar_hover_background_top_colour = LikeSecondary(secondary_hsl, secondary_is_gray, 8.8, 0.16, 0.17)NEWLINE self._button_bar_active_border_pen = wx.Pen(LikeSecondary(secondary_hsl, secondary_is_gray, -6.2, -0.47, -0.25))NEWLINE self._button_bar_active_background_top_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -8.4, 0.08, 0.06)NEWLINE self._button_bar_active_background_top_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -9.7, 0.13, -0.07)NEWLINE self._button_bar_active_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -9.9, 0.14, -0.14)NEWLINE self._button_bar_active_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -8.7, 0.17, -0.03)NEWLINENEWLINE self._toolbar_border_pen = wx.Pen(LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.21, -0.16))NEWLINE self.SetColour(RIBBON_ART_TOOLBAR_FACE_COLOUR, LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.17, -0.22))NEWLINE self._tool_background_top_colour = LikePrimary(primary_hsl, primary_is_gray, -1.9, -0.07, 0.06)NEWLINE self._tool_background_top_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.4, 0.12, 0.08)NEWLINE self._tool_background_colour = LikePrimary(primary_hsl, primary_is_gray, 1.4, -0.09, 0.03)NEWLINE self._tool_background_gradient_colour = LikePrimary(primary_hsl, primary_is_gray, 1.9, 0.11, 0.09)NEWLINE self._tool_hover_background_top_colour = LikeSecondary(secondary_hsl, secondary_is_gray, 3.4, 0.11, 0.16)NEWLINE self._tool_hover_background_top_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -1.4, 0.04, 0.08)NEWLINE self._tool_hover_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -1.8, 0.16, -0.12)NEWLINE self._tool_hover_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -2.6, 0.16, 0.05)NEWLINE self._tool_active_background_top_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -9.9, -0.12, -0.09)NEWLINE self._tool_active_background_top_gradient_colour = 
LikeSecondary(secondary_hsl, secondary_is_gray, -8.5, 0.16, -0.12)NEWLINE self._tool_active_background_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -7.9, 0.16, -0.20)NEWLINE self._tool_active_background_gradient_colour = LikeSecondary(secondary_hsl, secondary_is_gray, -6.6, 0.16, -0.10)NEWLINENEWLINE # Invalidate cached tab separatorNEWLINE self._cached_tab_separator_visibility = -1.0NEWLINENEWLINENEWLINE def Clone(self):NEWLINE """NEWLINE Create a new art provider which is a clone of this one.NEWLINE """NEWLINENEWLINE copy = RibbonMSWArtProvider()NEWLINE self.CloneTo(copy)NEWLINE return copyNEWLINENEWLINENEWLINE def CloneTo(self, copy):NEWLINENEWLINE for i in range(4):NEWLINE copy._gallery_up_bitmap[i] = self._gallery_up_bitmap[i]NEWLINE copy._gallery_down_bitmap[i] = self._gallery_down_bitmap[i]NEWLINE copy._gallery_extension_bitmap[i] = self._gallery_extension_bitmap[i]NEWLINENEWLINE for i in range(2):NEWLINE copy._panel_extension_bitmap[i] = self._panel_extension_bitmap[i]NEWLINENEWLINE copy._toolbar_drop_bitmap = self._toolbar_drop_bitmapNEWLINENEWLINE copy._primary_scheme_colour = self._primary_scheme_colourNEWLINE copy._secondary_scheme_colour = self._secondary_scheme_colourNEWLINE copy._tertiary_scheme_colour = self._tertiary_scheme_colourNEWLINENEWLINE copy._button_bar_label_colour = self._button_bar_label_colourNEWLINE copy._tab_label_colour = self._tab_label_colourNEWLINE copy._tab_separator_colour = self._tab_separator_colourNEWLINE copy._tab_separator_gradient_colour = self._tab_separator_gradient_colourNEWLINE copy._tab_active_background_colour = self._tab_active_background_colourNEWLINE copy._tab_active_background_gradient_colour = self._tab_active_background_gradient_colourNEWLINE copy._tab_hover_background_colour = self._tab_hover_background_colourNEWLINE copy._tab_hover_background_gradient_colour = self._tab_hover_background_gradient_colourNEWLINE copy._tab_hover_background_top_colour = self._tab_hover_background_top_colourNEWLINE copy._tab_hover_background_top_gradient_colour = self._tab_hover_background_top_gradient_colourNEWLINE copy._panel_label_colour = self._panel_label_colourNEWLINE copy._panel_hover_label_colour = self._panel_hover_label_colourNEWLINE copy._panel_minimised_label_colour = self._panel_minimised_label_colourNEWLINE copy._panel_button_face_colour = self._panel_button_face_colourNEWLINE copy._panel_button_hover_face_colour = self._panel_button_hover_face_colourNEWLINE copy._panel_active_background_colour = self._panel_active_background_colourNEWLINE copy._panel_active_background_gradient_colour = self._panel_active_background_gradient_colourNEWLINE copy._panel_active_background_top_colour = self._panel_active_background_top_colourNEWLINE copy._panel_active_background_top_gradient_colour = self._panel_active_background_top_gradient_colourNEWLINE copy._page_background_colour = self._page_background_colourNEWLINE copy._page_background_gradient_colour = self._page_background_gradient_colourNEWLINE copy._page_background_top_colour = self._page_background_top_colourNEWLINE copy._page_background_top_gradient_colour = self._page_background_top_gradient_colourNEWLINE copy._page_hover_background_colour = self._page_hover_background_colourNEWLINE copy._page_hover_background_gradient_colour = self._page_hover_background_gradient_colourNEWLINE copy._page_hover_background_top_colour = self._page_hover_background_top_colourNEWLINE copy._page_hover_background_top_gradient_colour = self._page_hover_background_top_gradient_colourNEWLINE
copy._button_bar_hover_background_colour = self._button_bar_hover_background_colourNEWLINE copy._button_bar_hover_background_gradient_colour = self._button_bar_hover_background_gradient_colourNEWLINE copy._button_bar_hover_background_top_colour = self._button_bar_hover_background_top_colourNEWLINE copy._button_bar_hover_background_top_gradient_colour = self._button_bar_hover_background_top_gradient_colourNEWLINE copy._button_bar_active_background_colour = self._button_bar_active_background_colourNEWLINE copy._button_bar_active_background_gradient_colour = self._button_bar_active_background_gradient_colourNEWLINE copy._button_bar_active_background_top_colour = self._button_bar_active_background_top_colourNEWLINE copy._button_bar_active_background_top_gradient_colour = self._button_bar_active_background_top_gradient_colourNEWLINE copy._gallery_button_background_colour = self._gallery_button_background_colourNEWLINE copy._gallery_button_background_gradient_colour = self._gallery_button_background_gradient_colourNEWLINE copy._gallery_button_hover_background_colour = self._gallery_button_hover_background_colourNEWLINE copy._gallery_button_hover_background_gradient_colour = self._gallery_button_hover_background_gradient_colourNEWLINE copy._gallery_button_active_background_colour = self._gallery_button_active_background_colourNEWLINE copy._gallery_button_active_background_gradient_colour = self._gallery_button_active_background_gradient_colourNEWLINE copy._gallery_button_disabled_background_colour = self._gallery_button_disabled_background_colourNEWLINE copy._gallery_button_disabled_background_gradient_colour = self._gallery_button_disabled_background_gradient_colourNEWLINE copy._gallery_button_face_colour = self._gallery_button_face_colourNEWLINE copy._gallery_button_hover_face_colour = self._gallery_button_hover_face_colourNEWLINE copy._gallery_button_active_face_colour = self._gallery_button_active_face_colourNEWLINE copy._gallery_button_disabled_face_colour = self._gallery_button_disabled_face_colourNEWLINENEWLINE copy._tab_ctrl_background_brush = self._tab_ctrl_background_brushNEWLINE copy._panel_label_background_brush = self._panel_label_background_brushNEWLINE copy._panel_hover_label_background_brush = self._panel_hover_label_background_brushNEWLINE copy._panel_hover_button_background_brush = self._panel_hover_button_background_brushNEWLINE copy._gallery_hover_background_brush = self._gallery_hover_background_brushNEWLINE copy._gallery_button_background_top_brush = self._gallery_button_background_top_brushNEWLINE copy._gallery_button_hover_background_top_brush = self._gallery_button_hover_background_top_brushNEWLINE copy._gallery_button_active_background_top_brush = self._gallery_button_active_background_top_brushNEWLINE copy._gallery_button_disabled_background_top_brush = self._gallery_button_disabled_background_top_brushNEWLINENEWLINE copy._tab_label_font = self._tab_label_fontNEWLINE copy._button_bar_label_font = self._button_bar_label_fontNEWLINE copy._panel_label_font = self._panel_label_fontNEWLINENEWLINE copy._page_border_pen = self._page_border_penNEWLINE copy._panel_border_pen = self._panel_border_penNEWLINE copy._panel_border_gradient_pen = self._panel_border_gradient_penNEWLINE copy._panel_hover_button_border_pen = self._panel_hover_button_border_penNEWLINE copy._panel_minimised_border_pen = self._panel_minimised_border_penNEWLINE copy._panel_minimised_border_gradient_pen = self._panel_minimised_border_gradient_penNEWLINE copy._tab_border_pen = self._tab_border_penNEWLINE 
copy._gallery_border_pen = self._gallery_border_penNEWLINE
copy._button_bar_hover_border_pen = self._button_bar_hover_border_penNEWLINE
copy._button_bar_active_border_pen = self._button_bar_active_border_penNEWLINE
copy._gallery_item_border_pen = self._gallery_item_border_penNEWLINE
copy._toolbar_border_pen = self._toolbar_border_penNEWLINENEWLINE
copy._flags = self._flagsNEWLINE
copy._tab_separation_size = self._tab_separation_sizeNEWLINE
copy._page_border_left = self._page_border_leftNEWLINE
copy._page_border_top = self._page_border_topNEWLINE
copy._page_border_right = self._page_border_rightNEWLINE
copy._page_border_bottom = self._page_border_bottomNEWLINE
copy._panel_x_separation_size = self._panel_x_separation_sizeNEWLINE
copy._panel_y_separation_size = self._panel_y_separation_sizeNEWLINE
copy._gallery_bitmap_padding_left_size = self._gallery_bitmap_padding_left_sizeNEWLINE
copy._gallery_bitmap_padding_right_size = self._gallery_bitmap_padding_right_sizeNEWLINE
copy._gallery_bitmap_padding_top_size = self._gallery_bitmap_padding_top_sizeNEWLINE
copy._gallery_bitmap_padding_bottom__size = self._gallery_bitmap_padding_bottom__sizeNEWLINENEWLINENEWLINE
def GetFlags(self):NEWLINE
"""NEWLINE
Get the previously set style flags.NEWLINE
"""NEWLINENEWLINE
return self._flagsNEWLINENEWLINENEWLINE
def SetFlags(self, flags):NEWLINE
"""NEWLINE
Set the style flags.NEWLINENEWLINE
Normally called automatically by :meth:`RibbonBar.SetArtProvider() <lib.agw.ribbon.bar.RibbonBar.SetArtProvider>` with the ribbonNEWLINE
bar's style flags, so that the art provider has the same flags as the bar whichNEWLINE
it is serving.NEWLINENEWLINE
:param `flags`: the new style flags, a combination of the ``RIBBON_BAR_*`` flagsNEWLINE
such as ``RIBBON_BAR_FLOW_VERTICAL``.NEWLINENEWLINE
"""NEWLINENEWLINE
if (flags ^ self._flags) & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
if flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
self._page_border_left += 1NEWLINE
self._page_border_right += 1NEWLINE
self._page_border_top -= 1NEWLINE
self._page_border_bottom -= 1NEWLINE
else:NEWLINE
self._page_border_left -= 1NEWLINE
self._page_border_right -= 1NEWLINE
self._page_border_top += 1NEWLINE
self._page_border_bottom += 1NEWLINENEWLINE
self._flags = flagsNEWLINENEWLINE
# Need to reload some bitmaps when flags changeNEWLINE
self.Reload(RIBBON_ART_GALLERY_BUTTON_FACE_COLOUR)NEWLINE
self.Reload(RIBBON_ART_GALLERY_BUTTON_HOVER_FACE_COLOUR)NEWLINE
self.Reload(RIBBON_ART_GALLERY_BUTTON_ACTIVE_FACE_COLOUR)NEWLINE
self.Reload(RIBBON_ART_GALLERY_BUTTON_DISABLED_FACE_COLOUR)NEWLINE
self.Reload(RIBBON_ART_PANEL_BUTTON_FACE_COLOUR)NEWLINE
self.Reload(RIBBON_ART_PANEL_BUTTON_HOVER_FACE_COLOUR)NEWLINENEWLINENEWLINE
def Reload(self, setting):NEWLINE
"""NEWLINE
Re-apply the current value of a setting, regenerating any cached bitmaps derived from it.NEWLINE
"""NEWLINENEWLINE
self.SetColour(setting, self.GetColour(setting))NEWLINENEWLINENEWLINE
def GetMetric(self, id):NEWLINE
"""NEWLINE
Get the value of a certain integer setting.NEWLINENEWLINE
:param `id`: a metric id; it can be one of the size values of `RibbonArtSetting`.NEWLINENEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_TAB_SEPARATION_SIZE:NEWLINE
return self._tab_separation_sizeNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_LEFT_SIZE:NEWLINE
return self._page_border_leftNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_TOP_SIZE:NEWLINE
return self._page_border_topNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_RIGHT_SIZE:NEWLINE
return self._page_border_rightNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_BOTTOM_SIZE:NEWLINE
return self._page_border_bottomNEWLINE
elif id == RIBBON_ART_PANEL_X_SEPARATION_SIZE:NEWLINE
return self._panel_x_separation_sizeNEWLINE
elif id == RIBBON_ART_PANEL_Y_SEPARATION_SIZE:NEWLINE
return self._panel_y_separation_sizeNEWLINE
elif id == RIBBON_ART_TOOL_GROUP_SEPARATION_SIZE:NEWLINE
return self._tool_group_separation_sizeNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_LEFT_SIZE:NEWLINE
return self._gallery_bitmap_padding_left_sizeNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_RIGHT_SIZE:NEWLINE
return self._gallery_bitmap_padding_right_sizeNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_TOP_SIZE:NEWLINE
return self._gallery_bitmap_padding_top_sizeNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_BOTTOM_SIZE:NEWLINE
return self._gallery_bitmap_padding_bottom__sizeNEWLINE
else:NEWLINE
raise Exception("Invalid Metric Ordinal")NEWLINENEWLINENEWLINE
def SetMetric(self, id, new_val):NEWLINE
"""NEWLINE
Set the value of a certain integer setting to the value `new_val`.NEWLINENEWLINE
:param `id`: a metric id; it can be one of the size values of `RibbonArtSetting`;NEWLINE
:param `new_val`: the new value of the metric setting.NEWLINENEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_TAB_SEPARATION_SIZE:NEWLINE
self._tab_separation_size = new_valNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_LEFT_SIZE:NEWLINE
self._page_border_left = new_valNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_TOP_SIZE:NEWLINE
self._page_border_top = new_valNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_RIGHT_SIZE:NEWLINE
self._page_border_right = new_valNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_BOTTOM_SIZE:NEWLINE
self._page_border_bottom = new_valNEWLINE
elif id == RIBBON_ART_PANEL_X_SEPARATION_SIZE:NEWLINE
self._panel_x_separation_size = new_valNEWLINE
elif id == RIBBON_ART_PANEL_Y_SEPARATION_SIZE:NEWLINE
self._panel_y_separation_size = new_valNEWLINE
elif id == RIBBON_ART_TOOL_GROUP_SEPARATION_SIZE:NEWLINE
self._tool_group_separation_size = new_valNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_LEFT_SIZE:NEWLINE
self._gallery_bitmap_padding_left_size = new_valNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_RIGHT_SIZE:NEWLINE
self._gallery_bitmap_padding_right_size = new_valNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_TOP_SIZE:NEWLINE
self._gallery_bitmap_padding_top_size = new_valNEWLINE
elif id == RIBBON_ART_GALLERY_BITMAP_PADDING_BOTTOM_SIZE:NEWLINE
self._gallery_bitmap_padding_bottom__size = new_valNEWLINE
else:NEWLINE
raise Exception("Invalid Metric Ordinal")NEWLINENEWLINENEWLINE
def SetFont(self, id, font):NEWLINE
"""NEWLINE
Set the value of a certain font setting to the value `font`.NEWLINENEWLINE
:param `id`: a font id; it can be one of the font values of `RibbonArtSetting`;NEWLINE
:param `font`: the new font.NEWLINENEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_TAB_LABEL_FONT:NEWLINE
self._tab_label_font = fontNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_LABEL_FONT:NEWLINE
self._button_bar_label_font = fontNEWLINE
elif id == RIBBON_ART_PANEL_LABEL_FONT:NEWLINE
self._panel_label_font = fontNEWLINE
else:NEWLINE
raise Exception("Invalid Font Ordinal")NEWLINENEWLINENEWLINE
def GetFont(self, id):NEWLINE
"""NEWLINE
Get the value of a certain font setting.NEWLINENEWLINE
:param `id`: the font id; it can be one of the font values of `RibbonArtSetting`.NEWLINENEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_TAB_LABEL_FONT:NEWLINE
return self._tab_label_fontNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_LABEL_FONT:NEWLINE
return self._button_bar_label_fontNEWLINE
elif id == RIBBON_ART_PANEL_LABEL_FONT:NEWLINE
return self._panel_label_fontNEWLINE
else:NEWLINE
raise Exception("Invalid Font Ordinal")NEWLINENEWLINENEWLINE
def GetColour(self, id):NEWLINE
"""NEWLINE
Get the value of a certain colour setting.NEWLINENEWLINE
:param `id`: the colour id; it can be one of the colour values of `RibbonArtSetting`.NEWLINENEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_BUTTON_BAR_LABEL_COLOUR:NEWLINE
return self._button_bar_label_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BORDER_COLOUR:NEWLINE
return self._button_bar_hover_border_pen.GetColour()NEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE
return self._button_bar_hover_background_top_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
return self._button_bar_hover_background_top_gradient_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_COLOUR:NEWLINE
return self._button_bar_hover_background_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._button_bar_hover_background_gradient_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BORDER_COLOUR:NEWLINE
return self._button_bar_active_border_pen.GetColour()NEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE
return self._button_bar_active_background_top_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
return self._button_bar_active_background_top_gradient_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_COLOUR:NEWLINE
return self._button_bar_active_background_colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._button_bar_active_background_gradient_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BORDER_COLOUR:NEWLINE
return self._gallery_border_pen.GetColour()NEWLINE
elif id == RIBBON_ART_GALLERY_HOVER_BACKGROUND_COLOUR:NEWLINE
return self._gallery_hover_background_brush.GetColour()NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_COLOUR:NEWLINE
return self._gallery_button_background_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._gallery_button_background_gradient_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_TOP_COLOUR:NEWLINE
return self._gallery_button_background_top_brush.GetColour()NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_FACE_COLOUR:NEWLINE
return self._gallery_button_face_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_COLOUR:NEWLINE
return self._gallery_button_hover_background_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._gallery_button_hover_background_gradient_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE
return self._gallery_button_hover_background_top_brush.GetColour()NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_FACE_COLOUR:NEWLINE
return self._gallery_button_hover_face_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_COLOUR:NEWLINE
return self._gallery_button_active_background_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._gallery_button_active_background_gradient_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE
return self._gallery_button_active_background_top_brush.GetColour()NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_FACE_COLOUR:NEWLINE
return self._gallery_button_active_face_colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_COLOUR:NEWLINE
return self._gallery_button_disabled_background_colourNEWLINE
elif id ==
RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_GRADIENT_COLOUR:NEWLINE return self._gallery_button_disabled_background_gradient_colourNEWLINE elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_TOP_COLOUR:NEWLINE return self._gallery_button_disabled_background_top_brush.GetColour()NEWLINE elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_FACE_COLOUR:NEWLINE return self._gallery_button_disabled_face_colourNEWLINE elif id == RIBBON_ART_GALLERY_ITEM_BORDER_COLOUR:NEWLINE return self._gallery_item_border_pen.GetColour()NEWLINE elif id in [RIBBON_ART_TAB_CTRL_BACKGROUND_COLOUR, RIBBON_ART_TAB_CTRL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE return self._tab_ctrl_background_brush.GetColour()NEWLINE elif id == RIBBON_ART_TAB_LABEL_COLOUR:NEWLINE return self._tab_label_colourNEWLINE elif id == RIBBON_ART_TAB_SEPARATOR_COLOUR:NEWLINE return self._tab_separator_colourNEWLINE elif id == RIBBON_ART_TAB_SEPARATOR_GRADIENT_COLOUR:NEWLINE return self._tab_separator_gradient_colourNEWLINE elif id in [RIBBON_ART_TAB_ACTIVE_BACKGROUND_TOP_COLOUR, RIBBON_ART_TAB_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR]:NEWLINE return wx.Colour(0, 0, 0)NEWLINE elif id == RIBBON_ART_TAB_ACTIVE_BACKGROUND_COLOUR:NEWLINE return self._tab_active_background_colourNEWLINE elif id == RIBBON_ART_TAB_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE return self._tab_active_background_gradient_colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE return self._tab_hover_background_top_colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE return self._tab_hover_background_top_gradient_colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_COLOUR:NEWLINE return self._tab_hover_background_colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE return self._tab_hover_background_gradient_colourNEWLINE elif id == RIBBON_ART_TAB_BORDER_COLOUR:NEWLINE return self._tab_border_pen.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_BORDER_COLOUR:NEWLINE return self._panel_border_pen.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_BORDER_GRADIENT_COLOUR:NEWLINE return self._panel_border_gradient_pen.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_BORDER_COLOUR:NEWLINE return self._panel_minimised_border_pen.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_BORDER_GRADIENT_COLOUR:NEWLINE return self._panel_minimised_border_gradient_pen.GetColour()NEWLINE elif id in [RIBBON_ART_PANEL_LABEL_BACKGROUND_COLOUR, RIBBON_ART_PANEL_LABEL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE return self._panel_label_background_brush.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_LABEL_COLOUR:NEWLINE return self._panel_label_colourNEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_LABEL_COLOUR:NEWLINE return self._panel_minimised_label_colourNEWLINE elif id in [RIBBON_ART_PANEL_HOVER_LABEL_BACKGROUND_COLOUR, RIBBON_ART_PANEL_HOVER_LABEL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE return self._panel_hover_label_background_brush.GetColour()NEWLINE elif id == RIBBON_ART_PANEL_HOVER_LABEL_COLOUR:NEWLINE return self._panel_hover_label_colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE return self._panel_active_background_top_colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE return self._panel_active_background_top_gradient_colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_COLOUR:NEWLINE return self._panel_active_background_colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE return 
self._panel_active_background_gradient_colourNEWLINE
elif id == RIBBON_ART_PANEL_BUTTON_FACE_COLOUR:NEWLINE
return self._panel_button_face_colourNEWLINE
elif id == RIBBON_ART_PANEL_BUTTON_HOVER_FACE_COLOUR:NEWLINE
return self._panel_button_hover_face_colourNEWLINE
elif id == RIBBON_ART_PAGE_BORDER_COLOUR:NEWLINE
return self._page_border_pen.GetColour()NEWLINE
elif id == RIBBON_ART_PAGE_BACKGROUND_TOP_COLOUR:NEWLINE
return self._page_background_top_colourNEWLINE
elif id == RIBBON_ART_PAGE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
return self._page_background_top_gradient_colourNEWLINE
elif id == RIBBON_ART_PAGE_BACKGROUND_COLOUR:NEWLINE
return self._page_background_colourNEWLINE
elif id == RIBBON_ART_PAGE_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._page_background_gradient_colourNEWLINE
elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE
return self._page_hover_background_top_colourNEWLINE
elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
return self._page_hover_background_top_gradient_colourNEWLINE
elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_COLOUR:NEWLINE
return self._page_hover_background_colourNEWLINE
elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE
return self._page_hover_background_gradient_colourNEWLINE
elif id in [RIBBON_ART_TOOLBAR_BORDER_COLOUR, RIBBON_ART_TOOLBAR_HOVER_BORDER_COLOUR]:NEWLINE
return self._toolbar_border_pen.GetColour()NEWLINE
elif id == RIBBON_ART_TOOLBAR_FACE_COLOUR:NEWLINE
return self._tool_face_colourNEWLINE
else:NEWLINE
raise Exception("Invalid Colour Ordinal")NEWLINENEWLINENEWLINE
def SetColour(self, id, colour):NEWLINE
"""NEWLINE
Set the value of a certain colour setting to the value `colour`.NEWLINENEWLINE
:param `id`: the colour id; it can be one of the colour values of `RibbonArtSetting`,NEWLINE
though not all colour settings will have an effect on every art provider;NEWLINE
:param `colour`: the colour.NEWLINENEWLINE
:see: :meth:`~RibbonMSWArtProvider.SetColourScheme`NEWLINE
"""NEWLINENEWLINE
if id == RIBBON_ART_BUTTON_BAR_LABEL_COLOUR:NEWLINE
self._button_bar_label_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BORDER_COLOUR:NEWLINE
self._button_bar_hover_border_pen.SetColour(colour)NEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE
self._button_bar_hover_background_top_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
self._button_bar_hover_background_top_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_COLOUR:NEWLINE
self._button_bar_hover_background_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._button_bar_hover_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BORDER_COLOUR:NEWLINE
self._button_bar_active_border_pen.SetColour(colour)NEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE
self._button_bar_active_background_top_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE
self._button_bar_active_background_top_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_COLOUR:NEWLINE
self._button_bar_active_background_colour = colourNEWLINE
elif id == RIBBON_ART_BUTTON_BAR_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._button_bar_active_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BORDER_COLOUR:NEWLINE
self._gallery_border_pen.SetColour(colour)NEWLINE
elif id == RIBBON_ART_GALLERY_HOVER_BACKGROUND_COLOUR:NEWLINE
self._gallery_hover_background_brush.SetColour(colour)NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_COLOUR:NEWLINE
self._gallery_button_background_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._gallery_button_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_BACKGROUND_TOP_COLOUR:NEWLINE
self._gallery_button_background_top_brush.SetColour(colour)NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_FACE_COLOUR:NEWLINE
self._gallery_button_face_colour = colourNEWLINENEWLINE
if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
self._gallery_up_bitmap[0] = RibbonLoadPixmap(gallery_left_xpm, colour)NEWLINE
self._gallery_down_bitmap[0] = RibbonLoadPixmap(gallery_right_xpm, colour)NEWLINE
else:NEWLINE
self._gallery_up_bitmap[0] = RibbonLoadPixmap(gallery_up_xpm, colour)NEWLINE
self._gallery_down_bitmap[0] = RibbonLoadPixmap(gallery_down_xpm, colour)NEWLINENEWLINE
self._gallery_extension_bitmap[0] = RibbonLoadPixmap(gallery_extension_xpm, colour)NEWLINENEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_COLOUR:NEWLINE
self._gallery_button_hover_background_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._gallery_button_hover_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE
self._gallery_button_hover_background_top_brush.SetColour(colour)NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_HOVER_FACE_COLOUR:NEWLINE
self._gallery_button_hover_face_colour = colourNEWLINENEWLINE
if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
self._gallery_up_bitmap[1] = RibbonLoadPixmap(gallery_left_xpm, colour)NEWLINE
self._gallery_down_bitmap[1] = RibbonLoadPixmap(gallery_right_xpm, colour)NEWLINE
else:NEWLINE
self._gallery_up_bitmap[1] = RibbonLoadPixmap(gallery_up_xpm, colour)NEWLINE
self._gallery_down_bitmap[1] = RibbonLoadPixmap(gallery_down_xpm, colour)NEWLINENEWLINE
self._gallery_extension_bitmap[1] = RibbonLoadPixmap(gallery_extension_xpm, colour)NEWLINENEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_COLOUR:NEWLINE
self._gallery_button_active_background_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._gallery_button_active_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE
self._gallery_button_active_background_top_brush.SetColour(colour)NEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_ACTIVE_FACE_COLOUR:NEWLINE
self._gallery_button_active_face_colour = colourNEWLINENEWLINE
if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
self._gallery_up_bitmap[2] = RibbonLoadPixmap(gallery_left_xpm, colour)NEWLINE
self._gallery_down_bitmap[2] = RibbonLoadPixmap(gallery_right_xpm, colour)NEWLINE
else:NEWLINE
self._gallery_up_bitmap[2] = RibbonLoadPixmap(gallery_up_xpm, colour)NEWLINE
self._gallery_down_bitmap[2] = RibbonLoadPixmap(gallery_down_xpm, colour)NEWLINENEWLINE
self._gallery_extension_bitmap[2] = RibbonLoadPixmap(gallery_extension_xpm, colour)NEWLINENEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_COLOUR:NEWLINE
self._gallery_button_disabled_background_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_GRADIENT_COLOUR:NEWLINE
self._gallery_button_disabled_background_gradient_colour = colourNEWLINE
elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_BACKGROUND_TOP_COLOUR:NEWLINE
self._gallery_button_disabled_background_top_brush.SetColour(colour)NEWLINE elif id == RIBBON_ART_GALLERY_BUTTON_DISABLED_FACE_COLOUR:NEWLINE self._gallery_button_disabled_face_colour = colourNEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE self._gallery_up_bitmap[3] = RibbonLoadPixmap(gallery_left_xpm, colour)NEWLINE self._gallery_down_bitmap[3] = RibbonLoadPixmap(gallery_right_xpm, colour)NEWLINE else:NEWLINE self._gallery_up_bitmap[3] = RibbonLoadPixmap(gallery_up_xpm, colour)NEWLINE self._gallery_down_bitmap[3] = RibbonLoadPixmap(gallery_down_xpm, colour)NEWLINENEWLINE self._gallery_extension_bitmap[3] = RibbonLoadPixmap(gallery_extension_xpm, colour)NEWLINENEWLINE elif id == RIBBON_ART_GALLERY_ITEM_BORDER_COLOUR:NEWLINE self._gallery_item_border_pen.SetColour(colour)NEWLINENEWLINE elif id in [RIBBON_ART_TAB_CTRL_BACKGROUND_COLOUR, RIBBON_ART_TAB_CTRL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE self._tab_ctrl_background_brush.SetColour(colour)NEWLINE self._cached_tab_separator_visibility = -1.0NEWLINE elif id == RIBBON_ART_TAB_LABEL_COLOUR:NEWLINE self._tab_label_colour = colourNEWLINE elif id == RIBBON_ART_TAB_SEPARATOR_COLOUR:NEWLINE self._tab_separator_colour = colourNEWLINE self._cached_tab_separator_visibility = -1.0NEWLINE elif id == RIBBON_ART_TAB_SEPARATOR_GRADIENT_COLOUR:NEWLINE self._tab_separator_gradient_colour = colourNEWLINE self._cached_tab_separator_visibility = -1.0NEWLINE elif id in [RIBBON_ART_TAB_ACTIVE_BACKGROUND_TOP_COLOUR, RIBBON_ART_TAB_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR]:NEWLINE passNEWLINE elif id == RIBBON_ART_TAB_ACTIVE_BACKGROUND_COLOUR:NEWLINE self._tab_active_background_colour = colourNEWLINE elif id == RIBBON_ART_TAB_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE self._tab_active_background_gradient_colour = colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE self._tab_hover_background_top_colour = colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE self._tab_hover_background_top_gradient_colour = colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_COLOUR:NEWLINE self._tab_hover_background_colour = colourNEWLINE elif id == RIBBON_ART_TAB_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE self._tab_hover_background_gradient_colour = colourNEWLINE elif id == RIBBON_ART_TAB_BORDER_COLOUR:NEWLINE self._tab_border_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_BORDER_COLOUR:NEWLINE self._panel_border_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_BORDER_GRADIENT_COLOUR:NEWLINE self._panel_border_gradient_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_BORDER_COLOUR:NEWLINE self._panel_minimised_border_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_BORDER_GRADIENT_COLOUR:NEWLINE self._panel_minimised_border_gradient_pen.SetColour(colour)NEWLINE elif id in [RIBBON_ART_PANEL_LABEL_BACKGROUND_COLOUR, RIBBON_ART_PANEL_LABEL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE self._panel_label_background_brush.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_LABEL_COLOUR:NEWLINE self._panel_label_colour = colourNEWLINE elif id in [RIBBON_ART_PANEL_HOVER_LABEL_BACKGROUND_COLOUR, RIBBON_ART_PANEL_HOVER_LABEL_BACKGROUND_GRADIENT_COLOUR]:NEWLINE self._panel_hover_label_background_brush.SetColour(colour)NEWLINE elif id == RIBBON_ART_PANEL_HOVER_LABEL_COLOUR:NEWLINE self._panel_hover_label_colour = colourNEWLINE elif id == RIBBON_ART_PANEL_MINIMISED_LABEL_COLOUR:NEWLINE self._panel_minimised_label_colour = colourNEWLINE elif id == 
RIBBON_ART_PANEL_ACTIVE_BACKGROUND_TOP_COLOUR:NEWLINE self._panel_active_background_top_colour = colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE self._panel_active_background_top_gradient_colour = colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_COLOUR:NEWLINE self._panel_active_background_colour = colourNEWLINE elif id == RIBBON_ART_PANEL_ACTIVE_BACKGROUND_GRADIENT_COLOUR:NEWLINE self._panel_active_background_gradient_colour = colourNEWLINE elif id == RIBBON_ART_PANEL_BUTTON_FACE_COLOUR:NEWLINE self._panel_button_face_colour = colourNEWLINE self._panel_extension_bitmap[0] = RibbonLoadPixmap(panel_extension_xpm, colour)NEWLINE elif id == RIBBON_ART_PANEL_BUTTON_HOVER_FACE_COLOUR:NEWLINE self._panel_button_hover_face_colour = colourNEWLINE self._panel_extension_bitmap[1] = RibbonLoadPixmap(panel_extension_xpm, colour)NEWLINE elif id == RIBBON_ART_PAGE_BORDER_COLOUR:NEWLINE self._page_border_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_PAGE_BACKGROUND_TOP_COLOUR:NEWLINE self._page_background_top_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE self._page_background_top_gradient_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_BACKGROUND_COLOUR:NEWLINE self._page_background_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_BACKGROUND_GRADIENT_COLOUR:NEWLINE self._page_background_gradient_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_TOP_COLOUR:NEWLINE self._page_hover_background_top_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_TOP_GRADIENT_COLOUR:NEWLINE self._page_hover_background_top_gradient_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_COLOUR:NEWLINE self._page_hover_background_colour = colourNEWLINE elif id == RIBBON_ART_PAGE_HOVER_BACKGROUND_GRADIENT_COLOUR:NEWLINE self._page_hover_background_gradient_colour = colourNEWLINE elif id in [RIBBON_ART_TOOLBAR_BORDER_COLOUR, RIBBON_ART_TOOLBAR_HOVER_BORDER_COLOUR]:NEWLINE self._toolbar_border_pen.SetColour(colour)NEWLINE elif id == RIBBON_ART_TOOLBAR_FACE_COLOUR:NEWLINE self._tool_face_colour = colourNEWLINE self._toolbar_drop_bitmap = RibbonLoadPixmap(gallery_down_xpm, colour)NEWLINE else:NEWLINE raise Exception("Invalid Colour Ordinal")NEWLINENEWLINENEWLINE def DrawTabCtrlBackground(self, dc, wnd, rect):NEWLINE """NEWLINE Draw the background of the tab region of a ribbon bar.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto;NEWLINE :param `rect`: The rectangle within which to draw.NEWLINENEWLINE """NEWLINENEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINENEWLINE dc.SetBrush(self._tab_ctrl_background_brush)NEWLINE dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height)NEWLINENEWLINE dc.SetPen(self._page_border_pen)NEWLINENEWLINE if rect.width > 6:NEWLINE dc.DrawLine(rect.x + 3, rect.y + rect.height - 1, rect.x + rect.width - 3, rect.y + rect.height - 1)NEWLINE else:NEWLINE dc.DrawLine(rect.x, rect.y + rect.height - 1, rect.x + rect.width, rect.y + rect.height - 1)NEWLINENEWLINENEWLINE def DrawTab(self, dc, wnd, tab):NEWLINE """NEWLINE Draw a single tab in the tab region of a ribbon bar.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto (not the :class:`~wx.lib.agw.ribbon.page.RibbonPage` associatedNEWLINE with the tab being drawn);NEWLINE :param `tab`: The rectangle within which to draw, and also the tab label, icon, andNEWLINE state (active and/or 
hovered). The drawing rectangle will be entirely within aNEWLINE rectangle on the same device context previously painted with :meth:`~RibbonMSWArtProvider.DrawTabCtrlBackground`.NEWLINE The rectangle's width will be at least the minimum value returned by :meth:`~RibbonMSWArtProvider.GetBarTabWidth`,NEWLINE and height will be the value returned by :meth:`~RibbonMSWArtProvider.GetTabCtrlHeight`.NEWLINENEWLINE """NEWLINENEWLINE if tab.rect.height <= 2:NEWLINE returnNEWLINENEWLINE if tab.active or tab.hovered:NEWLINE if tab.active:NEWLINE background = wx.Rect(*tab.rect)NEWLINE background.x += 2NEWLINE background.y += 2NEWLINE background.width -= 4NEWLINE background.height -= 2NEWLINENEWLINE dc.GradientFillLinear(background, self._tab_active_background_colour,NEWLINE self._tab_active_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE # TODO: active and hoveredNEWLINENEWLINE elif tab.hovered:NEWLINE background = wx.Rect(*tab.rect)NEWLINE background.x += 2NEWLINE background.y += 2NEWLINE background.width -= 4NEWLINE background.height -= 3NEWLINE h = background.heightNEWLINE background.height /= 2NEWLINE dc.GradientFillLinear(background, self._tab_hover_background_top_colour,NEWLINE self._tab_hover_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE background.y += background.heightNEWLINE background.height = h - background.heightNEWLINE dc.GradientFillLinear(background, self._tab_hover_background_colour,NEWLINE self._tab_hover_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE border_points = [wx.Point() for i in range(6)]NEWLINE border_points[0] = wx.Point(1, tab.rect.height - 2)NEWLINE border_points[1] = wx.Point(1, 3)NEWLINE border_points[2] = wx.Point(3, 1)NEWLINE border_points[3] = wx.Point(tab.rect.width - 4, 1)NEWLINE border_points[4] = wx.Point(tab.rect.width - 2, 3)NEWLINE border_points[5] = wx.Point(tab.rect.width - 2, tab.rect.height - 1)NEWLINENEWLINE dc.SetPen(self._tab_border_pen)NEWLINE dc.DrawLines(border_points, tab.rect.x, tab.rect.y)NEWLINENEWLINE if tab.active:NEWLINE # Give the tab a curved outward border at the bottomNEWLINE dc.DrawPoint(tab.rect.x, tab.rect.y + tab.rect.height - 2)NEWLINE dc.DrawPoint(tab.rect.x + tab.rect.width - 1, tab.rect.y + tab.rect.height - 2)NEWLINENEWLINE p = wx.Pen(self._tab_active_background_gradient_colour)NEWLINE dc.SetPen(p)NEWLINENEWLINE # Technically the first two points are the wrong colour, but they're near enoughNEWLINE dc.DrawPoint(tab.rect.x + 1, tab.rect.y + tab.rect.height - 2)NEWLINE dc.DrawPoint(tab.rect.x + tab.rect.width - 2, tab.rect.y + tab.rect.height - 2)NEWLINE dc.DrawPoint(tab.rect.x + 1, tab.rect.y + tab.rect.height - 1)NEWLINE dc.DrawPoint(tab.rect.x, tab.rect.y + tab.rect.height - 1)NEWLINE dc.DrawPoint(tab.rect.x + tab.rect.width - 2, tab.rect.y + tab.rect.height - 1)NEWLINE dc.DrawPoint(tab.rect.x + tab.rect.width - 1, tab.rect.y + tab.rect.height - 1)NEWLINENEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_ICONS:NEWLINE icon = tab.page.GetIcon()NEWLINENEWLINE if icon.IsOk():NEWLINE x = tab.rect.x + 4NEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_LABELS == 0:NEWLINE x = tab.rect.x + (tab.rect.width - icon.GetWidth()) / 2NEWLINENEWLINE dc.DrawBitmap(icon, x, tab.rect.y + 1 + (tab.rect.height - 1 - icon.GetHeight()) / 2, True)NEWLINENEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_LABELS:NEWLINE label = tab.page.GetLabel()NEWLINE if label.strip():NEWLINE dc.SetFont(self._tab_label_font)NEWLINE dc.SetTextForeground(self._tab_label_colour)NEWLINE dc.SetBackgroundMode(wx.TRANSPARENT)NEWLINENEWLINE text_width, text_height = 
dc.GetTextExtent(label)NEWLINE width = tab.rect.width - 5NEWLINE x = tab.rect.x + 3NEWLINENEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_ICONS:NEWLINE x += 3 + tab.page.GetIcon().GetWidth()NEWLINE width -= 3 + tab.page.GetIcon().GetWidth()NEWLINENEWLINE y = tab.rect.y + (tab.rect.height - text_height) / 2NEWLINENEWLINE if width <= text_width:NEWLINE dc.SetClippingRegion(x, tab.rect.y, width, tab.rect.height)NEWLINE dc.DrawText(label, x, y)NEWLINE else:NEWLINE dc.DrawText(label, x + (width - text_width) / 2 + 1, y)NEWLINENEWLINENEWLINE def DrawTabSeparator(self, dc, wnd, rect, visibility):NEWLINE """NEWLINE Draw a separator between two tabs in a ribbon bar.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto;NEWLINE :param `rect`: The rectangle within which to draw, which will be entirelyNEWLINE within a rectangle on the same device context previously painted withNEWLINE :meth:`~RibbonMSWArtProvider.DrawTabCtrlBackground`;NEWLINE :param `visibility`: The opacity with which to draw the separator. ValuesNEWLINE are in the range [0, 1], with 0 being totally transparent, and 1 being totallyNEWLINE opaque.NEWLINENEWLINE """NEWLINENEWLINE if visibility <= 0.0:NEWLINE returnNEWLINENEWLINE if visibility > 1.0:NEWLINE visibility = 1.0NEWLINENEWLINE # The tab separator is relatively expensive to draw (for its size), and isNEWLINE # usually drawn multiple times sequentially (in different positions), so itNEWLINE # makes sense to draw it once and cache it.NEWLINE if not self._cached_tab_separator.IsOk() or self._cached_tab_separator.GetSize() != rect.GetSize() or \NEWLINE visibility != self._cached_tab_separator_visibility:NEWLINENEWLINE size = wx.Rect(0, 0, *rect.GetSize())NEWLINE self.ReallyDrawTabSeparator(wnd, size, visibility)NEWLINENEWLINE dc.DrawBitmap(self._cached_tab_separator, rect.x, rect.y, False)NEWLINENEWLINENEWLINE def ReallyDrawTabSeparator(self, wnd, rect, visibility):NEWLINENEWLINE if not self._cached_tab_separator.IsOk() or self._cached_tab_separator.GetSize() != rect.GetSize():NEWLINE self._cached_tab_separator = wx.Bitmap(*rect.GetSize())NEWLINENEWLINE dc = wx.MemoryDC(self._cached_tab_separator)NEWLINE self.DrawTabCtrlBackground(dc, wnd, rect)NEWLINENEWLINE x = rect.x + rect.width / 2NEWLINE h = float(rect.height - 1)NEWLINENEWLINE r1 = self._tab_ctrl_background_brush.GetColour().Red() * (1.0 - visibility) + 0.5NEWLINE g1 = self._tab_ctrl_background_brush.GetColour().Green() * (1.0 - visibility) + 0.5NEWLINE b1 = self._tab_ctrl_background_brush.GetColour().Blue() * (1.0 - visibility) + 0.5NEWLINE r2 = self._tab_separator_colour.Red()NEWLINE g2 = self._tab_separator_colour.Green()NEWLINE b2 = self._tab_separator_colour.Blue()NEWLINE r3 = self._tab_separator_gradient_colour.Red()NEWLINE g3 = self._tab_separator_gradient_colour.Green()NEWLINE b3 = self._tab_separator_gradient_colour.Blue()NEWLINENEWLINE for i in range(rect.height-1):NEWLINENEWLINE p = float(i)/hNEWLINENEWLINE r = int((p * r3 + (1.0 - p) * r2) * visibility + r1)NEWLINE g = int((p * g3 + (1.0 - p) * g2) * visibility + g1)NEWLINE b = int((p * b3 + (1.0 - p) * b2) * visibility + b1)NEWLINENEWLINE P = wx.Pen(wx.Colour(r, g, b))NEWLINE dc.SetPen(P)NEWLINE dc.DrawPoint(x, rect.y + i)NEWLINENEWLINE self._cached_tab_separator_visibility = visibilityNEWLINENEWLINENEWLINE def DrawPartialPageBackground(self, dc, wnd, rect, allow_hovered_or_page=True, offset=None, hovered=False):NEWLINENEWLINE if isinstance(allow_hovered_or_page, bool):NEWLINE 
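# A bool third argument means the caller used the (dc, wnd, rect, allow_hovered)NEWLINE
# form of this method; otherwise allow_hovered_or_page is the RibbonPage for theNEWLINE
# (dc, wnd, rect, page, offset, hovered) form handled below - this dispatchNEWLINE
# emulates the overloaded signatures of the C++ original.NEWLINE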
self.DrawPartialPageBackground2(dc, wnd, rect, allow_hovered_or_page)NEWLINE
else:NEWLINE
self.DrawPartialPageBackground1(dc, wnd, rect, allow_hovered_or_page, offset, hovered)NEWLINENEWLINENEWLINE
def DrawPartialPageBackground1(self, dc, wnd, rect, page, offset, hovered=False):NEWLINENEWLINE
background = wx.Rect(0, 0, *page.GetSize())NEWLINE
background = page.AdjustRectToIncludeScrollButtons(background)NEWLINE
background.height -= 2NEWLINENEWLINE
# Page background isn't dependent upon the width of the pageNEWLINE
# (at least not the part of it intended to be painted by thisNEWLINE
# function). Set to wider than the page itself for when externallyNEWLINE
# expanded panels need a background - the expanded panel can be widerNEWLINE
# than the bar.NEWLINENEWLINE
background.x = 0NEWLINE
background.width = 10000NEWLINENEWLINE
# upper_rect, lower_rect, paint_rect are all in page co-ordinatesNEWLINE
upper_rect = wx.Rect(*background)NEWLINE
upper_rect.height /= 5NEWLINENEWLINE
lower_rect = wx.Rect(*background)NEWLINE
lower_rect.y += upper_rect.heightNEWLINE
lower_rect.height -= upper_rect.heightNEWLINENEWLINE
paint_rect = wx.Rect(*rect)NEWLINE
paint_rect.x += offset.xNEWLINE
paint_rect.y += offset.yNEWLINENEWLINE
if hovered:NEWLINE
bg_top = self._page_hover_background_top_colourNEWLINE
bg_top_grad = self._page_hover_background_top_gradient_colourNEWLINE
bg_btm = self._page_hover_background_colourNEWLINE
bg_btm_grad = self._page_hover_background_gradient_colourNEWLINE
else:NEWLINE
bg_top = self._page_background_top_colourNEWLINE
bg_top_grad = self._page_background_top_gradient_colourNEWLINE
bg_btm = self._page_background_colourNEWLINE
bg_btm_grad = self._page_background_gradient_colourNEWLINENEWLINE
if paint_rect.Intersects(upper_rect):NEWLINE
rect = wx.Rect(*upper_rect)NEWLINE
rect.Intersect(paint_rect)NEWLINE
rect.x -= offset.xNEWLINE
rect.y -= offset.yNEWLINE
starting_colour = RibbonInterpolateColour(bg_top, bg_top_grad,NEWLINE
paint_rect.y, upper_rect.y,NEWLINE
upper_rect.y + upper_rect.height)NEWLINE
ending_colour = RibbonInterpolateColour(bg_top, bg_top_grad,NEWLINE
paint_rect.y + paint_rect.height, upper_rect.y,NEWLINE
upper_rect.y + upper_rect.height)NEWLINE
dc.GradientFillLinear(rect, starting_colour, ending_colour, wx.SOUTH)NEWLINENEWLINENEWLINE
if paint_rect.Intersects(lower_rect):NEWLINE
rect = wx.Rect(*lower_rect)NEWLINE
rect.Intersect(paint_rect)NEWLINE
rect.x -= offset.xNEWLINE
rect.y -= offset.yNEWLINE
starting_colour = RibbonInterpolateColour(bg_btm, bg_btm_grad,NEWLINE
paint_rect.y, lower_rect.y,NEWLINE
lower_rect.y + lower_rect.height)NEWLINE
ending_colour = RibbonInterpolateColour(bg_btm, bg_btm_grad,NEWLINE
paint_rect.y + paint_rect.height,NEWLINE
lower_rect.y, lower_rect.y + lower_rect.height)NEWLINENEWLINE
dc.GradientFillLinear(rect, starting_colour, ending_colour, wx.SOUTH)NEWLINENEWLINENEWLINE
def DrawPageBackground(self, dc, wnd, rect):NEWLINE
"""NEWLINE
Draw the background of a ribbon page.NEWLINENEWLINE
:param `dc`: The device context to draw onto;NEWLINE
:param `wnd`: The window which is being drawn onto (which is commonly theNEWLINE
:class:`~wx.lib.agw.ribbon.page.RibbonPage` whose background is being drawn, but doesn't have to be);NEWLINE
:param `rect`: The rectangle within which to draw.NEWLINENEWLINE
:see: :meth:`~RibbonMSWArtProvider.GetPageBackgroundRedrawArea`NEWLINE
"""NEWLINENEWLINE
dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE
dc.SetBrush(self._tab_ctrl_background_brush)NEWLINENEWLINE
edge = wx.Rect(*rect)NEWLINENEWLINE
edge.width = 2NEWLINE
dc.DrawRectangle(edge.x,
edge.y, edge.width, edge.height)NEWLINENEWLINE edge.x += rect.width - 2NEWLINE dc.DrawRectangle(edge.x, edge.y, edge.width, edge.height)NEWLINENEWLINE edge = wx.Rect(*rect)NEWLINE edge.height = 2NEWLINE edge.y += (rect.height - edge.height)NEWLINE dc.DrawRectangle(edge.x, edge.y, edge.width, edge.height)NEWLINENEWLINE background = wx.Rect(*rect)NEWLINE background.x += 2NEWLINE background.width -= 4NEWLINE background.height -= 2NEWLINENEWLINE background.height /= 5NEWLINE dc.GradientFillLinear(background, self._page_background_top_colour,NEWLINE self._page_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE background.y += background.heightNEWLINE background.height = rect.height - 2 - background.heightNEWLINE dc.GradientFillLinear(background, self._page_background_colour,NEWLINE self._page_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE border_points = [wx.Point() for i in range(8)]NEWLINE border_points[0] = wx.Point(2, 0)NEWLINE border_points[1] = wx.Point(1, 1)NEWLINE border_points[2] = wx.Point(1, rect.height - 4)NEWLINE border_points[3] = wx.Point(3, rect.height - 2)NEWLINE border_points[4] = wx.Point(rect.width - 4, rect.height - 2)NEWLINE border_points[5] = wx.Point(rect.width - 2, rect.height - 4)NEWLINE border_points[6] = wx.Point(rect.width - 2, 1)NEWLINE border_points[7] = wx.Point(rect.width - 4, -1)NEWLINENEWLINE dc.SetPen(self._page_border_pen)NEWLINE dc.DrawLines(border_points, rect.x, rect.y)NEWLINENEWLINENEWLINE def DrawScrollButton(self, dc, wnd, rect_, style):NEWLINE """NEWLINE Draw a ribbon-style scroll button.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto;NEWLINE :param `rect`: The rectangle within which to draw. The size of this rectangleNEWLINE will be at least the size returned by :meth:`~RibbonMSWArtProvider.GetScrollButtonMinimumSize` for aNEWLINE scroll button with the same style. 
For tab scroll buttons, this rectangleNEWLINE will be entirely within a rectangle on the same device context previouslyNEWLINE painted with :meth:`~RibbonMSWArtProvider.DrawTabCtrlBackground`, but this is not guaranteed for otherNEWLINE types of button (for example, page scroll buttons will not be painted on anNEWLINE area previously painted with :meth:`~RibbonMSWArtProvider.DrawPageBackground` );NEWLINE :param `style`: A combination of flags from `RibbonScrollButtonStyle`,NEWLINE including a direction, a for flag, and one or more states.NEWLINENEWLINE """NEWLINENEWLINE rect = wx.Rect(*rect_)NEWLINENEWLINE if (style & RIBBON_SCROLL_BTN_FOR_MASK) == RIBBON_SCROLL_BTN_FOR_PAGE:NEWLINENEWLINE # Page scroll buttons do not have the luxury of rendering on top of anythingNEWLINE # else, and their size includes some padding, hence the background paintingNEWLINE # and size adjustment.NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.SetBrush(self._tab_ctrl_background_brush)NEWLINE dc.DrawRectangle(rect)NEWLINE dc.SetClippingRegion(rect)NEWLINENEWLINE result = style & RIBBON_SCROLL_BTN_DIRECTION_MASKNEWLINENEWLINE if result == RIBBON_SCROLL_BTN_LEFT:NEWLINE rect.x += 1NEWLINE elif result == RIBBON_SCROLL_BTN_RIGHT:NEWLINE rect.y -= 1NEWLINE rect.width -= 1NEWLINE elif result == RIBBON_SCROLL_BTN_UP:NEWLINE rect.x += 1NEWLINE rect.y -= 1NEWLINE rect.width -= 2NEWLINE rect.height += 1NEWLINE elif result == RIBBON_SCROLL_BTN_DOWN:NEWLINE rect.x += 1NEWLINE rect.width -= 2NEWLINE rect.height -= 1NEWLINENEWLINE background = wx.Rect(*rect)NEWLINE background.x += 1NEWLINE background.y += 1NEWLINE background.width -= 2NEWLINE background.height -= 2NEWLINENEWLINE if style & RIBBON_SCROLL_BTN_UP:NEWLINE background.height /= 2NEWLINE else:NEWLINE background.height /= 5NEWLINENEWLINE dc.GradientFillLinear(background, self._page_background_top_colour,NEWLINE self._page_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE background.y += background.heightNEWLINE background.height = rect.height - 2 - background.heightNEWLINE dc.GradientFillLinear(background, self._page_background_colour,NEWLINE self._page_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE border_points = [wx.Point() for i in range(7)]NEWLINE result = style & RIBBON_SCROLL_BTN_DIRECTION_MASKNEWLINENEWLINE if result == RIBBON_SCROLL_BTN_LEFT:NEWLINE border_points[0] = wx.Point(2, 0)NEWLINE border_points[1] = wx.Point(rect.width - 1, 0)NEWLINE border_points[2] = wx.Point(rect.width - 1, rect.height - 1)NEWLINE border_points[3] = wx.Point(2, rect.height - 1)NEWLINE border_points[4] = wx.Point(0, rect.height - 3)NEWLINE border_points[5] = wx.Point(0, 2)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_RIGHT:NEWLINE border_points[0] = wx.Point(0, 0)NEWLINE border_points[1] = wx.Point(rect.width - 3, 0)NEWLINE border_points[2] = wx.Point(rect.width - 1, 2)NEWLINE border_points[3] = wx.Point(rect.width - 1, rect.height - 3)NEWLINE border_points[4] = wx.Point(rect.width - 3, rect.height - 1)NEWLINE border_points[5] = wx.Point(0, rect.height - 1)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_UP:NEWLINE border_points[0] = wx.Point(2, 0)NEWLINE border_points[1] = wx.Point(rect.width - 3, 0)NEWLINE border_points[2] = wx.Point(rect.width - 1, 2)NEWLINE border_points[3] = wx.Point(rect.width - 1, rect.height - 1)NEWLINE border_points[4] = wx.Point(0, rect.height - 1)NEWLINE border_points[5] = wx.Point(0, 2)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_DOWN:NEWLINE border_points[0] = wx.Point(0, 0)NEWLINE border_points[1] = wx.Point(rect.width - 1, 0)NEWLINE 
border_points[2] = wx.Point(rect.width - 1, rect.height - 3)NEWLINE border_points[3] = wx.Point(rect.width - 3, rect.height - 1)NEWLINE border_points[4] = wx.Point(2, rect.height - 1)NEWLINE border_points[5] = wx.Point(0, rect.height - 3)NEWLINENEWLINE border_points[6] = border_points[0]NEWLINENEWLINE dc.SetPen(self._page_border_pen)NEWLINE dc.DrawLines(border_points, rect.x, rect.y)NEWLINENEWLINE # NB: Code for handling hovered/active state is temporaryNEWLINE arrow_points = [wx.Point() for i in range(3)]NEWLINE result = style & RIBBON_SCROLL_BTN_DIRECTION_MASKNEWLINENEWLINE if result == RIBBON_SCROLL_BTN_LEFT:NEWLINE arrow_points[0] = wx.Point(rect.width / 2 - 2, rect.height / 2)NEWLINE if style & RIBBON_SCROLL_BTN_ACTIVE:NEWLINE arrow_points[0].y += 1NEWLINE arrow_points[1] = arrow_points[0] + wx.Point(3, -3)NEWLINE arrow_points[2] = arrow_points[0] + wx.Point(3, 3)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_RIGHT:NEWLINE arrow_points[0] = wx.Point(rect.width / 2 + 2, rect.height / 2)NEWLINE if style & RIBBON_SCROLL_BTN_ACTIVE:NEWLINE arrow_points[0].y += 1NEWLINE arrow_points[1] = arrow_points[0] - wx.Point(3, 3)NEWLINE arrow_points[2] = arrow_points[0] - wx.Point(3, -3)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_UP:NEWLINE arrow_points[0] = wx.Point(rect.width / 2, rect.height / 2 - 2)NEWLINE if style & RIBBON_SCROLL_BTN_ACTIVE:NEWLINE arrow_points[0].y += 1NEWLINE arrow_points[1] = arrow_points[0] + wx.Point( 3, 3)NEWLINE arrow_points[2] = arrow_points[0] + wx.Point(-3, 3)NEWLINENEWLINE elif result == RIBBON_SCROLL_BTN_DOWN:NEWLINE arrow_points[0] = wx.Point(rect.width / 2, rect.height / 2 + 2)NEWLINE if style & RIBBON_SCROLL_BTN_ACTIVE:NEWLINE arrow_points[0].y += 1NEWLINE arrow_points[1] = arrow_points[0] - wx.Point( 3, 3)NEWLINE arrow_points[2] = arrow_points[0] - wx.Point(-3, 3)NEWLINENEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE B = wx.Brush((style & RIBBON_SCROLL_BTN_HOVERED and [self._tab_active_background_colour] or [self._tab_label_colour])[0])NEWLINE dc.SetBrush(B)NEWLINE dc.DrawPolygon(arrow_points, rect.x, rect.y)NEWLINENEWLINENEWLINE def DrawDropdownArrow(self, dc, x, y, colour):NEWLINENEWLINE arrow_points = [wx.Point() for i in range(3)]NEWLINE brush = wx.Brush(colour)NEWLINE arrow_points[0] = wx.Point(1, 2)NEWLINE arrow_points[1] = arrow_points[0] + wx.Point(-3, -3)NEWLINE arrow_points[2] = arrow_points[0] + wx.Point( 3, -3)NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.SetBrush(brush)NEWLINE dc.DrawPolygon(arrow_points, x, y)NEWLINENEWLINENEWLINE def RemovePanelPadding(self, rect):NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE rect.y += 1NEWLINE rect.height -= 2NEWLINE else:NEWLINE rect.x += 1NEWLINE rect.width -= 2NEWLINENEWLINE return rectNEWLINENEWLINENEWLINE def DrawPanelBackground(self, dc, wnd, rect):NEWLINE """NEWLINE Draw the background and chrome for a ribbon panel.NEWLINENEWLINE This should draw the border, background, label, and any other items of a panelNEWLINE which are outside the client area of a panel. Note that when a panel isNEWLINE minimised, this function is not called - only :meth:`~RibbonMSWArtProvider.DrawMinimisedPanel` is called,NEWLINE so a background should be explicitly painted by that if required.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto, which is always the panelNEWLINE whose background and chrome is being drawn. 
The panel label and other panelNEWLINE attributes can be obtained by querying this;NEWLINE :param `rect`: The rectangle within which to draw.NEWLINENEWLINE """NEWLINENEWLINE self.DrawPartialPageBackground(dc, wnd, rect, False)NEWLINENEWLINE true_rect = wx.Rect(*rect)NEWLINE true_rect = self.RemovePanelPadding(true_rect)NEWLINENEWLINE dc.SetFont(self._panel_label_font)NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINENEWLINE has_ext_button = wnd.HasExtButton()NEWLINENEWLINE if wnd.IsHovered():NEWLINE dc.SetBrush(self._panel_hover_label_background_brush)NEWLINE dc.SetTextForeground(self._panel_hover_label_colour)NEWLINE else:NEWLINE dc.SetBrush(self._panel_label_background_brush)NEWLINE dc.SetTextForeground(self._panel_label_colour)NEWLINENEWLINE label_rect = wx.Rect(*true_rect)NEWLINE label = wnd.GetLabel().strip()NEWLINE clip_label = FalseNEWLINE label_size = wx.Size(*dc.GetTextExtent(label))NEWLINENEWLINE label_rect.SetX(label_rect.GetX() + 1)NEWLINE label_rect.SetWidth(label_rect.GetWidth() - 2)NEWLINE label_rect.SetHeight(label_size.GetHeight() + 2)NEWLINE label_rect.SetY(true_rect.GetBottom() - label_rect.GetHeight())NEWLINE label_height = label_rect.GetHeight()NEWLINENEWLINE label_bg_rect = wx.Rect(*label_rect)NEWLINENEWLINE if has_ext_button:NEWLINE label_rect.SetWidth(label_rect.GetWidth() - 13)NEWLINENEWLINE if label_size.GetWidth() > label_rect.GetWidth():NEWLINE # Test if there is enough length for 3 letters and ...NEWLINE new_label = label[0:3] + "..."NEWLINE label_size = wx.Size(*dc.GetTextExtent(new_label))NEWLINENEWLINE if label_size.GetWidth() > label_rect.GetWidth():NEWLINE # Not enough room for three characters and ...NEWLINE # Display the entire label and just crop itNEWLINE clip_label = TrueNEWLINE else:NEWLINE # Room for some characters and ...NEWLINE # Display as many characters as possible and append ...NEWLINE for l in range(len(label)-1, 3, -1):NEWLINE new_label = label[0:l] + "..."NEWLINE label_size = wx.Size(*dc.GetTextExtent(new_label))NEWLINE if label_size.GetWidth() <= label_rect.GetWidth():NEWLINE label = new_labelNEWLINE breakNEWLINENEWLINE dc.DrawRectangle(label_rect)NEWLINENEWLINE if clip_label:NEWLINE clip = wx.DCClipper(dc, label_rect)NEWLINE dc.DrawText(label, label_rect.x, label_rect.y + (label_rect.GetHeight() - label_size.GetHeight()) / 2)NEWLINE else:NEWLINE dc.DrawText(label, label_rect.x + (label_rect.GetWidth() - label_size.GetWidth()) / 2,NEWLINE label_rect.y + (label_rect.GetHeight() - label_size.GetHeight()) / 2)NEWLINENEWLINE if has_ext_button:NEWLINE if wnd.IsExtButtonHovered():NEWLINE dc.SetPen(self._panel_hover_button_border_pen)NEWLINE dc.SetBrush(self._panel_hover_button_background_brush)NEWLINE dc.DrawRoundedRectangle(label_rect.GetRight(), label_rect.GetBottom() - 13, 13, 13, 1)NEWLINE dc.DrawBitmap(self._panel_extension_bitmap[1], label_rect.GetRight() + 3, label_rect.GetBottom() - 10, True)NEWLINE else:NEWLINE dc.DrawBitmap(self._panel_extension_bitmap[0], label_rect.GetRight() + 3, label_rect.GetBottom() - 10, True)NEWLINENEWLINE if wnd.IsHovered():NEWLINE client_rect = wx.Rect(*true_rect)NEWLINE client_rect.x += 1NEWLINE client_rect.width -= 2NEWLINE client_rect.y += 1NEWLINE client_rect.height -= 2 + label_heightNEWLINE self.DrawPartialPageBackground(dc, wnd, client_rect, True)NEWLINENEWLINE self.DrawPanelBorder(dc, true_rect, self._panel_border_pen, self._panel_border_gradient_pen)NEWLINENEWLINENEWLINE def GetPanelExtButtonArea(self, dc, wnd, rect):NEWLINE """NEWLINE Retrieve the extension button area rectangle.NEWLINENEWLINE :param 
`dc`: The device context used to measure text extents;NEWLINE
:param `wnd`: The panel where the extension button resides;NEWLINE
:param `rect`: The panel client rectangle.NEWLINE
"""NEWLINENEWLINE
true_rect = wx.Rect(self.RemovePanelPadding(rect))NEWLINE
true_rect = wx.Rect(true_rect.GetRight()-13, true_rect.GetBottom()-13, 13, 13)NEWLINE
return true_rectNEWLINENEWLINENEWLINE
def DrawGalleryBackground(self, dc, wnd, rect):NEWLINE
"""NEWLINE
Draw the background and chrome for a :class:`~wx.lib.agw.ribbon.gallery.RibbonGallery` control.NEWLINENEWLINE
This should draw the border, background, scroll buttons, extension button, andNEWLINE
any other UI elements which are not attached to a specific gallery item.NEWLINENEWLINE
:param `dc`: The device context to draw onto;NEWLINE
:param `wnd`: The window which is being drawn onto, which is always the galleryNEWLINE
whose background and chrome is being drawn. Attributes used during drawing likeNEWLINE
the gallery hover state and individual button states can be queried from thisNEWLINE
parameter by :meth:`RibbonGallery.IsHovered() <lib.agw.ribbon.gallery.RibbonGallery.IsHovered>`,NEWLINE
:meth:`RibbonGallery.GetExtensionButtonState() <lib.agw.ribbon.gallery.RibbonGallery.GetExtensionButtonState>`,NEWLINE
:meth:`RibbonGallery.GetUpButtonState() <lib.agw.ribbon.gallery.RibbonGallery.GetUpButtonState>`, andNEWLINE
:meth:`RibbonGallery.GetDownButtonState() <lib.agw.ribbon.gallery.RibbonGallery.GetDownButtonState>`;NEWLINE
:param `rect`: The rectangle within which to draw. This rectangle is the entireNEWLINE
area of the gallery control, not just the client rectangle.NEWLINE
"""NEWLINENEWLINE
self.DrawPartialPageBackground(dc, wnd, rect)NEWLINENEWLINE
if wnd.IsHovered():NEWLINE
dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE
dc.SetBrush(self._gallery_hover_background_brush)NEWLINE
if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
dc.DrawRectangle(rect.x + 1, rect.y + 1, rect.width - 2, rect.height - 16)NEWLINE
else:NEWLINE
dc.DrawRectangle(rect.x + 1, rect.y + 1, rect.width - 16, rect.height - 2)NEWLINENEWLINE
dc.SetPen(self._gallery_border_pen)NEWLINE
# OutlineNEWLINE
dc.DrawLine(rect.x + 1, rect.y, rect.x + rect.width - 1, rect.y)NEWLINE
dc.DrawLine(rect.x, rect.y + 1, rect.x, rect.y + rect.height - 1)NEWLINE
dc.DrawLine(rect.x + 1, rect.y + rect.height - 1, rect.x + rect.width - 1, rect.y + rect.height - 1)NEWLINE
dc.DrawLine(rect.x + rect.width - 1, rect.y + 1, rect.x + rect.width - 1, rect.y + rect.height - 1)NEWLINENEWLINE
self.DrawGalleryBackgroundCommon(dc, wnd, rect)NEWLINENEWLINENEWLINE
def DrawGalleryBackgroundCommon(self, dc, wnd, rect):NEWLINE
"""NEWLINE
Draw the divider and the up/down/extension buttons common to all gallery backgrounds.NEWLINE
"""NEWLINENEWLINE
if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE
# Divider between items and buttonsNEWLINE
dc.DrawLine(rect.x, rect.y + rect.height - 15, rect.x + rect.width, rect.y + rect.height - 15)NEWLINENEWLINE
up_btn = wx.Rect(rect.x, rect.y + rect.height - 15, rect.width / 3, 15)NEWLINE
down_btn = wx.Rect(up_btn.GetRight() + 1, up_btn.GetTop(), up_btn.GetWidth(), up_btn.GetHeight())NEWLINE
dc.DrawLine(down_btn.GetLeft(), down_btn.GetTop(), down_btn.GetLeft(), down_btn.GetBottom())NEWLINE
ext_btn = wx.Rect(down_btn.GetRight() + 1, up_btn.GetTop(), rect.width - up_btn.GetWidth() - down_btn.GetWidth() - 1, up_btn.GetHeight())NEWLINE
dc.DrawLine(ext_btn.GetLeft(), ext_btn.GetTop(), ext_btn.GetLeft(), ext_btn.GetBottom())NEWLINENEWLINE
else:NEWLINE
# Divider between items and buttonsNEWLINE
dc.DrawLine(rect.x + rect.width - 15, rect.y, rect.x + rect.width - 15, rect.y + rect.height)NEWLINENEWLINE
up_btn = wx.Rect(rect.x
+ rect.width - 15, rect.y, 15, rect.height / 3)NEWLINE down_btn = wx.Rect(up_btn.GetLeft(), up_btn.GetBottom() + 1, up_btn.GetWidth(), up_btn.GetHeight())NEWLINE dc.DrawLine(down_btn.GetLeft(), down_btn.GetTop(), down_btn.GetRight(), down_btn.GetTop())NEWLINE ext_btn = wx.Rect(up_btn.GetLeft(), down_btn.GetBottom() + 1, up_btn.GetWidth(), rect.height - up_btn.GetHeight() - down_btn.GetHeight() - 1)NEWLINE dc.DrawLine(ext_btn.GetLeft(), ext_btn.GetTop(), ext_btn.GetRight(), ext_btn.GetTop())NEWLINENEWLINE self.DrawGalleryButton(dc, up_btn, wnd.GetUpButtonState(), self._gallery_up_bitmap)NEWLINE self.DrawGalleryButton(dc, down_btn, wnd.GetDownButtonState(), self._gallery_down_bitmap)NEWLINE self.DrawGalleryButton(dc, ext_btn, wnd.GetExtensionButtonState(), self._gallery_extension_bitmap)NEWLINENEWLINENEWLINE def DrawGalleryButton(self, dc, rect, state, bitmaps):NEWLINENEWLINE if state == RIBBON_GALLERY_BUTTON_NORMAL:NEWLINE btn_top_brush = self._gallery_button_background_top_brushNEWLINE btn_colour = self._gallery_button_background_colourNEWLINE btn_grad_colour = self._gallery_button_background_gradient_colourNEWLINE btn_bitmap = bitmaps[0]NEWLINE elif state == RIBBON_GALLERY_BUTTON_HOVERED:NEWLINE btn_top_brush = self._gallery_button_hover_background_top_brushNEWLINE btn_colour = self._gallery_button_hover_background_colourNEWLINE btn_grad_colour = self._gallery_button_hover_background_gradient_colourNEWLINE btn_bitmap = bitmaps[1]NEWLINE elif state == RIBBON_GALLERY_BUTTON_ACTIVE:NEWLINE btn_top_brush = self._gallery_button_active_background_top_brushNEWLINE btn_colour = self._gallery_button_active_background_colourNEWLINE btn_grad_colour = self._gallery_button_active_background_gradient_colourNEWLINE btn_bitmap = bitmaps[2]NEWLINE elif state == RIBBON_GALLERY_BUTTON_DISABLED:NEWLINE btn_top_brush = self._gallery_button_disabled_background_top_brushNEWLINE btn_colour = self._gallery_button_disabled_background_colourNEWLINE btn_grad_colour = self._gallery_button_disabled_background_gradient_colourNEWLINE btn_bitmap = bitmaps[3]NEWLINENEWLINE rect.x += 1NEWLINE rect.y += 1NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE rect.width -= 1NEWLINE rect.height -= 2NEWLINE else:NEWLINE rect.width -= 2NEWLINE rect.height -= 1NEWLINENEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.SetBrush(btn_top_brush)NEWLINE dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height / 2)NEWLINENEWLINE lower = wx.Rect(*rect)NEWLINE lower.height = (lower.height + 1) / 2NEWLINE lower.y += rect.height - lower.heightNEWLINE dc.GradientFillLinear(lower, btn_colour, btn_grad_colour, wx.SOUTH)NEWLINENEWLINE dc.DrawBitmap(btn_bitmap, rect.x + rect.width / 2 - 2, lower.y - 2, True)NEWLINENEWLINENEWLINE def DrawGalleryItemBackground(self, dc, wnd, rect, item):NEWLINE """NEWLINE Draw the background of a single item in a :class:`~wx.lib.agw.ribbon.gallery.RibbonGallery` control.NEWLINENEWLINE This is painted on top of a gallery background, and behind the items bitmap.NEWLINE Unlike :meth:`~RibbonMSWArtProvider.DrawButtonBarButton` and :meth:`~RibbonMSWArtProvider.DrawTool`, it is not expected to draw theNEWLINE item bitmap - that is done by the gallery control itself.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto, which is always the galleryNEWLINE which contains the item being drawn;NEWLINE :param `rect`: The rectangle within which to draw. 
The size of this rectangleNEWLINE will be the size of the item's bitmap, expanded by gallery item padding valuesNEWLINE (``RIBBON_ART_GALLERY_BITMAP_PADDING_LEFT_SIZE``, ``RIBBON_ART_GALLERY_BITMAP_PADDING_RIGHT_SIZE``,NEWLINE ``RIBBON_ART_GALLERY_BITMAP_PADDING_TOP_SIZE``, and ``RIBBON_ART_GALLERY_BITMAP_PADDING_BOTTOM_SIZE``).NEWLINE The drawing rectangle will be entirely within a rectangle on the same deviceNEWLINE context previously painted with :meth:`~RibbonMSWArtProvider.DrawGalleryBackground`;NEWLINE :param `item`: The item whose background is being painted. Typically the backgroundNEWLINE will vary if the item is hovered, active, or selected; :meth:`RibbonGallery.GetSelection() <lib.agw.ribbon.gallery.RibbonGallery.GetSelection>`,NEWLINE :meth:`RibbonGallery.GetActiveItem() <lib.agw.ribbon.gallery.RibbonGallery.GetActiveItem>`, andNEWLINE :meth:`RibbonGallery.GetHoveredItem() <lib.agw.ribbon.gallery.RibbonGallery.GetHoveredItem>` can beNEWLINE called to test if the given item is in one of these states.NEWLINENEWLINE """NEWLINENEWLINE if wnd.GetHoveredItem() != item and wnd.GetActiveItem() != item and \NEWLINE wnd.GetSelection() != item:NEWLINE returnNEWLINENEWLINE dc.SetPen(self._gallery_item_border_pen)NEWLINE dc.DrawLine(rect.x + 1, rect.y, rect.x + rect.width - 1, rect.y)NEWLINE dc.DrawLine(rect.x, rect.y + 1, rect.x, rect.y + rect.height - 1)NEWLINE dc.DrawLine(rect.x + 1, rect.y + rect.height - 1, rect.x + rect.width - 1, rect.y + rect.height - 1)NEWLINE dc.DrawLine(rect.x + rect.width - 1, rect.y + 1, rect.x + rect.width - 1, rect.y + rect.height - 1)NEWLINENEWLINE if wnd.GetActiveItem() == item or wnd.GetSelection() == item:NEWLINE top_brush = self._gallery_button_active_background_top_brushNEWLINE bg_colour = self._gallery_button_active_background_colourNEWLINE bg_gradient_colour = self._gallery_button_active_background_gradient_colourNEWLINE else:NEWLINE top_brush = self._gallery_button_hover_background_top_brushNEWLINE bg_colour = self._gallery_button_hover_background_colourNEWLINE bg_gradient_colour = self._gallery_button_hover_background_gradient_colourNEWLINENEWLINE upper = wx.Rect(*rect)NEWLINE upper.x += 1NEWLINE upper.width -= 2NEWLINE upper.y += 1NEWLINE upper.height /= 3NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.SetBrush(top_brush)NEWLINE dc.DrawRectangle(upper.x, upper.y, upper.width, upper.height)NEWLINENEWLINE lower = wx.Rect(*upper)NEWLINE lower.y += lower.heightNEWLINE lower.height = rect.height - 2 - lower.heightNEWLINE dc.GradientFillLinear(lower, bg_colour, bg_gradient_colour, wx.SOUTH)NEWLINENEWLINENEWLINE def DrawPanelBorder(self, dc, rect, primary_colour, secondary_colour):NEWLINENEWLINE border_points = [wx.Point() for i in range(9)]NEWLINE border_points[0] = wx.Point(2, 0)NEWLINE border_points[1] = wx.Point(rect.width - 3, 0)NEWLINE border_points[2] = wx.Point(rect.width - 1, 2)NEWLINE border_points[3] = wx.Point(rect.width - 1, rect.height - 3)NEWLINE border_points[4] = wx.Point(rect.width - 3, rect.height - 1)NEWLINE border_points[5] = wx.Point(2, rect.height - 1)NEWLINE border_points[6] = wx.Point(0, rect.height - 3)NEWLINE border_points[7] = wx.Point(0, 2)NEWLINENEWLINE if primary_colour.GetColour() == secondary_colour.GetColour():NEWLINE border_points[8] = border_points[0]NEWLINE dc.SetPen(primary_colour)NEWLINE dc.DrawLines(border_points, rect.x, rect.y)NEWLINE else:NEWLINE dc.SetPen(primary_colour)NEWLINE dc.DrawLines(border_points[0:3], rect.x, rect.y)NEWLINENEWLINE SingleLine(dc, rect, border_points[0], border_points[7])NEWLINE 
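# With differing pens the border is drawn in halves: the primary pen hasNEWLINE # drawn the top arc, the secondary pen draws the bottom arc, and theNEWLINE # vertical edges are blended between the two colours below.NEWLINE 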
dc.SetPen(secondary_colour)NEWLINE dc.DrawLines(border_points[4:7], rect.x, rect.y)NEWLINE SingleLine(dc, rect, border_points[4], border_points[3])NEWLINENEWLINE border_points[6] = border_points[2]NEWLINE RibbonDrawParallelGradientLines(dc, 2, border_points[6:8], 0, 1,NEWLINE border_points[3].y - border_points[2].y + 1, rect.x, rect.y,NEWLINE primary_colour.GetColour(), secondary_colour.GetColour())NEWLINENEWLINENEWLINE def DrawMinimisedPanel(self, dc, wnd, rect, bitmap):NEWLINE """NEWLINE Draw a minimised ribbon panel.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto, which is always the panelNEWLINE which is minimised. The panel label can be obtained from this window. TheNEWLINE minimised icon obtained from querying the window may not be the size requestedNEWLINE by :meth:`~RibbonMSWArtProvider.GetMinimisedPanelMinimumSize` - the `bitmap` argument contains the icon in theNEWLINE requested size;NEWLINE :param `rect`: The rectangle within which to draw. The size of the rectangleNEWLINE will be at least the size returned by :meth:`~RibbonMSWArtProvider.GetMinimisedPanelMinimumSize`;NEWLINE :param `bitmap`: A copy of the panel's minimised bitmap rescaled to the sizeNEWLINE returned by :meth:`~RibbonMSWArtProvider.GetMinimisedPanelMinimumSize`.NEWLINENEWLINE """NEWLINENEWLINE self.DrawPartialPageBackground(dc, wnd, rect, False)NEWLINENEWLINE true_rect = wx.Rect(*rect)NEWLINE true_rect = self.RemovePanelPadding(true_rect)NEWLINENEWLINE if wnd.GetExpandedPanel() is not None:NEWLINE client_rect = wx.Rect(*true_rect)NEWLINE client_rect.x += 1NEWLINE client_rect.width -= 2NEWLINE client_rect.y += 1NEWLINE client_rect.height = (rect.y + rect.height / 5) - client_rect.yNEWLINE dc.GradientFillLinear(client_rect,NEWLINE self._panel_active_background_top_colour,NEWLINE self._panel_active_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE client_rect.y += client_rect.heightNEWLINE client_rect.height = (true_rect.y + true_rect.height) - client_rect.yNEWLINE dc.GradientFillLinear(client_rect,NEWLINE self._panel_active_background_colour,NEWLINE self._panel_active_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE elif wnd.IsHovered():NEWLINE client_rect = wx.Rect(*true_rect)NEWLINE client_rect.x += 1NEWLINE client_rect.width -= 2NEWLINE client_rect.y += 1NEWLINE client_rect.height -= 2NEWLINE self.DrawPartialPageBackground(dc, wnd, client_rect, True)NEWLINENEWLINE preview = self.DrawMinimisedPanelCommon(dc, wnd, true_rect)NEWLINENEWLINE dc.SetBrush(self._panel_hover_label_background_brush)NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.DrawRectangle(preview.x + 1, preview.y + preview.height - 8, preview.width - 2, 7)NEWLINENEWLINE mid_pos = rect.y + rect.height / 5 - preview.yNEWLINENEWLINE if mid_pos < 0 or mid_pos >= preview.height:NEWLINE full_rect = wx.Rect(*preview)NEWLINE full_rect.x += 1NEWLINE full_rect.y += 1NEWLINE full_rect.width -= 2NEWLINE full_rect.height -= 9NEWLINE if mid_pos < 0:NEWLINE dc.GradientFillLinear(full_rect, self._page_hover_background_colour,NEWLINE self._page_hover_background_gradient_colour, wx.SOUTH)NEWLINE else:NEWLINE dc.GradientFillLinear(full_rect, self._page_hover_background_top_colour,NEWLINE self._page_hover_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE else:NEWLINE top_rect = wx.Rect(*preview)NEWLINE top_rect.x += 1NEWLINE top_rect.y += 1NEWLINE top_rect.width -= 2NEWLINE top_rect.height = mid_posNEWLINE dc.GradientFillLinear(top_rect, self._page_hover_background_top_colour,NEWLINE 
self._page_hover_background_top_gradient_colour, wx.SOUTH)NEWLINENEWLINE btm_rect = wx.Rect(*top_rect)NEWLINE btm_rect.y = preview.y + mid_posNEWLINE btm_rect.height = preview.y + preview.height - 7 - btm_rect.yNEWLINE dc.GradientFillLinear(btm_rect, self._page_hover_background_colour,NEWLINE self._page_hover_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE if bitmap.IsOk():NEWLINE dc.DrawBitmap(bitmap, preview.x + (preview.width - bitmap.GetWidth()) / 2,NEWLINE preview.y + (preview.height - 7 - bitmap.GetHeight()) / 2, True)NEWLINENEWLINE self.DrawPanelBorder(dc, preview, self._panel_border_pen, self._panel_border_gradient_pen)NEWLINE self.DrawPanelBorder(dc, true_rect, self._panel_minimised_border_pen, self._panel_minimised_border_gradient_pen)NEWLINENEWLINENEWLINE def DrawMinimisedPanelCommon(self, dc, wnd, true_rect):NEWLINENEWLINE preview = wx.Rect(0, 0, 32, 32)NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE preview.x = true_rect.x + 4NEWLINE preview.y = true_rect.y + (true_rect.height - preview.height) / 2NEWLINE else:NEWLINE preview.x = true_rect.x + (true_rect.width - preview.width) / 2NEWLINE preview.y = true_rect.y + 4NEWLINENEWLINE dc.SetFont(self._panel_label_font)NEWLINE label_width, label_height = dc.GetTextExtent(wnd.GetLabel())NEWLINENEWLINE xpos = true_rect.x + (true_rect.width - label_width + 1) / 2NEWLINE ypos = preview.y + preview.height + 5NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE xpos = preview.x + preview.width + 5NEWLINE ypos = true_rect.y + (true_rect.height - label_height) / 2NEWLINENEWLINE dc.SetTextForeground(self._panel_minimised_label_colour)NEWLINE dc.DrawText(wnd.GetLabel(), xpos, ypos)NEWLINENEWLINE arrow_points = [wx.Point() for i in range(3)]NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE xpos += label_widthNEWLINE arrow_points[0] = wx.Point(xpos + 5, ypos + label_height / 2)NEWLINE arrow_points[1] = arrow_points[0] + wx.Point(-3, 3)NEWLINE arrow_points[2] = arrow_points[0] + wx.Point(-3, -3)NEWLINE else:NEWLINE ypos += label_heightNEWLINE arrow_points[0] = wx.Point(true_rect.width / 2, ypos + 5)NEWLINE arrow_points[1] = arrow_points[0] + wx.Point(-3, -3)NEWLINE arrow_points[2] = arrow_points[0] + wx.Point( 3, -3)NEWLINENEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE B = wx.Brush(self._panel_minimised_label_colour)NEWLINE dc.SetBrush(B)NEWLINE dc.DrawPolygon(arrow_points, true_rect.x, true_rect.y)NEWLINENEWLINE return previewNEWLINENEWLINENEWLINE def DrawButtonBarBackground(self, dc, wnd, rect):NEWLINE """NEWLINE Draw the background for a :class:`~wx.lib.agw.ribbon.buttonbar.RibbonButtonBar` control.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto (which will typically beNEWLINE the button bar itself, though this is not guaranteed);NEWLINE :param `rect`: The rectangle within which to draw.NEWLINENEWLINE """NEWLINENEWLINE self.DrawPartialPageBackground(dc, wnd, rect, True)NEWLINENEWLINENEWLINE def DrawPartialPageBackground2(self, dc, wnd, rect, allow_hovered=True):NEWLINENEWLINE # Assume the window is a child of a ribbon page, and also check for aNEWLINE # hovered panel somewhere between the window and the page, as it causesNEWLINE # the background to change.NEWLINE offset = wx.Point(*wnd.GetPosition())NEWLINE page = NoneNEWLINE parent = wnd.GetParent()NEWLINE hovered = FalseNEWLINE panel = NoneNEWLINENEWLINE if isinstance(wnd, PANEL.RibbonPanel):NEWLINE panel = wndNEWLINE hovered = allow_hovered and panel.IsHovered()NEWLINE if 
panel.GetExpandedDummy() is not None:NEWLINE offset = panel.GetExpandedDummy().GetPosition()NEWLINE parent = panel.GetExpandedDummy().GetParent()NEWLINENEWLINE while 1:NEWLINENEWLINE if panel is None:NEWLINE panel = parentNEWLINE if isinstance(panel, PANEL.RibbonPanel):NEWLINE hovered = allow_hovered and panel.IsHovered()NEWLINE if panel.GetExpandedDummy() is not None:NEWLINE parent = panel.GetExpandedDummy()NEWLINENEWLINE page = parentNEWLINE if isinstance(page, PAGE.RibbonPage):NEWLINE breakNEWLINENEWLINE offset += parent.GetPosition()NEWLINE parent = parent.GetParent()NEWLINE if parent is None:NEWLINE breakNEWLINENEWLINE if page is not None:NEWLINE self.DrawPartialPageBackground(dc, wnd, rect, page, offset, hovered)NEWLINE returnNEWLINENEWLINE # No page found - fallback to painting with a stock brushNEWLINE dc.SetBrush(wx.WHITE_BRUSH)NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.DrawRectangle(rect.x, rect.y, rect.width, rect.height)NEWLINENEWLINENEWLINE def DrawButtonBarButton(self, dc, wnd, rect, kind, state, label, bitmap_large, bitmap_small):NEWLINE """NEWLINE Draw a single button for a :class:`~wx.lib.agw.ribbon.buttonbar.RibbonButtonBar` control.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto;NEWLINE :param `rect`: The rectangle within which to draw. The size of this rectangleNEWLINE will be a size previously returned by :meth:`~RibbonMSWArtProvider.GetButtonBarButtonSize`, and theNEWLINE rectangle will be entirely within a rectangle on the same device contextNEWLINE previously painted with :meth:`~RibbonMSWArtProvider.DrawButtonBarBackground`;NEWLINE :param `kind`: The kind of button to draw (normal, dropdown or hybrid);NEWLINE :param `state`: Combination of a size flag and state flags from theNEWLINE `RibbonButtonBarButtonState` enumeration;NEWLINE :param `label`: The label of the button;NEWLINE :param `bitmap_large`: The large bitmap of the button (or the large disabledNEWLINE bitmap when ``RIBBON_BUTTONBAR_BUTTON_DISABLED`` is set in `state`);NEWLINE :param `bitmap_small`: The small bitmap of the button (or the small disabledNEWLINE bitmap when ``RIBBON_BUTTONBAR_BUTTON_DISABLED`` is set in `state`).NEWLINENEWLINE """NEWLINENEWLINE if kind == RIBBON_BUTTON_TOGGLE:NEWLINE kind = RIBBON_BUTTON_NORMALNEWLINE if state & RIBBON_BUTTONBAR_BUTTON_TOGGLED:NEWLINE state ^= RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASKNEWLINENEWLINE if state & (RIBBON_BUTTONBAR_BUTTON_HOVER_MASK | RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASK):NEWLINE if state & RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASK:NEWLINE dc.SetPen(self._button_bar_active_border_pen)NEWLINE else:NEWLINE dc.SetPen(self._button_bar_hover_border_pen)NEWLINENEWLINE bg_rect = wx.Rect(*rect)NEWLINE bg_rect.x += 1NEWLINE bg_rect.y += 1NEWLINE bg_rect.width -= 2NEWLINE bg_rect.height -= 2NEWLINENEWLINE bg_rect_top = wx.Rect(*bg_rect)NEWLINE bg_rect_top.height /= 3NEWLINE bg_rect.y += bg_rect_top.heightNEWLINE bg_rect.height -= bg_rect_top.heightNEWLINENEWLINE if kind == RIBBON_BUTTON_HYBRID:NEWLINENEWLINE result = state & RIBBON_BUTTONBAR_BUTTON_SIZE_MASKNEWLINENEWLINE if result == RIBBON_BUTTONBAR_BUTTON_LARGE:NEWLINE iYBorder = rect.y + bitmap_large.GetHeight() + 4NEWLINE partial_bg = wx.Rect(*rect)NEWLINENEWLINE if state & RIBBON_BUTTONBAR_BUTTON_NORMAL_HOVERED:NEWLINE partial_bg.SetBottom(iYBorder - 1)NEWLINE else:NEWLINE partial_bg.height -= (iYBorder - partial_bg.y + 1)NEWLINE partial_bg.y = iYBorder + 1NEWLINENEWLINE dc.DrawLine(rect.x, iYBorder, rect.x + rect.width, iYBorder)NEWLINE 
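# Clip the gradient backgrounds so that only the half of the hybridNEWLINE # button under the mouse (normal or dropdown) is highlighted.NEWLINE 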
bg_rect.Intersect(partial_bg)NEWLINE bg_rect_top.Intersect(partial_bg)NEWLINENEWLINE elif result == RIBBON_BUTTONBAR_BUTTON_MEDIUM:NEWLINE iArrowWidth = 9NEWLINENEWLINE if state & RIBBON_BUTTONBAR_BUTTON_NORMAL_HOVERED:NEWLINE bg_rect.width -= iArrowWidthNEWLINE bg_rect_top.width -= iArrowWidthNEWLINE dc.DrawLine(bg_rect_top.x + bg_rect_top.width, rect.y, bg_rect_top.x + bg_rect_top.width,NEWLINE rect.y + rect.height)NEWLINE else:NEWLINE iArrowWidth -= 1NEWLINE bg_rect.x += bg_rect.width - iArrowWidthNEWLINE bg_rect_top.x += bg_rect_top.width - iArrowWidthNEWLINE bg_rect.width = iArrowWidthNEWLINE bg_rect_top.width = iArrowWidthNEWLINE dc.DrawLine(bg_rect_top.x - 1, rect.y, bg_rect_top.x - 1, rect.y + rect.height)NEWLINENEWLINE if state & RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASK:NEWLINENEWLINE dc.GradientFillLinear(bg_rect_top, self._button_bar_active_background_top_colour,NEWLINE self._button_bar_active_background_top_gradient_colour, wx.SOUTH)NEWLINE dc.GradientFillLinear(bg_rect, self._button_bar_active_background_colour,NEWLINE self._button_bar_active_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE else:NEWLINE dc.GradientFillLinear(bg_rect_top, self._button_bar_hover_background_top_colour,NEWLINE self._button_bar_hover_background_top_gradient_colour, wx.SOUTH)NEWLINE dc.GradientFillLinear(bg_rect, self._button_bar_hover_background_colour,NEWLINE self._button_bar_hover_background_gradient_colour, wx.SOUTH)NEWLINENEWLINE border_points = [wx.Point() for i in range(9)]NEWLINE border_points[0] = wx.Point(2, 0)NEWLINE border_points[1] = wx.Point(rect.width - 3, 0)NEWLINE border_points[2] = wx.Point(rect.width - 1, 2)NEWLINE border_points[3] = wx.Point(rect.width - 1, rect.height - 3)NEWLINE border_points[4] = wx.Point(rect.width - 3, rect.height - 1)NEWLINE border_points[5] = wx.Point(2, rect.height - 1)NEWLINE border_points[6] = wx.Point(0, rect.height - 3)NEWLINE border_points[7] = wx.Point(0, 2)NEWLINE border_points[8] = border_points[0]NEWLINENEWLINE dc.DrawLines(border_points, rect.x, rect.y)NEWLINENEWLINE dc.SetFont(self._button_bar_label_font)NEWLINE dc.SetTextForeground(self._button_bar_label_colour)NEWLINE self.DrawButtonBarButtonForeground(dc, rect, kind, state, label, bitmap_large, bitmap_small)NEWLINENEWLINENEWLINE def DrawButtonBarButtonForeground(self, dc, rect, kind, state, label, bitmap_large, bitmap_small):NEWLINENEWLINE result = state & RIBBON_BUTTONBAR_BUTTON_SIZE_MASKNEWLINENEWLINE if result == RIBBON_BUTTONBAR_BUTTON_LARGE:NEWLINENEWLINE padding = 2NEWLINE dc.DrawBitmap(bitmap_large, rect.x + (rect.width - bitmap_large.GetWidth()) / 2,NEWLINE rect.y + padding, True)NEWLINE ypos = rect.y + padding + bitmap_large.GetHeight() + paddingNEWLINE arrow_width = (kind == RIBBON_BUTTON_NORMAL and [0] or [8])[0]NEWLINENEWLINE label_w, label_h = dc.GetTextExtent(label)NEWLINENEWLINE if label_w + 2 * padding <= rect.width:NEWLINENEWLINE dc.DrawText(label, rect.x + (rect.width - label_w) / 2, ypos)NEWLINE if arrow_width != 0:NEWLINE self.DrawDropdownArrow(dc, rect.x + rect.width / 2,NEWLINE ypos + (label_h * 3) / 2,NEWLINE self._button_bar_label_colour)NEWLINE else:NEWLINE breaki = len(label)NEWLINENEWLINE while breaki > 0:NEWLINE breaki -= 1NEWLINE if RibbonCanLabelBreakAtPosition(label, breaki):NEWLINE label_top = label[0:breaki]NEWLINE label_w, label_h = dc.GetTextExtent(label_top)NEWLINENEWLINE if label_w + 2 * padding <= rect.width:NEWLINE dc.DrawText(label_top, rect.x + (rect.width - label_w) / 2, ypos)NEWLINE ypos += label_hNEWLINE label_bottom = label[breaki:]NEWLINE label_w, 
label_h = dc.GetTextExtent(label_bottom)NEWLINE label_w += arrow_widthNEWLINE iX = rect.x + (rect.width - label_w) / 2NEWLINE dc.DrawText(label_bottom, iX, ypos)NEWLINENEWLINE if arrow_width != 0:NEWLINE self.DrawDropdownArrow(dc, iX + 2 + label_w - arrow_width,NEWLINE ypos + label_h / 2 + 1,NEWLINE self._button_bar_label_colour)NEWLINENEWLINE breakNEWLINENEWLINE elif result == RIBBON_BUTTONBAR_BUTTON_MEDIUM:NEWLINENEWLINE x_cursor = rect.x + 2NEWLINE dc.DrawBitmap(bitmap_small, x_cursor, rect.y + (rect.height - bitmap_small.GetHeight())/2, True)NEWLINE x_cursor += bitmap_small.GetWidth() + 2NEWLINE label_w, label_h = dc.GetTextExtent(label)NEWLINE dc.DrawText(label, x_cursor, rect.y + (rect.height - label_h) / 2)NEWLINE x_cursor += label_w + 3NEWLINENEWLINE if kind != RIBBON_BUTTON_NORMAL:NEWLINE self.DrawDropdownArrow(dc, x_cursor, rect.y + rect.height / 2,NEWLINE self._button_bar_label_colour)NEWLINENEWLINE else:NEWLINE # TODONEWLINE passNEWLINENEWLINENEWLINE def DrawToolBarBackground(self, dc, wnd, rect):NEWLINE """NEWLINE Draw the background for a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar` control.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto. In most cases this will be aNEWLINE :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar`, but it doesn't have to be;NEWLINE :param `rect`: The rectangle within which to draw. Some of this rectangleNEWLINE will later be drawn over using :meth:`~RibbonMSWArtProvider.DrawToolGroupBackground` and :meth:`~RibbonMSWArtProvider.DrawTool`,NEWLINE but not all of it will (unless there is only a single group of tools).NEWLINENEWLINE """NEWLINENEWLINE self.DrawPartialPageBackground(dc, wnd, rect)NEWLINENEWLINENEWLINE def DrawToolGroupBackground(self, dc, wnd, rect):NEWLINE """NEWLINE Draw the background for a group of tools on a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar` control.NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto. In most cases this willNEWLINE be a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar`, but it doesn't have to be;NEWLINE :param `rect`: The rectangle within which to draw. This rectangle is a unionNEWLINE of the individual tools' rectangles. 
As there are no gaps between tools,NEWLINE this rectangle will be painted over exactly once by calls to :meth:`~RibbonMSWArtProvider.DrawTool`.NEWLINE The group background could therefore be painted by :meth:`~RibbonMSWArtProvider.DrawTool`, though itNEWLINE can be conceptually easier and more efficient to draw it all at once here.NEWLINE The rectangle will be entirely within a rectangle on the same device contextNEWLINE previously painted with :meth:`~RibbonMSWArtProvider.DrawToolBarBackground`.NEWLINENEWLINE """NEWLINENEWLINE dc.SetPen(self._toolbar_border_pen)NEWLINE outline = [wx.Point() for i in range(9)]NEWLINE outline[0] = wx.Point(2, 0)NEWLINE outline[1] = wx.Point(rect.width - 3, 0)NEWLINE outline[2] = wx.Point(rect.width - 1, 2)NEWLINE outline[3] = wx.Point(rect.width - 1, rect.height - 3)NEWLINE outline[4] = wx.Point(rect.width - 3, rect.height - 1)NEWLINE outline[5] = wx.Point(2, rect.height - 1)NEWLINE outline[6] = wx.Point(0, rect.height - 3)NEWLINE outline[7] = wx.Point(0, 2)NEWLINE outline[8] = outline[0]NEWLINENEWLINE dc.DrawLines(outline, rect.x, rect.y)NEWLINENEWLINENEWLINE def DrawTool(self, dc, wnd, rect, bitmap, kind, state):NEWLINE """NEWLINE Draw a single tool (for a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar` control).NEWLINENEWLINE :param `dc`: The device context to draw onto;NEWLINE :param `wnd`: The window which is being drawn onto. In most cases this willNEWLINE be a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar`, but it doesn't have to be;NEWLINE :param `rect`: The rectangle within which to draw. The size of this rectangleNEWLINE will be at least the size returned by :meth:`~RibbonMSWArtProvider.GetToolSize`, and the height of it willNEWLINE be equal for all tools within the same group. The rectangle will be entirelyNEWLINE within a rectangle on the same device context previously painted withNEWLINE :meth:`~RibbonMSWArtProvider.DrawToolGroupBackground`;NEWLINE :param `bitmap`: The bitmap to use as the tool's foreground. 
If the tool is aNEWLINE hybrid or dropdown tool, then the foreground should also contain a standardNEWLINE dropdown button;NEWLINE :param `kind`: The kind of tool to draw (normal, dropdown, or hybrid);NEWLINE :param `state`: A combination of `RibbonToolBarToolState` flags giving theNEWLINE state of the tool and its relative position within a tool group.NEWLINENEWLINE """NEWLINENEWLINE if kind == RIBBON_BUTTON_TOGGLE:NEWLINE if state & RIBBON_TOOLBAR_TOOL_TOGGLED:NEWLINE state ^= RIBBON_TOOLBAR_TOOL_ACTIVE_MASKNEWLINENEWLINE bg_rect = wx.Rect(*rect)NEWLINE bg_rect.Deflate(1, 1)NEWLINENEWLINE if (state & RIBBON_TOOLBAR_TOOL_LAST) == 0:NEWLINE bg_rect.width += 1NEWLINENEWLINE is_split_hybrid = (kind == RIBBON_BUTTON_HYBRID and (state & (RIBBON_TOOLBAR_TOOL_HOVER_MASK | RIBBON_TOOLBAR_TOOL_ACTIVE_MASK)))NEWLINENEWLINE # BackgroundNEWLINE bg_rect_top = wx.Rect(*bg_rect)NEWLINE bg_rect_top.height = (bg_rect_top.height * 2) / 5NEWLINE bg_rect_btm = wx.Rect(*bg_rect)NEWLINE bg_rect_btm.y += bg_rect_top.heightNEWLINE bg_rect_btm.height -= bg_rect_top.heightNEWLINENEWLINE bg_top_colour = self._tool_background_top_colourNEWLINE bg_top_grad_colour = self._tool_background_top_gradient_colourNEWLINE bg_colour = self._tool_background_colourNEWLINE bg_grad_colour = self._tool_background_gradient_colourNEWLINENEWLINE if state & RIBBON_TOOLBAR_TOOL_ACTIVE_MASK:NEWLINE bg_top_colour = self._tool_active_background_top_colourNEWLINE bg_top_grad_colour = self._tool_active_background_top_gradient_colourNEWLINE bg_colour = self._tool_active_background_colourNEWLINE bg_grad_colour = self._tool_active_background_gradient_colourNEWLINENEWLINE elif state & RIBBON_TOOLBAR_TOOL_HOVER_MASK:NEWLINE bg_top_colour = self._tool_hover_background_top_colourNEWLINE bg_top_grad_colour = self._tool_hover_background_top_gradient_colourNEWLINE bg_colour = self._tool_hover_background_colourNEWLINE bg_grad_colour = self._tool_hover_background_gradient_colourNEWLINENEWLINE dc.GradientFillLinear(bg_rect_top, bg_top_colour, bg_top_grad_colour, wx.SOUTH)NEWLINE dc.GradientFillLinear(bg_rect_btm, bg_colour, bg_grad_colour, wx.SOUTH)NEWLINENEWLINE if is_split_hybrid:NEWLINE nonrect = wx.Rect(*bg_rect)NEWLINE if state & (RIBBON_TOOLBAR_TOOL_DROPDOWN_HOVERED | RIBBON_TOOLBAR_TOOL_DROPDOWN_ACTIVE):NEWLINE nonrect.width -= 8NEWLINE else:NEWLINE nonrect.x += nonrect.width - 8NEWLINE nonrect.width = 8NEWLINENEWLINE B = wx.Brush(self._tool_hover_background_top_colour)NEWLINE dc.SetPen(wx.TRANSPARENT_PEN)NEWLINE dc.SetBrush(B)NEWLINE dc.DrawRectangle(nonrect.x, nonrect.y, nonrect.width, nonrect.height)NEWLINENEWLINE # BorderNEWLINE dc.SetPen(self._toolbar_border_pen)NEWLINENEWLINE if state & RIBBON_TOOLBAR_TOOL_FIRST:NEWLINE dc.DrawPoint(rect.x + 1, rect.y + 1)NEWLINE dc.DrawPoint(rect.x + 1, rect.y + rect.height - 2)NEWLINE else:NEWLINE dc.DrawLine(rect.x, rect.y + 1, rect.x, rect.y + rect.height - 1)NEWLINENEWLINE if state & RIBBON_TOOLBAR_TOOL_LAST:NEWLINE dc.DrawPoint(rect.x + rect.width - 2, rect.y + 1)NEWLINE dc.DrawPoint(rect.x + rect.width - 2, rect.y + rect.height - 2)NEWLINENEWLINE # ForegroundNEWLINE avail_width = bg_rect.GetWidth()NEWLINENEWLINE if kind & RIBBON_BUTTON_DROPDOWN:NEWLINE avail_width -= 8NEWLINE if is_split_hybrid:NEWLINE dc.DrawLine(rect.x + avail_width + 1, rect.y, rect.x + avail_width + 1, rect.y + rect.height)NEWLINENEWLINE dc.DrawBitmap(self._toolbar_drop_bitmap, bg_rect.x + avail_width + 2,NEWLINE bg_rect.y + (bg_rect.height / 2) - 2, True)NEWLINENEWLINE dc.DrawBitmap(bitmap, bg_rect.x + (avail_width - 
bitmap.GetWidth()) / 2,NEWLINE bg_rect.y + (bg_rect.height - bitmap.GetHeight()) / 2, True)NEWLINENEWLINENEWLINE def GetBarTabWidth(self, dc, wnd, label, bitmap, ideal=None, small_begin_need_separator=None,NEWLINE small_must_have_separator=None, minimum=None):NEWLINENEWLINE """NEWLINE Calculate the ideal and minimum width (in pixels) of a tab in a ribbon bar.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The window onto which the tab will eventually be drawn;NEWLINE :param `label`: The tab's label (or "" if it has none);NEWLINE :param `bitmap`: The tab's icon (or :class:`NullBitmap` if it has none);NEWLINE :param `ideal`: The ideal width (in pixels) of the tab;NEWLINE :param `small_begin_need_separator`: A size less than the ideal size, at which aNEWLINE tab separator should begin to be drawn (i.e. drawn, but still fairly transparent);NEWLINE :param `small_must_have_separator`: A size less than the `small_begin_need_separator`NEWLINE size, at which a tab separator must be drawn (i.e. drawn at full opacity);NEWLINE :param `minimum`: A size less than the `small_must_have_separator` size, and greaterNEWLINE than or equal to zero, which is the minimum pixel width for the tab.NEWLINENEWLINE """NEWLINENEWLINE width = 0NEWLINE mini = 0NEWLINENEWLINE if (self._flags & RIBBON_BAR_SHOW_PAGE_LABELS) and label.strip():NEWLINE dc.SetFont(self._tab_label_font)NEWLINE width += dc.GetTextExtent(label)[0]NEWLINE mini += min(25, width) # enough for a few charsNEWLINENEWLINE if bitmap.IsOk():NEWLINE # gap between label and bitmapNEWLINE width += 4NEWLINE mini += 2NEWLINENEWLINE if (self._flags & RIBBON_BAR_SHOW_PAGE_ICONS) and bitmap.IsOk():NEWLINE width += bitmap.GetWidth()NEWLINE mini += bitmap.GetWidth()NEWLINENEWLINE ideal = width + 30NEWLINE small_begin_need_separator = width + 20NEWLINE small_must_have_separator = width + 10NEWLINE minimum = miniNEWLINENEWLINE return ideal, small_begin_need_separator, small_must_have_separator, minimumNEWLINENEWLINENEWLINE def GetTabCtrlHeight(self, dc, wnd, pages):NEWLINE """NEWLINE Calculate the height (in pixels) of the tab region of a ribbon bar.NEWLINENEWLINE Note that as the tab region can contain scroll buttons, the height should beNEWLINE greater than or equal to the minimum height for a tab scroll button.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The window onto which the tabs will eventually be drawn;NEWLINE :param `pages`: The tabs which will acquire the returned height.NEWLINENEWLINE """NEWLINENEWLINE text_height = 0NEWLINE icon_height = 0NEWLINENEWLINE if len(pages) <= 1 and (self._flags & RIBBON_BAR_ALWAYS_SHOW_TABS) == 0:NEWLINE # To preserve space, a single tab need not be displayed. 
We still needNEWLINE # two pixels of border / padding though.NEWLINE return 2NEWLINENEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_LABELS:NEWLINE dc.SetFont(self._tab_label_font)NEWLINE text_height = dc.GetTextExtent("ABCDEFXj")[1] + 10NEWLINENEWLINE if self._flags & RIBBON_BAR_SHOW_PAGE_ICONS:NEWLINE for info in pages:NEWLINE if info.page.GetIcon().IsOk():NEWLINE icon_height = max(icon_height, info.page.GetIcon().GetHeight() + 4)NEWLINENEWLINE return max(text_height, icon_height)NEWLINENEWLINENEWLINE def GetScrollButtonMinimumSize(self, dc, wnd, style):NEWLINE """NEWLINE Calculate the minimum size (in pixels) of a scroll button.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The window onto which the scroll button will eventually be drawn;NEWLINE :param `style`: A combination of flags from `RibbonScrollButtonStyle`, includingNEWLINE a direction and a ``for`` flag (state flags may be given too, but should be ignored,NEWLINE as a button should retain a constant size, regardless of its state).NEWLINENEWLINE """NEWLINENEWLINE return wx.Size(12, 12)NEWLINENEWLINENEWLINE def GetPanelSize(self, dc, wnd, client_size, client_offset=None):NEWLINE """NEWLINE Calculate the size of a panel for a given client size.NEWLINENEWLINE This should increment the given size by enough to fit the panel label and otherNEWLINE chrome.NEWLINENEWLINE :param `dc`: A device context to use if one is required for size calculations;NEWLINE :param `wnd`: The ribbon panel in question;NEWLINE :param `client_size`: The client size;NEWLINE :param `client_offset`: The offset where the client rectangle begins within theNEWLINE panel (may be ``None``).NEWLINENEWLINE :see: :meth:`~RibbonMSWArtProvider.GetPanelClientSize`NEWLINE """NEWLINENEWLINE dc.SetFont(self._panel_label_font)NEWLINE label_size = wx.Size(*dc.GetTextExtent(wnd.GetLabel()))NEWLINENEWLINE client_size.IncBy(0, label_size.GetHeight())NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE client_size.IncBy(4, 8)NEWLINE else:NEWLINE client_size.IncBy(6, 6)NEWLINENEWLINE if client_offset is not None:NEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE client_offset = wx.Point(2, 3)NEWLINE else:NEWLINE client_offset = wx.Point(3, 2)NEWLINENEWLINE return client_sizeNEWLINENEWLINENEWLINE def GetPanelClientSize(self, dc, wnd, size, client_offset=None):NEWLINE """NEWLINE Calculate the client size of a panel for a given overall size.NEWLINENEWLINE This should act as the inverse to :meth:`~RibbonMSWArtProvider.GetPanelSize`, and decrement the given sizeNEWLINE by enough to fit the panel label and other chrome.NEWLINENEWLINE :param `dc`: A device context to use if one is required for size calculations;NEWLINE :param `wnd`: The ribbon panel in question;NEWLINE :param `size`: The overall size to calculate client size for;NEWLINE :param `client_offset`: The offset where the returned client size begins withinNEWLINE the given size (may be ``None``).NEWLINENEWLINE :see: :meth:`~RibbonMSWArtProvider.GetPanelSize`NEWLINE """NEWLINENEWLINE dc.SetFont(self._panel_label_font)NEWLINE label_size = wx.Size(*dc.GetTextExtent(wnd.GetLabel()))NEWLINENEWLINE size.DecBy(0, label_size.GetHeight())NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE size.DecBy(4, 8)NEWLINE else:NEWLINE size.DecBy(6, 6)NEWLINENEWLINE if client_offset is not None:NEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE client_offset = wx.Point(2, 3)NEWLINE else:NEWLINE client_offset = wx.Point(3, 2)NEWLINENEWLINE if size.x < 
0:NEWLINE size.x = 0NEWLINE if size.y < 0:NEWLINE size.y = 0NEWLINENEWLINE return size, client_offsetNEWLINENEWLINENEWLINE def GetGallerySize(self, dc, wnd, client_size):NEWLINE """NEWLINE Calculate the size of a :class:`~wx.lib.agw.ribbon.gallery.RibbonGallery` control for a given client size.NEWLINENEWLINE This should increment the given size by enough to fit the gallery border,NEWLINE buttons, and any other chrome.NEWLINENEWLINE :param `dc`: A device context to use if one is required for size calculations;NEWLINE :param `wnd`: The gallery in question;NEWLINE :param `client_size`: The client size.NEWLINENEWLINE :see: :meth:`~RibbonMSWArtProvider.GetGalleryClientSize`NEWLINE """NEWLINENEWLINE client_size.IncBy(2, 1) # Left / top paddingNEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE client_size.IncBy(1, 16) # Right / bottom paddingNEWLINE else:NEWLINE client_size.IncBy(16, 1) # Right / bottom paddingNEWLINENEWLINE return client_sizeNEWLINENEWLINENEWLINE def GetGalleryClientSize(self, dc, wnd, size, client_offset=None, scroll_up_button=None,NEWLINE scroll_down_button=None, extension_button=None):NEWLINENEWLINE """NEWLINE Calculate the client size of a :class:`~wx.lib.agw.ribbon.gallery.RibbonGallery` control for a given size.NEWLINENEWLINE This should act as the inverse to :meth:`~RibbonMSWArtProvider.GetGallerySize`, and decrement the givenNEWLINE size by enough to fit the gallery border, buttons, and other chrome.NEWLINENEWLINE :param `dc`: A device context to use if one is required for size calculations;NEWLINE :param `wnd`: The gallery in question;NEWLINE :param `size`: The overall size to calculate the client size for;NEWLINE :param `client_offset`: The position within the given size at which theNEWLINE returned client size begins;NEWLINE :param `scroll_up_button`: The rectangle within the given size which theNEWLINE scroll up button occupies;NEWLINE :param `scroll_down_button`: The rectangle within the given size which theNEWLINE scroll down button occupies;NEWLINE :param `extension_button`: The rectangle within the given size which theNEWLINE extension button occupies.NEWLINENEWLINE """NEWLINENEWLINE scroll_up = wx.Rect()NEWLINE scroll_down = wx.Rect()NEWLINE extension = wx.Rect()NEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE # Flow is vertical - put buttons on bottomNEWLINE scroll_up.y = size.GetHeight() - 15NEWLINE scroll_up.height = 15NEWLINE scroll_up.x = 0NEWLINE scroll_up.width = (size.GetWidth() + 2) / 3NEWLINE scroll_down.y = scroll_up.yNEWLINE scroll_down.height = scroll_up.heightNEWLINE scroll_down.x = scroll_up.x + scroll_up.widthNEWLINE scroll_down.width = scroll_up.widthNEWLINE extension.y = scroll_down.yNEWLINE extension.height = scroll_down.heightNEWLINE extension.x = scroll_down.x + scroll_down.widthNEWLINE extension.width = size.GetWidth() - scroll_up.width - scroll_down.widthNEWLINE size.DecBy(1, 16)NEWLINE size.DecBy(2, 1)NEWLINENEWLINE else:NEWLINE # Flow is horizontal - put buttons on rightNEWLINE scroll_up.x = size.GetWidth() - 15NEWLINE scroll_up.width = 15NEWLINE scroll_up.y = 0NEWLINE scroll_up.height = (size.GetHeight() + 2) / 3NEWLINE scroll_down.x = scroll_up.xNEWLINE scroll_down.width = scroll_up.widthNEWLINE scroll_down.y = scroll_up.y + scroll_up.heightNEWLINE scroll_down.height = scroll_up.heightNEWLINE extension.x = scroll_down.xNEWLINE extension.width = scroll_down.widthNEWLINE extension.y = scroll_down.y + scroll_down.heightNEWLINE extension.height = size.GetHeight() - scroll_up.height - scroll_down.heightNEWLINE 
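# Strip the right-hand button strip and the outer border, mirroring theNEWLINE # vertical branch above.NEWLINE 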
size.DecBy(16, 1)NEWLINE size.DecBy(2, 1)NEWLINENEWLINE client_offset = wx.Point(2, 1)NEWLINE scroll_up_button = scroll_upNEWLINE scroll_down_button = scroll_downNEWLINE extension_button = extensionNEWLINENEWLINE return size, client_offset, scroll_up_button, scroll_down_button, extension_buttonNEWLINENEWLINENEWLINE def GetPageBackgroundRedrawArea(self, dc, wnd, page_old_size, page_new_size):NEWLINE """NEWLINE Calculate the portion of a page background which needs to be redrawn when a pageNEWLINE is resized.NEWLINENEWLINE To optimise the drawing of page backgrounds, as small an area as possible shouldNEWLINE be returned. Of course, if the way in which a background is drawn means that theNEWLINE entire background needs to be repainted on resize, then the entire new sizeNEWLINE should be returned.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The page which is being resized;NEWLINE :param `page_old_size`: The size of the page prior to the resize (which hasNEWLINE already been painted);NEWLINE :param `page_new_size`: The size of the page after the resize.NEWLINENEWLINE """NEWLINENEWLINE if page_new_size.GetWidth() != page_old_size.GetWidth():NEWLINE if page_new_size.GetHeight() != page_old_size.GetHeight():NEWLINE # Width and height both changed - redraw everythingNEWLINE return wx.Rect(0, 0, *page_new_size)NEWLINE else:NEWLINE # Only width changed - redraw right hand sideNEWLINE right_edge_width = 4NEWLINE new_rect = wx.Rect(page_new_size.GetWidth() - right_edge_width, 0, right_edge_width, page_new_size.GetHeight())NEWLINE old_rect = wx.Rect(page_old_size.GetWidth() - right_edge_width, 0, right_edge_width, page_old_size.GetHeight())NEWLINENEWLINE else:NEWLINE if page_new_size.GetHeight() == page_old_size.GetHeight():NEWLINE # Nothing changed (should never happen) - redraw nothingNEWLINE return wx.Rect(0, 0, 0, 0)NEWLINE else:NEWLINE # Height changed - need to redraw everything (as the backgroundNEWLINE # gradient is done vertically).NEWLINE return wx.Rect(0, 0, *page_new_size)NEWLINENEWLINE new_rect.Union(old_rect)NEWLINE new_rect.Intersect(wx.Rect(0, 0, *page_new_size))NEWLINE return new_rectNEWLINENEWLINENEWLINE def GetButtonBarButtonSize(self, dc, wnd, kind, size, label, bitmap_size_large, bitmap_size_small,NEWLINE button_size=None, normal_region=None, dropdown_region=None):NEWLINE """NEWLINE Calculate the size of a button within a :class:`~wx.lib.agw.ribbon.buttonbar.RibbonButtonBar`.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The window onto which the button will eventually be drawnNEWLINE (which is normally a :class:`~wx.lib.agw.ribbon.buttonbar.RibbonButtonBar`, though this is not guaranteed);NEWLINE :param `kind`: The kind of button;NEWLINE :param `size`: The size-class to calculate the size for. 
Buttons on a buttonNEWLINE bar can have three distinct sizes: ``RIBBON_BUTTONBAR_BUTTON_SMALL``,NEWLINE ``RIBBON_BUTTONBAR_BUTTON_MEDIUM``, and ``RIBBON_BUTTONBAR_BUTTON_LARGE``.NEWLINE If the requested size-class is not applicable, then ``False`` should be returned;NEWLINE :param `label`: The label of the button;NEWLINE :param `bitmap_size_large`: The size of all "large" bitmaps on the button bar;NEWLINE :param `bitmap_size_small`: The size of all "small" bitmaps on the button bar;NEWLINE :param `button_size`: The size, in pixels, of the button;NEWLINE :param `normal_region`: The region of the button which constitutes the normal button;NEWLINE :param `dropdown_region`: The region of the button which constitutes the dropdown button.NEWLINENEWLINE :returns: ``True`` if a size exists for the button, ``False`` otherwise.NEWLINE """NEWLINENEWLINE drop_button_width = 8NEWLINENEWLINE normal_region = wx.Rect()NEWLINE dropdown_region = wx.Rect()NEWLINENEWLINE dc.SetFont(self._button_bar_label_font)NEWLINE result = size & RIBBON_BUTTONBAR_BUTTON_SIZE_MASKNEWLINENEWLINE if result == RIBBON_BUTTONBAR_BUTTON_SMALL:NEWLINE # Small bitmap, no labelNEWLINE button_size = wx.Size(bitmap_size_small + wx.Size(6, 4))NEWLINENEWLINE if kind in [RIBBON_BUTTON_NORMAL, RIBBON_BUTTON_TOGGLE]:NEWLINE normal_region = wx.Rect(0, 0, *button_size)NEWLINE dropdown_region = wx.Rect(0, 0, 0, 0)NEWLINENEWLINE elif kind == RIBBON_BUTTON_DROPDOWN:NEWLINE button_size += wx.Size(drop_button_width, 0)NEWLINE dropdown_region = wx.Rect(0, 0, *button_size)NEWLINE normal_region = wx.Rect(0, 0, 0, 0)NEWLINENEWLINE elif kind == RIBBON_BUTTON_HYBRID:NEWLINE normal_region = wx.Rect(0, 0, *button_size)NEWLINE dropdown_region = wx.Rect(button_size.GetWidth(), 0, drop_button_width, button_size.GetHeight())NEWLINE button_size += wx.Size(drop_button_width, 0)NEWLINENEWLINE elif result == RIBBON_BUTTONBAR_BUTTON_MEDIUM:NEWLINE # Small bitmap, with label to the rightNEWLINE is_supported, button_size, normal_region, dropdown_region = self.GetButtonBarButtonSize(dc, wnd, kind,NEWLINE RIBBON_BUTTONBAR_BUTTON_SMALL,NEWLINE label, bitmap_size_large,NEWLINE bitmap_size_small)NEWLINE text_size = dc.GetTextExtent(label)[0]NEWLINE button_size.SetWidth(button_size.GetWidth() + text_size)NEWLINENEWLINE if kind == RIBBON_BUTTON_DROPDOWN:NEWLINE dropdown_region.SetWidth(dropdown_region.GetWidth() + text_size)NEWLINENEWLINE elif kind == RIBBON_BUTTON_HYBRID:NEWLINE dropdown_region.SetX(dropdown_region.GetX() + text_size)NEWLINE normal_region.SetWidth(normal_region.GetWidth() + text_size)NEWLINE # no breakNEWLINE elif kind in [RIBBON_BUTTON_NORMAL, RIBBON_BUTTON_TOGGLE]:NEWLINE normal_region.SetWidth(normal_region.GetWidth() + text_size)NEWLINENEWLINE elif result == RIBBON_BUTTONBAR_BUTTON_LARGE:NEWLINE # Large bitmap, with label below (possibly split over 2 lines)NEWLINENEWLINE icon_size = wx.Size(*bitmap_size_large)NEWLINE icon_size += wx.Size(4, 4)NEWLINE best_width, label_height = dc.GetTextExtent(label)NEWLINE last_line_extra_width = 0NEWLINENEWLINE if kind not in [RIBBON_BUTTON_NORMAL, RIBBON_BUTTON_TOGGLE]:NEWLINE last_line_extra_width += 8NEWLINENEWLINE for i in range(0, len(label)):NEWLINE if RibbonCanLabelBreakAtPosition(label, i):NEWLINENEWLINE width = max(dc.GetTextExtent(label[0:i])[0],NEWLINE dc.GetTextExtent(label[i+1:])[0] + last_line_extra_width)NEWLINE if width < best_width:NEWLINE best_width = widthNEWLINENEWLINE label_height *= 2 # Assume two lines even when only one is usedNEWLINE # (to give all buttons a consistent height)NEWLINE 
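# Make the button wide enough for the wrapped label and tall enough forNEWLINE # the icon plus two label lines.NEWLINE 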
icon_size.SetWidth(max(icon_size.GetWidth(), best_width) + 6)NEWLINE icon_size.SetHeight(icon_size.GetHeight() + label_height)NEWLINE button_size = wx.Size(*icon_size)NEWLINENEWLINE if kind == RIBBON_BUTTON_DROPDOWN:NEWLINE dropdown_region = wx.Rect(0, 0, *icon_size)NEWLINE elif kind == RIBBON_BUTTON_HYBRID:NEWLINE normal_region = wx.Rect(0, 0, *icon_size)NEWLINE normal_region.height -= 2 + label_heightNEWLINE dropdown_region.x = 0NEWLINE dropdown_region.y = normal_region.heightNEWLINE dropdown_region.width = icon_size.GetWidth()NEWLINE dropdown_region.height = icon_size.GetHeight() - normal_region.heightNEWLINE elif kind in [RIBBON_BUTTON_NORMAL, RIBBON_BUTTON_TOGGLE]:NEWLINE normal_region = wx.Rect(0, 0, *icon_size)NEWLINENEWLINE return True, button_size, normal_region, dropdown_regionNEWLINENEWLINENEWLINE def GetMinimisedPanelMinimumSize(self, dc, wnd, desired_bitmap_size=None, expanded_panel_direction=None):NEWLINE """NEWLINE Calculate the size of a minimised ribbon panel.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The ribbon panel in question. Attributes like the panel label canNEWLINE be queried from this;NEWLINE :param `desired_bitmap_size`: If given (i.e. not ``None``), it is returned filledNEWLINE with the size of bitmap suitable for a minimised panel (16x16 for this provider);NEWLINE :param `expanded_panel_direction`: If given, it is returned filled with theNEWLINE direction in which the panel will expand (``wx.EAST`` when the ribbon flowsNEWLINE vertically, ``wx.SOUTH`` otherwise).NEWLINENEWLINE """NEWLINENEWLINE if desired_bitmap_size is not None:NEWLINE desired_bitmap_size = wx.Size(16, 16)NEWLINENEWLINE if expanded_panel_direction is not None:NEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE expanded_panel_direction = wx.EASTNEWLINE else:NEWLINE expanded_panel_direction = wx.SOUTHNEWLINENEWLINE base_size = wx.Size(42, 42)NEWLINENEWLINE dc.SetFont(self._panel_label_font)NEWLINE label_size = wx.Size(*dc.GetTextExtent(wnd.GetLabel()))NEWLINE label_size.IncBy(2, 2) # Allow for differences between this DC and a paint DCNEWLINE label_size.IncBy(6, 0) # PaddingNEWLINE label_size.y *= 2 # Second line for dropdown buttonNEWLINENEWLINE if self._flags & RIBBON_BAR_FLOW_VERTICAL:NEWLINE # Label alongside iconNEWLINE return wx.Size(base_size.x + label_size.x, max(base_size.y, label_size.y)), \NEWLINE desired_bitmap_size, expanded_panel_directionNEWLINE else:NEWLINE # Label beneath iconNEWLINE return wx.Size(max(base_size.x, label_size.x), base_size.y + label_size.y), \NEWLINE desired_bitmap_size, expanded_panel_directionNEWLINENEWLINENEWLINE def GetToolSize(self, dc, wnd, bitmap_size, kind, is_first, is_last, dropdown_region=None):NEWLINE """NEWLINE Calculate the size of a tool within a :class:`~wx.lib.agw.ribbon.toolbar.RibbonToolBar`.NEWLINENEWLINE :param `dc`: A device context to use when one is required for size calculations;NEWLINE :param `wnd`: The window onto which the tool will eventually be drawn;NEWLINE :param `bitmap_size`: The size of the tool's foreground bitmap;NEWLINE :param `kind`: The kind of tool (normal, dropdown, or hybrid);NEWLINE :param `is_first`: ``True`` if the tool is the first within its group. ``False``NEWLINE otherwise;NEWLINE :param `is_last`: ``True`` if the tool is the last within its group. 
``False``NEWLINE otherwise;NEWLINE :param `dropdown_region`: For dropdown and hybrid tools, the region within theNEWLINE returned size which counts as the dropdown part.NEWLINE """NEWLINENEWLINE size = wx.Size(*bitmap_size)NEWLINE size.IncBy(7, 6)NEWLINENEWLINE if is_last:NEWLINE size.IncBy(1, 0)NEWLINENEWLINE if kind & RIBBON_BUTTON_DROPDOWN:NEWLINE size.IncBy(8, 0)NEWLINE if kind == RIBBON_BUTTON_DROPDOWN:NEWLINE dropdown_region = wx.Rect(0, 0, *size)NEWLINE else:NEWLINE dropdown_region = wx.Rect(size.GetWidth() - 8, 0, 8, size.GetHeight())NEWLINE else:NEWLINE dropdown_region = wx.Rect(0, 0, 0, 0)NEWLINENEWLINE return size, dropdown_regionNEWLINENEWLINE |
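# Illustrative sketch (hypothetical helper, not part of the ribbon provider above):NEWLINE# the label-splitting idea used by DrawButtonBarButtonForeground andNEWLINE# GetButtonBarButtonSize - pick the break position that minimises the wider ofNEWLINE# the two resulting label lines. Width is approximated by character count here;NEWLINE# the real code measures with dc.GetTextExtent() and only breaks whereNEWLINE# RibbonCanLabelBreakAtPosition() allows.NEWLINENEWLINENEWLINEdef best_label_break(label):NEWLINE    best = (label, '')NEWLINE    best_width = len(label)NEWLINE    for i, ch in enumerate(label):NEWLINE        if ch == ' ':  # candidate break positionNEWLINE            width = max(i, len(label) - i - 1)  # wider of the two halvesNEWLINE            if width < best_width:NEWLINE                best_width = widthNEWLINE                best = (label[:i], label[i + 1:])NEWLINE    return bestNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    assert best_label_break('Paste Special') == ('Paste', 'Special')NEWLINE    assert best_label_break('Cut') == ('Cut', '')NEWLINE |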
# -*- coding:utf-8 -*-NEWLINE"""NEWLINEStudent-side API for the educational administration system of Hefei University of Technology, plus tools to help developers build around student data.NEWLINE"""NEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEfrom . import util, parserNEWLINEfrom .exception import *NEWLINEfrom .log import *NEWLINEfrom .shortcut import *NEWLINEfrom .value import *NEWLINENEWLINE__title__ = 'hfut'NEWLINE__version__ = '2.2.3'NEWLINE__author__ = 'erliang'NEWLINE__author_email__ = '[email protected]'NEWLINE__url__ = 'https://github.com/er1iang/hfut'NEWLINE__license__ = 'MIT'NEWLINE |
import reNEWLINEimport unittestNEWLINEimport markdownNEWLINEfrom biovis_media_extension.extension import BioVisPluginExtensionNEWLINENEWLINEclass TestUtils(unittest.TestCase):NEWLINE plugin = BioVisPluginExtension(configs={})NEWLINENEWLINE def test_invalid_plugin(self):NEWLINE text = """NEWLINE @invalid-plugin()NEWLINE """NEWLINE output = markdown.markdown(text, extensions=[self.plugin])NEWLINE matched = re.match("<div class='alert alert-danger' role='alert'>", output) is not NoneNEWLINE self.assertTrue(matched)NEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE |
#!/usr/bin/env pythonNEWLINEfrom __future__ import division, print_function, absolute_importNEWLINENEWLINEimport numpy as npNEWLINEfrom numpy.testing import (run_module_suite, assert_allclose, assert_,NEWLINE assert_raises)NEWLINENEWLINEimport pywtNEWLINENEWLINENEWLINEdef test_dwt_idwt_basic():NEWLINE x = [3, 7, 1, 1, -2, 5, 4, 6]NEWLINE cA, cD = pywt.dwt(x, 'db2')NEWLINE cA_expect = [5.65685425, 7.39923721, 0.22414387, 3.33677403, 7.77817459]NEWLINE cD_expect = [-2.44948974, -1.60368225, -4.44140056, -0.41361256,NEWLINE 1.22474487]NEWLINE assert_allclose(cA, cA_expect)NEWLINE assert_allclose(cD, cD_expect)NEWLINENEWLINE x_roundtrip = pywt.idwt(cA, cD, 'db2')NEWLINE assert_allclose(x_roundtrip, x, rtol=1e-10)NEWLINENEWLINENEWLINEdef test_dwt_wavelet_kwd():NEWLINE x = np.array([3, 7, 1, 1, -2, 5, 4, 6])NEWLINE w = pywt.Wavelet('sym3')NEWLINE cA, cD = pywt.dwt(x, wavelet=w, mode='cpd')NEWLINE cA_expect = [4.38354585, 3.80302657, 7.31813271, -0.58565539, 4.09727044,NEWLINE 7.81994027]NEWLINE cD_expect = [-1.33068221, -2.78795192, -3.16825651, -0.67715519,NEWLINE -0.09722957, -0.07045258]NEWLINE assert_allclose(cA, cA_expect)NEWLINE assert_allclose(cD, cD_expect)NEWLINENEWLINENEWLINEdef test_dwt_coeff_len():NEWLINE x = np.array([3, 7, 1, 1, -2, 5, 4, 6])NEWLINE w = pywt.Wavelet('sym3')NEWLINE ln = pywt.dwt_coeff_len(data_len=len(x), filter_len=w.dec_len, mode='sym')NEWLINE assert_(ln == 6)NEWLINE ln_modes = [pywt.dwt_coeff_len(len(x), w.dec_len, mode) for mode inNEWLINE pywt.MODES.modes]NEWLINE assert_allclose(ln_modes, [6, 6, 6, 6, 6, 4])NEWLINENEWLINENEWLINEdef test_idwt_none_input():NEWLINE # None input equals arrays of zeros of the right lengthNEWLINE res1 = pywt.idwt([1,2,0,1], None, 'db2', 'sym')NEWLINE res2 = pywt.idwt([1, 2, 0, 1], [0, 0, 0, 0], 'db2', 'sym')NEWLINE assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)NEWLINENEWLINE res1 = pywt.idwt(None, [1, 2, 0, 1], 'db2', 'sym')NEWLINE res2 = pywt.idwt([0, 0, 0, 0], [1, 2, 0, 1], 'db2', 'sym')NEWLINE assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)NEWLINENEWLINE # Only one argument at a time can be NoneNEWLINE assert_raises(ValueError, pywt.idwt, None, None, 'db2', 'sym')NEWLINENEWLINENEWLINEdef test_idwt_correct_size_kw():NEWLINE res = pywt.idwt([1, 2, 3, 4, 5], [1, 2, 3, 4], 'db2', 'sym',NEWLINE correct_size=True)NEWLINE expected = [1.76776695, 0.61237244, 3.18198052, 0.61237244, 4.59619408,NEWLINE 0.61237244]NEWLINE assert_allclose(res, expected)NEWLINENEWLINE assert_raises(ValueError, pywt.idwt,NEWLINE [1, 2, 3, 4, 5], [1, 2, 3, 4], 'db2', 'sym')NEWLINE assert_raises(ValueError, pywt.idwt, [1, 2, 3, 4], [1, 2, 3, 4, 5], 'db2',NEWLINE 'sym', correct_size=True)NEWLINENEWLINENEWLINEdef test_idwt_invalid_input():NEWLINE # Too short, min length is 4 for 'db4':NEWLINE assert_raises(ValueError, pywt.idwt, [1,2,4], [4,1,3], 'db4', 'sym')NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE run_module_suite()NEWLINE |
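# Usage sketch (illustrative, assumes pywt and numpy are installed): theNEWLINE# dwt/idwt round trip exercised by test_dwt_idwt_basic above, with theNEWLINE# same 'db2' wavelet and the library's default mode.NEWLINEimport numpy as npNEWLINEimport pywtNEWLINENEWLINEx = np.array([3.0, 7.0, 1.0, 1.0, -2.0, 5.0, 4.0, 6.0])NEWLINEcA, cD = pywt.dwt(x, 'db2')  # single-level discrete wavelet transformNEWLINEx_rec = pywt.idwt(cA, cD, 'db2')  # inverse transform reconstructs the signalNEWLINEassert np.allclose(x_rec, x)NEWLINE |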
#!/usr/bin/env pythonNEWLINENEWLINE# Copyright (c) 2015 Google Inc. All rights reserved.NEWLINE# Use of this source code is governed by a BSD-style license that can beNEWLINE# found in the LICENSE file.NEWLINENEWLINE"""NEWLINEMake sure that we cause downstream modules to get built when we depend on theNEWLINEparent targets.NEWLINE"""NEWLINENEWLINEimport TestGypNEWLINENEWLINEtest = TestGyp.TestGyp()NEWLINENEWLINECHDIR = 'module-dep'NEWLINEtest.run_gyp('indirect-module-dependency.gyp', chdir=CHDIR)NEWLINEtest.build('indirect-module-dependency.gyp', 'an_exe', chdir=CHDIR)NEWLINEtest.built_file_must_exist(NEWLINE test.built_file_basename('a_module', test.LOADABLE_MODULE), chdir=CHDIR)NEWLINENEWLINEtest.pass_test()NEWLINE |
genero = ''NEWLINEwhile genero != 'F' and genero != 'M':NEWLINE genero = str(input('Gender [M/F]: ')).upper()NEWLINE if genero == 'F':NEWLINE print('Your gender is FEMALE!')NEWLINE elif genero == 'M':NEWLINE print('Your gender is MALE!')NEWLINEprint('END') |
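# Generalised variant (illustrative) of the validation loop above: keepNEWLINE# prompting until the answer is one of the accepted options.NEWLINEdef ask_choice(prompt, options):NEWLINE    answer = ''NEWLINE    while answer not in options:NEWLINE        answer = str(input(prompt)).upper()NEWLINE    return answerNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    genero = ask_choice('Gender [M/F]: ', {'F', 'M'})NEWLINE    print('Your gender is ' + ('FEMALE!' if genero == 'F' else 'MALE!'))NEWLINE |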
#!/usr/bin/env python2NEWLINEimport numpy as npNEWLINEimport path_parserNEWLINEfrom mpl_toolkits.mplot3d import Axes3DNEWLINEimport matplotlib.pyplot as pltNEWLINEfrom matplotlib import cmNEWLINEfrom matplotlib.ticker import LinearLocator, FormatStrFormatterNEWLINEfrom scipy.spatial import KDTreeNEWLINENEWLINE# ruta = 'sample_map_origin_map.txt'NEWLINEruta = 'Trayectoria3.txt'NEWLINENEWLINEdef main():NEWLINE    arr_in = np.array(list(path_parser.read_points(ruta)))NEWLINE    print arr_inNEWLINENEWLINEif __name__ == '__main__':NEWLINE    main() |
#!C:\Users\sara8\PycharmProjects\card_test\venv\Scripts\python.exeNEWLINE# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'NEWLINE__requires__ = 'pip==19.0.3'NEWLINEimport reNEWLINEimport sysNEWLINEfrom pkg_resources import load_entry_pointNEWLINENEWLINEif __name__ == '__main__':NEWLINE sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])NEWLINE sys.exit(NEWLINE load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()NEWLINE )NEWLINE |
from controller.client import ClientNEWLINENEWLINENEWLINEdef offerview(user, offer, taking):NEWLINE isvalid = _offer(user, offer, taking)NEWLINE if isvalid:NEWLINE print('The trade has been announced')NEWLINE return isvalidNEWLINE else:NEWLINE print('Sorry, but something is not right (insufficient quantity or incorrect ID)')NEWLINE return NoneNEWLINENEWLINENEWLINEdef _offer(user, offer, taking):NEWLINE client = Client()NEWLINE response = client.createTrade(idUser=user.idUser, offer=offer, taking=taking)NEWLINE isvalid = response.responseNEWLINENEWLINE return isvalidNEWLINE |