from .client import NetworkTableClient
from .server import NetworkTableServer
from .socketstream import SocketStreamFactory, SocketServerStreamProvider
from .type import BooleanArray, NumberArray, StringArray, DefaultEntryTypes
# -*- coding: utf-8 -*-

'''
**Wavelet Based in CUSUM control chart for filtering signals Project (module**
``statsWaveletFilt.miscellaneous`` **):** Miscellaneous functions for
working with data and displaying wavelet coefficients

*Created by Tiarles Guterres, 2018*
'''


def showWaveletCoeff(coefficients, filename='tmp', format='pdf',
                     threshold_value=0, color='black', color_threshold='black',
                     figsize=(7, 8), title=''):
    '''
    Show and save the wavelet and scale coefficients in a plot.

    Parameters
    ----------
    coefficients: list of numpy.array's
        With the scale coefficients in position 0. Equal to the
        ``pywt.wavedec()`` return.
    filename: string
        Optional, 'tmp' by default. The first part of the figure's
        file name.
    format: string
        Optional, 'pdf' by default. The extension of the figure's file
        name. Can also be 'png', 'ps', 'eps' or 'svg'.
    threshold_value: int, float or list
        Optional, 0 by default, which means nothing new happens.
        Otherwise, a line at the threshold value is plotted in every
        wavelet coefficient plot. This value can also be a list, but it
        has to be the same size as the wavelet coefficients (without
        the scale coefficient).

    Returns
    -------
    void:
        Nothing is returned; the plots are shown and saved.

    See also
    --------
    pywt.wavedec: Function that decomposes the signal into wavelet and
        scale coefficients
    pywt.waverec: Function that recomposes the signal from wavelet and
        scale coefficients

    filtration.filtration: Function that uses this function to filter via
        wavelet coefficients

    filtration.filtrationCusum: Function that uses the Cumulative Sum
        Control Chart and some variations to filter wavelet coefficients.
    '''

    import numpy as np
    import matplotlib.pyplot as plt

    if isinstance(threshold_value, (int, float, np.float64, np.int32,
                                    np.int64)):
        threshold_list = [threshold_value]*len(coefficients)
    else:
        threshold_list = [0] + list(threshold_value)

    N = len(coefficients) - 1

    fig, ax = plt.subplots(len(coefficients), 1, figsize=figsize)

    ax[0].set_title(title)

    # Scale coefficients
    ax[0].plot(coefficients[0], color=color, label='$c_0$ ($c_%d$)' % N)
    ax[0].legend(loc=1)
    ax[0].grid()

    # Wavelet coefficients
    for i in range(1, len(coefficients)):
        ax[i].plot(coefficients[i], color=color,
                   label='$d_%d$ ($d_%d$)' % (i - 1, N - i + 1))
        if threshold_list[i] != 0:
            x_min, x_max = ax[i].get_xlim()
            ax[i].hlines(threshold_list[i], x_min, x_max,
                         colors=color_threshold, linestyles='dashed')

            ax[i].hlines(-threshold_list[i], x_min, x_max,
                         colors=color_threshold, linestyles='dashed',
                         label='$\\lambda$')
        ax[i].legend(loc=1)
        ax[i].grid()

    plt.tight_layout()
    plt.savefig(filename + '.' + format)
    plt.show()

    return


def normalizeData(data, min=0, max=1):
    '''
    Almost a map function: normalizes the data between min and max
    values.

    Parameters
    ----------
    data: list or array-like
        The values to normalize.
    min: int or float
        Optional, 0 by default. The value the minimum of the data is
        mapped to.
    max: int or float
        Optional, 1 by default. The value the maximum of the data is
        mapped to.

    Returns
    -------
    numpy.array:
        The data normalized between the min and max values.
    '''

    import numpy as np

    data = np.array(data)

    new_data = data.copy()
    max_value = data.max()
    min_value = data.min()

    diff_pp = max_value - min_value
    diff_new_pp = max - min

    new_data = new_data - min_value
    new_data = new_data / diff_pp

    new_data = new_data * diff_new_pp
    new_data = new_data + min

    return new_data


def generateData(functions=['doppler', 'block', 'bump', 'heavsine'],
                 varNoises=[0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007,
                            0.008, 0.009, 0.010],
                 dim_signals=1024,
                 n_samples_per_sig_per_noise=10000, folder='tmp'):
    '''
    If you would like to generate your dataset before running your
    tests, you can use this function, parameterized by 1) the type of
    signal and 2) the amount of noise (as a variance). Saves in ``.npy``
    format.
    '''

    from statsWaveletFilt.signals import bumpFunction, blockFunction
    from statsWaveletFilt.signals import dopplerFunction, heavsineFunction
    import numpy as np
    import os

    try:
        os.mkdir(folder)
        print('try: ', folder)
    except FileExistsError:
        pass

    n_it = n_samples_per_sig_per_noise

    functions_dic = {'doppler': dopplerFunction,
                     'block': blockFunction,
                     'bump': bumpFunction,
                     'heavsine': heavsineFunction}

    functions_dic_used = {function: functions_dic[function]
                          for function in functions}

    for name, function in functions_dic_used.items():
        x, y = function(dim_signals)
        print('|----', name)

        try:
            os.mkdir(folder + '/' + name)
        except FileExistsError:
            pass

        for varNoise in varNoises:
            counter = 0
            print('|----|----', varNoise)
            while counter < n_it:
                np.random.seed(counter)
                noise = np.random.normal(0, np.sqrt(varNoise), dim_signals)

                sinalNoisy = y + noise

                filename = './%s/%s/%f_%d.npy' % (folder, name, varNoise,
                                                  counter)

                np.save(filename, sinalNoisy)
                counter += 1
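# Usage sketch for the module above (not part of the module itself):
# assumes PyWavelets is installed and that the import path matches your
# install. A noisy sine is normalized into [0, 1], decomposed with
# pywt, and every coefficient level is plotted with a dashed threshold
# line at 0.2.
import numpy as np
import pywt

from statsWaveletFilt.miscellaneous import normalizeData, showWaveletCoeff

signal = normalizeData(np.sin(np.linspace(0, 4 * np.pi, 1024))
                       + np.random.normal(0, 0.1, 1024))
coefficients = pywt.wavedec(signal, 'db4', level=4)
showWaveletCoeff(coefficients, filename='sine_demo', format='png',
                 threshold_value=0.2, title='db4 decomposition')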
#!/usr/bin/env python
# coding=utf-8

import socket

from urllib.parse import urlparse
from http.server import HTTPServer, BaseHTTPRequestHandler


class ProxyHandler(BaseHTTPRequestHandler):
    """
    Reference links:
    https://zhuanlan.zhihu.com/p/28737960
    https://docs.python.org/3/library/http.server.html
    """
    def _recv_proxy_data(self, socket_client: socket.socket):
        data = b''
        while True:
            recv = socket_client.recv(1024)
            if recv:
                data += recv
            else:
                break
        socket_client.close()
        return data

    def do_GET(self):
        uri = urlparse(self.path)
        scheme, host, path = uri.scheme, uri.netloc, uri.path
        # Note: the upstream connection is a plain TCP socket, so only
        # http targets actually work; https would need a TLS-wrapped
        # socket.
        port = 443 if scheme == 'https' else 80

        # Rebuild the request line and forward the client's headers.
        data = 'GET {} {}\r\n'.format(path, self.protocol_version)
        for k, v in self.headers.items():
            data += '{}: {}\r\n'.format(k, v)
        data += '\r\n'

        with open('./res.txt', 'a') as fp:
            fp.write(data)
        socket_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        socket_client.connect((host, port))
        socket_client.sendall(data.encode('utf-8'))
        recv_res_data = self._recv_proxy_data(socket_client)
        self.wfile.write(recv_res_data)


def main():
    server = HTTPServer(('', 6789), ProxyHandler)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        server.socket.close()


if __name__ == '__main__':
    main()
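# Quick smoke test for the proxy above (a hedged sketch: assumes the
# server is already running locally on port 6789 and that `requests`
# is installed; the target URL is only an example).
import requests

resp = requests.get('http://example.com/',
                    proxies={'http': 'http://127.0.0.1:6789'})
print(resp.status_code, len(resp.text))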
#===============================================================================
#
#          FILE: parse_log_3.py
#
#         USAGE:
#
#   DESCRIPTION:
#
#       OPTIONS:
#  REQUIREMENTS:
#          BUGS:
#         NOTES:
#        AUTHOR: Debjit Pal
#       CONTACT: [email protected]
#  ORGANIZATION: ECE, Cornell University
#       VERSION:
#       CREATED: 22-11-2019
#      REVISION:
#     LMODIFIED: Fri 22 Nov 2019 11:49:26 PM EST
#===============================================================================

import os, sys
from shutil import copyfile as cpf

gl_dir = os.environ.get('GRAPHLEARN')
SOURCE_PATH = gl_dir + '/data_app_source_bit'
DEST_PATH = gl_dir + '/num_source'

#SOURCE_PATH = 'path..../source_data/data_app_source_bit'
#DEST_PATH = 'path..../source_data/num_source'

files = [f for f in os.listdir(SOURCE_PATH) if f.endswith('.edgelist')]
files_sorted = sorted(files)
del files

counter = 0

mh = open('nfile_efile_glearn.map', 'w')
for file_ in files_sorted:
    name = file_[:file_.find('.')]
    cpf_name = str(counter)
    cpf(SOURCE_PATH + '/' + name + '.edgelist', DEST_PATH + '/' + cpf_name + '.edgelist')
    cpf(SOURCE_PATH + '/' + name + '.nodelist', DEST_PATH + '/' + cpf_name + '.nodelist')
    mh.write(name + '\t' + cpf_name + '\n')
    counter = counter + 1

mh.close()
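# Hedged sketch: reading back the tab-separated name -> numeric-id map
# the loop above writes (the file name comes from the script; this
# reader is illustrative, not part of the original).
mapping = {}
with open('nfile_efile_glearn.map') as fh:
    for line in fh:
        name, num = line.rstrip('\n').split('\t')
        mapping[name] = num
print(len(mapping), 'files renumbered')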
# -*- coding: utf-8 -*-  # set utf-8 encoding
print "Hello World!"
print "Hello Again"
print "I like typing this."
print "This is fun."
print "Yay! Printing."
print "I'd much rather you 'not "
print 'I "said" do not touch this.'
# print "I love you all!"
# A comment, this is so you can read your program later.
# Anything after the # is ignored by python
print "I could have code like this."  # and the comment after is ignored
# You can also use a comment to "disable" or comment out a piece of code:

# print "This won't run"

print "This will run."
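# Note: the exercise above uses Python 2 print statements; under
# Python 3, print is a function, so the equivalent lines read:
print("Hello World!")
print("I could have code like this.")  # and the comment after is ignored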
from random import randint
from tkinter import *
from tkinter import ttk


class Node:
    def __init__(self, x, y, aValue):
        self.x = x
        self.y = y
        self.leftNode = 0
        self.bottomNode = 0
        self.rightNode = 0
        self.topNode = 0
        self.aValue = aValue


class AObject:
    def __init__(self, finder, start, pokemon, tablero):
        self.openQ = []
        self.closeQ = []
        self.rightWay = []
        self.steps = []

        def insertStep(node):
            if not self.rightWay:
                print('first step')
                self.rightWay.append(node)
                # print(self.rightWay, node, self.rightWay[0].rightNode)
            else:
                print('entered')
                for i in self.rightWay:
                    # The neighbor slots default to 0, so both the debug
                    # prints and the comparisons are guarded.
                    if i.rightNode != 0:
                        print('right', node.x, i.rightNode.x, node.y, i.rightNode.y)
                        if node.x == i.rightNode.x and node.y == i.rightNode.y:
                            self.rightWay = self.rightWay[0:self.rightWay.index(i) + 1]
                            break
                    if i.leftNode != 0:
                        print('left', node.x, i.leftNode.x, node.y, i.leftNode.y)
                        if node.x == i.leftNode.x and node.y == i.leftNode.y:
                            self.rightWay = self.rightWay[0:self.rightWay.index(i) + 1]
                            break
                    if i.topNode != 0:
                        print('top', node.x, i.topNode.x, node.y, i.topNode.y)
                        if node.x == i.topNode.x and node.y == i.topNode.y:
                            self.rightWay = self.rightWay[0:self.rightWay.index(i) + 1]
                            break
                    if i.bottomNode != 0:
                        print('bottom', node.x, i.bottomNode.x, node.y, i.bottomNode.y)
                        if node.x == i.bottomNode.x and node.y == i.bottomNode.y:
                            self.rightWay = self.rightWay[0:self.rightWay.index(i) + 1]
                            break
                # keep the new step after trimming the path back to its
                # predecessor
                self.rightWay.append(node)

        def insertClose(node):
            if self.openQ:
                for i in self.openQ:
                    if node.x == i.x and node.y == i.y:
                        self.openQ.remove(i)
                        break
            if self.closeQ:
                # keep the closed queue sorted by aValue
                for i in self.closeQ:
                    if node.aValue <= i.aValue:
                        self.closeQ.insert(self.closeQ.index(i), node)
                        break
                if node.aValue > self.closeQ[-1].aValue:
                    self.closeQ.append(node)
            else:
                self.closeQ.append(node)

        def insertOpen(node):
            # skip nodes that were already expanded
            if self.closeQ:
                for i in self.closeQ:
                    if node.x == i.x and node.y == i.y:
                        return
            if self.openQ:
                # keep the open queue sorted by aValue
                for i in self.openQ:
                    if node.aValue <= i.aValue:
                        self.openQ.insert(self.openQ.index(i), node)
                        break
                if node.aValue > self.openQ[-1].aValue:
                    self.openQ.append(node)
            else:
                self.openQ.append(node)

        def findWay(goal):
            self.rightWay = []

            def wayWithoutObstacle(finder):
                # A neighbor is walkable unless it is a Rock or the Van,
                # or it would fall off the 10x10 board.
                def walkable(cell):
                    return cell.name != 'Rock' and cell.name != 'Van'

                obstacles = {}
                obstacles['left'] = finder.x > 0 and walkable(tablero[finder.y][finder.x - 1])
                obstacles['right'] = finder.x < 9 and walkable(tablero[finder.y][finder.x + 1])
                obstacles['up'] = finder.y > 0 and walkable(tablero[finder.y - 1][finder.x])
                obstacles['down'] = finder.y < 9 and walkable(tablero[finder.y + 1][finder.x])
                return obstacles

            def manhatan(startX, startY, goal):
                return abs(startX - goal.x) + abs(startY - goal.y)

            g_n_ = manhatan(finder.x, finder.y, start)
            h_n_ = manhatan(finder.x, finder.y, goal)
            currentTrainer = Trainer(finder.y, finder.x)
            while True:
                a = input()  # debug pause: press Enter to advance one step
                print('Pokemon', goal.x, goal.y)
                if self.openQ:
                    currentTrainer = Trainer(self.openQ[0].y, self.openQ[0].x)
                    g_n_ = manhatan(currentTrainer.x, currentTrainer.y, start)
                    h_n_ = manhatan(currentTrainer.x, currentTrainer.y, goal)

                print('Pokeball', currentTrainer.x, currentTrainer.y)
                currentNode = Node(
                    currentTrainer.x, currentTrainer.y, g_n_ + h_n_)
                obstacles = wayWithoutObstacle(currentTrainer)
                print(obstacles)
                insertClose(currentNode)
                # for k in self.closeQ:
                #     print('closed queue', '[', k.x, k.y, k.aValue, ']')

                if obstacles['left']:
                    g_n_ = manhatan(currentTrainer.x - 1,
                                    currentTrainer.y, start)
                    h_n_ = manhatan(currentTrainer.x - 1,
                                    currentTrainer.y, goal)
                    insertOpen(Node(currentTrainer.x - 1,
                                    currentTrainer.y, g_n_ + h_n_))
                    currentNode.leftNode = Node(
                        currentTrainer.x - 1, currentTrainer.y, g_n_ + h_n_)
                if obstacles['right']:
                    g_n_ = manhatan(currentTrainer.x + 1,
                                    currentTrainer.y, start)
                    h_n_ = manhatan(currentTrainer.x + 1,
                                    currentTrainer.y, goal)
                    insertOpen(Node(currentTrainer.x + 1,
                                    currentTrainer.y, g_n_ + h_n_))
                    # the right neighbor sits at (x + 1, y)
                    currentNode.rightNode = Node(
                        currentTrainer.x + 1, currentTrainer.y, g_n_ + h_n_)
                if obstacles['up']:
                    g_n_ = manhatan(currentTrainer.x,
                                    currentTrainer.y - 1, start)
                    h_n_ = manhatan(currentTrainer.x,
                                    currentTrainer.y - 1, goal)
                    insertOpen(
                        Node(currentTrainer.x, currentTrainer.y - 1, g_n_ + h_n_))
                    # the top neighbor sits at (x, y - 1)
                    currentNode.topNode = Node(
                        currentTrainer.x, currentTrainer.y - 1, g_n_ + h_n_)
                if obstacles['down']:
                    g_n_ = manhatan(currentTrainer.x,
                                    currentTrainer.y + 1, start)
                    h_n_ = manhatan(currentTrainer.x,
                                    currentTrainer.y + 1, goal)
                    insertOpen(
                        Node(currentTrainer.x, currentTrainer.y + 1, g_n_ + h_n_))
                    # the bottom neighbor sits at (x, y + 1)
                    currentNode.bottomNode = Node(
                        currentTrainer.x, currentTrainer.y + 1, g_n_ + h_n_)

                insertStep(currentNode)

                # for k in self.openQ:
                #     print('open queue', '[', k.x, k.y, k.aValue, ']')

                if currentTrainer.x == goal.x and currentTrainer.y == goal.y:
                    for k in self.rightWay:
                        print('Step', '[', k.x, k.y, ']')
                    return self.rightWay

        self.steps.append(findWay(pokemon[0]))


class Pokemon:
    def __init__(self, i, j, pokemonId, container):
        self.name = 'Pokemon'
        self.pokemonId = pokemonId
        self.image = PhotoImage(file='images/' + str(pokemonId) + '.png')
        self.y = i
        self.x = j
        self.label = Label(
            container,
            height='64',
            width='64',
            borderwidth='2',
            image=self.image
        )


class Grass:
    def __init__(self, i, j, container):
        self.name = 'Grass'
        self.image = PhotoImage(file='images/grass.png')
        self.y = i
        self.x = j
        self.label = Label(
            container,
            height='64',
            width='64',
            borderwidth='2',
            image=self.image
        )


class Rock:
    def __init__(self, i, j, container):
        self.name = 'Rock'
        self.image = PhotoImage(file='images/rock.png')
        self.y = i
        self.x = j
        self.label = Label(
            container,
            height='64',
            width='64',
            borderwidth='2',
            image=self.image
        )


class Bean:
    def __init__(self, i, j, container):
        self.name = 'Bean'
        self.image = PhotoImage(file='images/jelly-beans.png')
        self.y = i
        self.x = j
        self.label = Label(
            container,
            height='64',
            width='64',
            borderwidth='2',
            image=self.image
        )


class Trainer:
    def __init__(self, i, j, container=False, pokeball=False):
        self.name = 'Trainer'
        self.y = i
        self.x = j
        self.back = False
        if container:
            self.image = PhotoImage(file='images/' + pokeball + '.png')
            self.label = Label(
                container,
                height='64',
                width='64',
                borderwidth='2',
                image=self.image
            )


class Van:
    def __init__(self, i, j, container):
        self.name = 'Van'
        self.image = PhotoImage(file='images/van.png')
        self.y = i
        self.x = j
        self.label = Label(
            container,
            height='64',
            width='64',
            borderwidth='2',
            image=self.image
        )


class Tablero:
    def __init__(self, size):
        self.window = Tk()
        self.window.title('Pokemon Finder')
        self.size = size
        self.tablero = []
        self.pokemonArray = []
        # Spawn the trainer at least one row down so the van fits above
        # it, and keep both indices on the board.
        self.trainer = Trainer(randint(1, self.size - 1),
                               randint(0, self.size - 1),
                               self.window, 'pokeball2')

        for i in range(10):
            self.tablero.append([])
            for j in range(10):
                if (j == self.trainer.x) and (i == self.trainer.y - 1):
                    self.van = Van(i, j, self.window)
                    self.tablero[i].append(self.van)
                elif randint(0, 6) == 1:
                    pokemon = Pokemon(i, j, randint(1, 19), self.window)
                    self.pokemonArray.append(pokemon)
                    self.tablero[i].append(pokemon)
                elif randint(0, 6) == 1:
                    rock = Rock(i, j, self.window)
                    self.tablero[i].append(rock)
                else:
                    grass = Grass(i, j, self.window)
                    self.tablero[i].append(grass)

        for i in range(10):
            for j in range(10):
                self.tablero[i][j].label.grid(
                    column=self.tablero[i][j].x, row=self.tablero[i][j].y)

        self.window.after(500, self.findPokemon)
        self.window.mainloop()

    def findPokemon(self):

        def Move(trainer):
            def rightMove(leaveBean=False):
                if leaveBean:
                    # self.tablero[trainer.y][trainer.x] = Bean(trainer.y, trainer.y, self.window)
                    self.tablero[trainer.y][trainer.x + 1] = Trainer(
                        trainer.y, trainer.x + 1, self.window, 'pokeball1')
                else:
                    self.tablero[trainer.y][trainer.x + 1] = Trainer(
                        trainer.y, trainer.x + 1, self.window, 'pokeball2')

                self.tablero[trainer.y][trainer.x] = Grass(
                    trainer.y, trainer.x, self.window)
                self.tablero[trainer.y][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y)
                self.tablero[trainer.y][trainer.x + 1].label.grid(
                    column=trainer.x + 1, row=trainer.y)
                trainer.x += 1

            def leftMove(leaveBean=False):
                if leaveBean:
                    # self.tablero[trainer.y][trainer.x] = Bean(trainer.y, trainer.y, self.window)
                    self.tablero[trainer.y][trainer.x - 1] = Trainer(
                        trainer.y, trainer.x - 1, self.window, 'pokeball1')
                else:
                    self.tablero[trainer.y][trainer.x - 1] = Trainer(
                        trainer.y, trainer.x - 1, self.window, 'pokeball2')

                self.tablero[trainer.y][trainer.x] = Grass(
                    trainer.y, trainer.x, self.window)
                self.tablero[trainer.y][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y)
                self.tablero[trainer.y][trainer.x - 1].label.grid(
                    column=trainer.x - 1, row=trainer.y)
                trainer.x -= 1

            def downMove(leaveBean=False):
                if leaveBean:
                    # self.tablero[trainer.y][trainer.x] = Bean(trainer.y, trainer.y, self.window)
                    self.tablero[trainer.y + 1][trainer.x] = Trainer(
                        trainer.y + 1, trainer.x, self.window, 'pokeball1')
                else:
                    self.tablero[trainer.y + 1][trainer.x] = Trainer(
                        trainer.y + 1, trainer.x, self.window, 'pokeball2')

                self.tablero[trainer.y][trainer.x] = Grass(
                    trainer.y, trainer.x, self.window)
                self.tablero[trainer.y][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y)
                self.tablero[trainer.y + 1][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y + 1)
                trainer.y += 1

            def upMove(leaveBean=False):
                if leaveBean:
                    # self.tablero[trainer.y][trainer.x] = Bean(trainer.y, trainer.y, self.window)
                    self.tablero[trainer.y - 1][trainer.x] = Trainer(
                        trainer.y - 1, trainer.x, self.window, 'pokeball1')
                else:
                    self.tablero[trainer.y - 1][trainer.x] = Trainer(
                        trainer.y - 1, trainer.x, self.window, 'pokeball2')

                self.tablero[trainer.y][trainer.x] = Grass(
                    trainer.y, trainer.x, self.window)
                self.tablero[trainer.y][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y)
                self.tablero[trainer.y - 1][trainer.x].label.grid(
                    column=trainer.x, row=trainer.y - 1)
                trainer.y -= 1

            def isPokemonClose():
                if trainer.x < self.size - 1 and self.tablero[trainer.y][trainer.x + 1].name == 'Pokemon':
                    return 'right'
                elif trainer.x > 0 and self.tablero[trainer.y][trainer.x - 1].name == 'Pokemon':
                    return 'left'
                elif trainer.y < self.size - 1 and self.tablero[trainer.y + 1][trainer.x].name == 'Pokemon':
                    return 'down'
                elif trainer.y > 0 and self.tablero[trainer.y - 1][trainer.x].name == 'Pokemon':
                    return 'up'

            def wayWithoutObstacle():
                # Same walkability test as in AObject: a neighbor is
                # open unless it is a Rock, the Van, or off the board.
                def walkable(cell):
                    return cell.name != 'Rock' and cell.name != 'Van'

                obstacles = {}
                obstacles['left'] = trainer.x > 0 and walkable(self.tablero[trainer.y][trainer.x - 1])
                obstacles['right'] = trainer.x < self.size - 1 and walkable(self.tablero[trainer.y][trainer.x + 1])
                obstacles['up'] = trainer.y > 0 and walkable(self.tablero[trainer.y - 1][trainer.x])
                obstacles['down'] = trainer.y < self.size - 1 and walkable(self.tablero[trainer.y + 1][trainer.x])
                return obstacles

            def chooseWay(obstacles):
                # draw random directions until an open one comes up;
                # recurses forever if the trainer is completely boxed in
                choose = randint(0, 3)
                if choose == 0 and obstacles['left']:
                    return 'left'
                elif choose == 1 and obstacles['right']:
                    return 'right'
                elif choose == 2 and obstacles['up']:
                    return 'up'
                elif choose == 3 and obstacles['down']:
                    return 'down'
                else:
                    return chooseWay(obstacles)

            def backToVan():

                def chooseBackWay():
                    min = abs(trainer.x + 1 - self.van.x) + \
                        abs(trainer.y - self.van.y)
                    if (abs(trainer.x - 1 - self.van.x) + abs(trainer.y - self.van.y) < min) and wayWithoutObstacle()['left'] and isPokemonClose() != 'left':
                        return 'left'
                    elif (abs(trainer.x - self.van.x) + abs(trainer.y + 1 - self.van.y) < min) and wayWithoutObstacle()['down'] and isPokemonClose() != 'down':
                        return 'down'
                    elif (abs(trainer.x - self.van.x) + abs(trainer.y - 1 - self.van.y) < min) and wayWithoutObstacle()['up'] and isPokemonClose() != 'up':
                        return 'up'
                    elif wayWithoutObstacle()['right'] and isPokemonClose() != 'right':
                        return 'right'
                    else:
                        return None

                def isVanClose():
                    if self.trainer.x < self.size - 1 and self.tablero[trainer.y][trainer.x + 1].name == 'Van':
                        return True
                    if self.trainer.x > 0 and self.tablero[trainer.y][trainer.x - 1].name == 'Van':
                        return True
                    if self.trainer.y < self.size - 1 and self.tablero[trainer.y + 1][trainer.x].name == 'Van':
                        return True
                    if self.trainer.y > 0 and self.tablero[trainer.y - 1][trainer.x].name == 'Van':
                        return True
                    return False

                pokemonGotcha(True)
                try:
                    if isVanClose():
                        pokemonGotcha(False)
                    elif chooseBackWay() == 'right':
                        rightMove(True)
                    elif chooseBackWay() == 'left':
                        leftMove(True)
                    elif chooseBackWay() == 'down':
                        downMove(True)
                    elif chooseBackWay() == 'up':
                        upMove(True)
                except Exception as error:
                    print(error)

            def pokemonGotcha(gotIt):
                self.trainer.back = gotIt
                self.trainer.image = PhotoImage(file='images/pokeball1.png')
                self.trainer.label.config(image=self.trainer.image)

            self.a = AObject(self.trainer, self.van,
                             self.pokemonArray, self.tablero)
            # print(self.a.openQ, self.a.closeQ)

        Move(self.trainer)
        self.window.after(500, self.findPokemon)


def main():
    tierra = Tablero(10)


# x = j | y = i
if __name__ == '__main__':
    main()
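# Minimal standalone illustration (hypothetical coordinates) of the
# f = g + h scoring the search above uses, with Manhattan distance as
# the heuristic.
def manhattan(x1, y1, x2, y2):
    return abs(x1 - x2) + abs(y1 - y2)

start, goal, node = (0, 0), (7, 3), (2, 1)
g = manhattan(node[0], node[1], start[0], start[1])  # 3: distance from start
h = manhattan(node[0], node[1], goal[0], goal[1])    # 7: estimated distance to goal
print('f(n) =', g + h)                               # 10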
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling

from .. import models as _models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    T = TypeVar('T')
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

class InterfaceEndpointsOperations(object):
    """InterfaceEndpointsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _delete_initial(
        self,
        resource_group_name,  # type: str
        interface_endpoint_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'}  # type: ignore

    def begin_delete(
        self,
        resource_group_name,  # type: str
        interface_endpoint_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified interface endpoint.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param interface_endpoint_name: The name of the interface endpoint.
        :type interface_endpoint_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                interface_endpoint_name=interface_endpoint_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'}  # type: ignore

    def get(
        self,
        resource_group_name,  # type: str
        interface_endpoint_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.InterfaceEndpoint"
        """Gets the specified interface endpoint by resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param interface_endpoint_name: The name of the interface endpoint.
        :type interface_endpoint_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: InterfaceEndpoint, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_02_01.models.InterfaceEndpoint
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InterfaceEndpoint"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'}  # type: ignore

    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        interface_endpoint_name,  # type: str
        parameters,  # type: "_models.InterfaceEndpoint"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.InterfaceEndpoint"
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InterfaceEndpoint"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'InterfaceEndpoint')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'}  # type: ignore

    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        interface_endpoint_name,  # type: str
        parameters,  # type: "_models.InterfaceEndpoint"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.InterfaceEndpoint"]
        """Creates or updates an interface endpoint in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param interface_endpoint_name: The name of the interface endpoint.
        :type interface_endpoint_name: str
        :param parameters: Parameters supplied to the create or update interface endpoint operation.
        :type parameters: ~azure.mgmt.network.v2019_02_01.models.InterfaceEndpoint
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either InterfaceEndpoint or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_02_01.models.InterfaceEndpoint]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InterfaceEndpoint"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                interface_endpoint_name=interface_endpoint_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'}  # type: ignore

    def list(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.InterfaceEndpointListResult"]
        """Gets all interface endpoints in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either InterfaceEndpointListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.InterfaceEndpointListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InterfaceEndpointListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('InterfaceEndpointListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints'}  # type: ignore

    def list_by_subscription(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.InterfaceEndpointListResult"]
        """Gets all interface endpoints in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either InterfaceEndpointListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_02_01.models.InterfaceEndpointListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.InterfaceEndpointListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-02-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_subscription.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('InterfaceEndpointListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/interfaceEndpoints'}  # type: ignore
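# Usage sketch (not part of the generated file): how this operations
# group is typically reached through the versioned management client.
# Assumes azure-identity and azure-mgmt-network are installed; the
# resource names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient

network_client = NetworkManagementClient(DefaultAzureCredential(), '<subscription-id>')

# `interface_endpoints` resolves to the InterfaceEndpointsOperations
# group defined above for api-version 2019-02-01.
for endpoint in network_client.interface_endpoints.list('<resource-group>'):
    print(endpoint.name)

poller = network_client.interface_endpoints.begin_delete('<resource-group>', '<endpoint-name>')
poller.result()  # block until the long-running delete finishes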
import sys, os, re
import time

sys.path.append("lib")
import utils

import requests
from bs4 import BeautifulSoup

tail_number_records = utils.read_json_lines_file('data/tail_numbers.jsonl')

aircraft_records = []
# Loop through the tail numbers, fetching
for tail_number_record in tail_number_records:
    time.sleep(0.1)  # essential to sleep FIRST in loop or you will flood sites

    # Parameterize the URL with the tail number
    BASE_URL = 'http://registry.faa.gov/aircraftinquiry/NNum_Results.aspx?NNumbertxt={}'
    tail_number = tail_number_record['TailNum']
    url = BASE_URL.format(tail_number)

    # Fetch the page, parse the HTML
    r = requests.get(url)

    html = r.text
    soup = BeautifulSoup(html, 'html.parser')

    # The table structure is constant for all pages that contain data
    try:
        aircraft_description = soup.find_all('table')[4]
        craft_tds = aircraft_description.find_all('td')
        serial_number = craft_tds[1].text.strip()
        manufacturer = craft_tds[5].text.strip()
        model = craft_tds[9].text.strip()
        mfr_year = craft_tds[25].text.strip()

        registered_owner = soup.find_all('table')[5]
        reg_tds = registered_owner.find_all('td')
        owner = reg_tds[1].text.strip()
        owner_state = reg_tds[9].text.strip()

        airworthiness = soup.find_all('table')[6]
        worthy_tds = airworthiness.find_all('td')
        engine_manufacturer = worthy_tds[1].text.strip()
        engine_model = worthy_tds[5].text.strip()

        aircraft_record = {
            'TailNum': tail_number,
            'serial_number': serial_number,
            'manufacturer': manufacturer,
            'model': model,
            'mfr_year': mfr_year,
            'owner': owner,
            'owner_state': owner_state,
            'engine_manufacturer': engine_manufacturer,
            'engine_model': engine_model,
        }
        aircraft_records.append(
            aircraft_record
        )
        print(aircraft_record)

    except IndexError as e:
        print("Missing {} record: {}".format(tail_number, e))

utils.write_json_lines_file(
    aircraft_records, 'data/faa_tail_number_inquiry.jsonl'
)
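# Hedged sketch of the local `utils` helpers the script relies on,
# assuming they are thin JSON-Lines wrappers (one JSON object per
# line); the real lib/utils.py may differ.
import json

def read_json_lines_file(path):
    with open(path) as f:
        return [json.loads(line) for line in f if line.strip()]

def write_json_lines_file(records, path):
    with open(path, 'w') as f:
        for record in records:
            f.write(json.dumps(record) + '\n')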
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

from ... import models as _models
from ..._vendor import _convert_request
from ...operations._scope_maps_operations import build_create_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

class ScopeMapsOperations:
    """ScopeMapsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.containerregistry.v2020_11_01_preview.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        **kwargs: Any
    ) -> "_models.ScopeMap":
        """Gets the properties of the specified scope map.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param scope_map_name: The name of the scope map.
        :type scope_map_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ScopeMap, or the result of cls(response)
        :rtype: ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ScopeMap"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview")  # type: str

        request = build_get_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ScopeMap', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    async def _create_initial(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_create_parameters: "_models.ScopeMap",
        **kwargs: Any
    ) -> "_models.ScopeMap":
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ScopeMap"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview")  # type: str
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(scope_map_create_parameters, 'ScopeMap')

        request = build_create_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._create_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    @distributed_trace_async
    async def begin_create(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_create_parameters: "_models.ScopeMap",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ScopeMap"]:
        """Creates a scope map for a container registry with the specified parameters.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param scope_map_name: The name of the scope map.
        :type scope_map_name: str
        :param scope_map_create_parameters: The parameters for creating a scope map.
        :type scope_map_create_parameters:
         ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ScopeMap or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2020-11-01-preview")  # type: str
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ScopeMap"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._create_initial(
                resource_group_name=resource_group_name,
                registry_name=registry_name,
                scope_map_name=scope_map_name,
                scope_map_create_parameters=scope_map_create_parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('ScopeMap', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized


        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore

    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        **kwargs: Any
    ) -> None:
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview")  # type: str

        request = build_delete_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
_convert_request(request)NEWLINE request.url = self._client.format_url(request.url)NEWLINENEWLINE pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-accessNEWLINE request,NEWLINE stream=False,NEWLINE **kwargsNEWLINE )NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200, 202, 204]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, None, {})NEWLINENEWLINE _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"} # type: ignoreNEWLINENEWLINENEWLINE @distributed_trace_asyncNEWLINE async def begin_delete( # pylint: disable=inconsistent-return-statementsNEWLINE self,NEWLINE resource_group_name: str,NEWLINE registry_name: str,NEWLINE scope_map_name: str,NEWLINE **kwargs: AnyNEWLINE ) -> AsyncLROPoller[None]:NEWLINE """Deletes a scope map from a container registry.NEWLINENEWLINE :param resource_group_name: The name of the resource group to which the container registryNEWLINE belongs.NEWLINE :type resource_group_name: strNEWLINE :param registry_name: The name of the container registry.NEWLINE :type registry_name: strNEWLINE :param scope_map_name: The name of the scope map.NEWLINE :type scope_map_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :keyword str continuation_token: A continuation token to restart a poller from a saved state.NEWLINE :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False forNEWLINE this operation to not poll, or pass in your own initialized polling object for a personalNEWLINE polling strategy.NEWLINE :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethodNEWLINE :keyword int polling_interval: Default waiting time between two polls for LRO operations if noNEWLINE Retry-After header is present.NEWLINE :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)NEWLINE :rtype: ~azure.core.polling.AsyncLROPoller[None]NEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: strNEWLINE polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]NEWLINE cls = kwargs.pop('cls', None) # type: ClsType[None]NEWLINE lro_delay = kwargs.pop(NEWLINE 'polling_interval',NEWLINE self._config.polling_intervalNEWLINE )NEWLINE cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]NEWLINE if cont_token is None:NEWLINE raw_result = await self._delete_initial(NEWLINE resource_group_name=resource_group_name,NEWLINE registry_name=registry_name,NEWLINE scope_map_name=scope_map_name,NEWLINE api_version=api_version,NEWLINE cls=lambda x,y,z: x,NEWLINE **kwargsNEWLINE )NEWLINE kwargs.pop('error_map', None)NEWLINENEWLINE def get_long_running_output(pipeline_response):NEWLINE if cls:NEWLINE return cls(pipeline_response, None, {})NEWLINENEWLINENEWLINE if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)NEWLINE elif polling is False: polling_method = AsyncNoPolling()NEWLINE else: polling_method = pollingNEWLINE if cont_token:NEWLINE return AsyncLROPoller.from_continuation_token(NEWLINE polling_method=polling_method,NEWLINE continuation_token=cont_token,NEWLINE client=self._client,NEWLINE deserialization_callback=get_long_running_outputNEWLINE )NEWLINE return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)NEWLINENEWLINE begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"} # type: ignoreNEWLINENEWLINE async def _update_initial(NEWLINE self,NEWLINE resource_group_name: str,NEWLINE registry_name: str,NEWLINE scope_map_name: str,NEWLINE scope_map_update_parameters: "_models.ScopeMapUpdateParameters",NEWLINE **kwargs: AnyNEWLINE ) -> "_models.ScopeMap":NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINENEWLINE api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: strNEWLINE content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]NEWLINENEWLINE _json = self._serialize.body(scope_map_update_parameters, 'ScopeMapUpdateParameters')NEWLINENEWLINE request = build_update_request_initial(NEWLINE subscription_id=self._config.subscription_id,NEWLINE resource_group_name=resource_group_name,NEWLINE registry_name=registry_name,NEWLINE scope_map_name=scope_map_name,NEWLINE api_version=api_version,NEWLINE content_type=content_type,NEWLINE json=_json,NEWLINE template_url=self._update_initial.metadata['url'],NEWLINE )NEWLINE request = _convert_request(request)NEWLINE request.url = self._client.format_url(request.url)NEWLINENEWLINE pipeline_response = await self._client._pipeline.run( # pylint: 
disable=protected-accessNEWLINE request,NEWLINE stream=False,NEWLINE **kwargsNEWLINE )NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200, 201]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE if response.status_code == 200:NEWLINE deserialized = self._deserialize('ScopeMap', pipeline_response)NEWLINENEWLINE if response.status_code == 201:NEWLINE deserialized = self._deserialize('ScopeMap', pipeline_response)NEWLINENEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINENEWLINE return deserializedNEWLINENEWLINE _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"} # type: ignoreNEWLINENEWLINENEWLINE @distributed_trace_asyncNEWLINE async def begin_update(NEWLINE self,NEWLINE resource_group_name: str,NEWLINE registry_name: str,NEWLINE scope_map_name: str,NEWLINE scope_map_update_parameters: "_models.ScopeMapUpdateParameters",NEWLINE **kwargs: AnyNEWLINE ) -> AsyncLROPoller["_models.ScopeMap"]:NEWLINE """Updates a scope map with the specified parameters.NEWLINENEWLINE :param resource_group_name: The name of the resource group to which the container registryNEWLINE belongs.NEWLINE :type resource_group_name: strNEWLINE :param registry_name: The name of the container registry.NEWLINE :type registry_name: strNEWLINE :param scope_map_name: The name of the scope map.NEWLINE :type scope_map_name: strNEWLINE :param scope_map_update_parameters: The parameters for updating a scope map.NEWLINE :type scope_map_update_parameters:NEWLINE ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMapUpdateParametersNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :keyword str continuation_token: A continuation token to restart a poller from a saved state.NEWLINE :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False forNEWLINE this operation to not poll, or pass in your own initialized polling object for a personalNEWLINE polling strategy.NEWLINE :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethodNEWLINE :keyword int polling_interval: Default waiting time between two polls for LRO operations if noNEWLINE Retry-After header is present.NEWLINE :return: An instance of AsyncLROPoller that returns either ScopeMap or the result ofNEWLINE cls(response)NEWLINE :rtype:NEWLINE ~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap]NEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: strNEWLINE content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]NEWLINE polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]NEWLINE lro_delay = kwargs.pop(NEWLINE 'polling_interval',NEWLINE self._config.polling_intervalNEWLINE )NEWLINE cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]NEWLINE if cont_token is None:NEWLINE raw_result = await self._update_initial(NEWLINE resource_group_name=resource_group_name,NEWLINE registry_name=registry_name,NEWLINE scope_map_name=scope_map_name,NEWLINE scope_map_update_parameters=scope_map_update_parameters,NEWLINE api_version=api_version,NEWLINE content_type=content_type,NEWLINE cls=lambda x,y,z: x,NEWLINE **kwargsNEWLINE )NEWLINE kwargs.pop('error_map', None)NEWLINENEWLINE def get_long_running_output(pipeline_response):NEWLINE response = pipeline_response.http_responseNEWLINE deserialized = self._deserialize('ScopeMap', pipeline_response)NEWLINE if cls:NEWLINE return cls(pipeline_response, deserialized, {})NEWLINE return deserializedNEWLINENEWLINENEWLINE if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)NEWLINE elif polling is False: polling_method = AsyncNoPolling()NEWLINE else: polling_method = pollingNEWLINE if cont_token:NEWLINE return AsyncLROPoller.from_continuation_token(NEWLINE polling_method=polling_method,NEWLINE continuation_token=cont_token,NEWLINE client=self._client,NEWLINE deserialization_callback=get_long_running_outputNEWLINE )NEWLINE return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)NEWLINENEWLINE begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"} # type: ignoreNEWLINENEWLINE @distributed_traceNEWLINE def list(NEWLINE self,NEWLINE resource_group_name: str,NEWLINE registry_name: str,NEWLINE **kwargs: AnyNEWLINE ) -> AsyncIterable["_models.ScopeMapListResult"]:NEWLINE """Lists all the scope maps for the specified container registry.NEWLINENEWLINE :param resource_group_name: The name of the resource group to which the container registryNEWLINE belongs.NEWLINE :type resource_group_name: strNEWLINE :param registry_name: The name of the container registry.NEWLINE :type registry_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: An iterator like instance of either ScopeMapListResult or the result of cls(response)NEWLINE :rtype:NEWLINE ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMapListResult]NEWLINE :raises: 
~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: strNEWLINENEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMapListResult"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINE def prepare_request(next_link=None):NEWLINE if not next_link:NEWLINE NEWLINE request = build_list_request(NEWLINE subscription_id=self._config.subscription_id,NEWLINE resource_group_name=resource_group_name,NEWLINE registry_name=registry_name,NEWLINE api_version=api_version,NEWLINE template_url=self.list.metadata['url'],NEWLINE )NEWLINE request = _convert_request(request)NEWLINE request.url = self._client.format_url(request.url)NEWLINENEWLINE else:NEWLINE NEWLINE request = build_list_request(NEWLINE subscription_id=self._config.subscription_id,NEWLINE resource_group_name=resource_group_name,NEWLINE registry_name=registry_name,NEWLINE api_version=api_version,NEWLINE template_url=next_link,NEWLINE )NEWLINE request = _convert_request(request)NEWLINE request.url = self._client.format_url(request.url)NEWLINE request.method = "GET"NEWLINE return requestNEWLINENEWLINE async def extract_data(pipeline_response):NEWLINE deserialized = self._deserialize("ScopeMapListResult", pipeline_response)NEWLINE list_of_elem = deserialized.valueNEWLINE if cls:NEWLINE list_of_elem = cls(list_of_elem)NEWLINE return deserialized.next_link or None, AsyncList(list_of_elem)NEWLINENEWLINE async def get_next(next_link=None):NEWLINE request = prepare_request(next_link)NEWLINENEWLINE pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-accessNEWLINE request,NEWLINE stream=False,NEWLINE **kwargsNEWLINE )NEWLINE response = pipeline_response.http_responseNEWLINENEWLINE if response.status_code not in [200]:NEWLINE map_error(status_code=response.status_code, response=response, error_map=error_map)NEWLINE raise HttpResponseError(response=response, error_format=ARMErrorFormat)NEWLINENEWLINE return pipeline_responseNEWLINENEWLINENEWLINE return AsyncItemPaged(NEWLINE get_next, extract_dataNEWLINE )NEWLINE list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps"} # type: ignoreNEWLINE
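
# --- Usage sketch (not part of the generated SDK above): a minimal, hedged
# example of driving these scope-map operations through the async management
# client. The subscription, resource group, registry, and action strings are
# placeholders; assumes azure-identity and azure-mgmt-containerregistry are
# installed and the caller has contributor rights on the registry.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.containerregistry.aio import ContainerRegistryManagementClient
from azure.mgmt.containerregistry.v2020_11_01_preview.models import ScopeMap


async def main():
    async with DefaultAzureCredential() as credential:
        async with ContainerRegistryManagementClient(credential, "<subscription-id>") as client:
            # begin_create returns an AsyncLROPoller; awaiting .result() polls
            # the long-running operation until the scope map is provisioned.
            poller = await client.scope_maps.begin_create(
                resource_group_name="my-rg",
                registry_name="myregistry",
                scope_map_name="my-scope-map",
                scope_map_create_parameters=ScopeMap(
                    description="read-only access to one repository",
                    actions=["repositories/hello-world/content/read"],
                ),
            )
            scope_map = await poller.result()
            print(scope_map.name, scope_map.actions)


asyncio.run(main())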
import os

# NOTE: BINARIES_PATHS is expected to already exist in the namespace of the
# OpenCV loader script that executes this config file; the local build path
# below is prepended to it.
BINARIES_PATHS = [
    '/home/ttz/git/opencv-4.3.0/build/lib'
] + BINARIES_PATHS
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Feb. 16 2018

@author: wangc
"""
#for future compatibility with Python 3--------------------------------------------------------------
from __future__ import division, print_function, unicode_literals, absolute_import
import warnings
warnings.simplefilter('default', DeprecationWarning)
#End compatibility block for Python 3----------------------------------------------------------------

#External Modules------------------------------------------------------------------------------------
import math
import scipy
import numpy as np
import ast
import scipy.spatial.distance as spatialDistance
#External Modules End--------------------------------------------------------------------------------

#Internal Modules------------------------------------------------------------------------------------
from .Metric import Metric
from utils import utils, InputData
#Internal Modules End--------------------------------------------------------------------------------

class ScipyMetric(Metric):
  """
    ScipyMetric metrics which can be employed for both pointSets and historySets
  """
  availMetrics = {}
  # Distance functions between two numeric vectors
  availMetrics['paired_distance'] = {}
  availMetrics['paired_distance']['braycurtis'] = spatialDistance.braycurtis
  availMetrics['paired_distance']['canberra'] = spatialDistance.canberra
  availMetrics['paired_distance']['correlation'] = spatialDistance.correlation
  availMetrics['paired_distance']['minkowski'] = spatialDistance.minkowski
  # Distance functions between two boolean vectors
  availMetrics['boolean'] = {}
  availMetrics['boolean']['rogerstanimoto'] = spatialDistance.rogerstanimoto
  availMetrics['boolean']['dice'] = spatialDistance.dice
  availMetrics['boolean']['hamming'] = spatialDistance.hamming
  availMetrics['boolean']['jaccard'] = spatialDistance.jaccard
  availMetrics['boolean']['kulsinski'] = spatialDistance.kulsinski
  availMetrics['boolean']['russellrao'] = spatialDistance.russellrao
  availMetrics['boolean']['sokalmichener'] = spatialDistance.sokalmichener
  availMetrics['boolean']['sokalsneath'] = spatialDistance.sokalsneath
  availMetrics['boolean']['yule'] = spatialDistance.yule

  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for
      class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for
        specifying input of cls.
    """
    inputSpecification = super(ScipyMetric, cls).getInputSpecification()
    inputSpecification.addSub(InputData.parameterInputFactory("metricType", contentType=InputData.StringType), quantity=InputData.Quantity.one)
    inputSpecification.addSub(InputData.parameterInputFactory("w", contentType=InputData.FloatListType), quantity=InputData.Quantity.zero_to_one)
    inputSpecification.addSub(InputData.parameterInputFactory("p", contentType=InputData.FloatType), quantity=InputData.Quantity.zero_to_one)

    return inputSpecification

  def __init__(self):
    """
      Constructor
      @ In, None
      @ Out, None
    """
    Metric.__init__(self)
    # The type of the given metric: None, or a list of two elements where the first element
    # should be in availMetrics.keys() and the second element should be in
    # availMetrics[firstElement].keys()
    self.metricType = None

  def _localReadMoreXML(self, xmlNode):
    """
      Method that reads the portion of the xml input that belongs to this specialized class
      and initializes internal parameters
      @ In, xmlNode, xml.etree.Element, Xml element node
      @ Out, None
    """
    self.distParams = {}
    paramInput = ScipyMetric.getInputSpecification()()
    paramInput.parseNode(xmlNode)
    for child in paramInput.subparts:
      if child.getName() == "metricType":
        self.metricType = list(elem.strip() for elem in child.value.split('|'))
        if len(self.metricType) != 2:
          self.raiseAnError(IOError, "Metric type: '", child.value, "' is not correct, please check the user manual for the correct metric type!")
      else:
        self.distParams[child.getName()] = child.value

    if self.metricType[0] not in self.__class__.availMetrics.keys() or self.metricType[1] not in self.__class__.availMetrics[self.metricType[0]].keys():
      self.raiseAnError(IOError, "Metric '", self.name, "' with metricType '", self.metricType[0], "|", self.metricType[1], "' is not valid!")

  def __evaluateLocal__(self, x, y, weights=None, axis=0, **kwargs):
    """
      This method computes the difference between two points x and y based on the given metric
      @ In, x, 1-D numpy.ndarray, array containing data of x.
      @ In, y, 1-D numpy.ndarray, array containing data of y.
      @ In, weights, array_like (numpy.array or list), optional, weights associated with the metric method
      @ In, axis, integer, optional, default is 0, not used in this metric
      @ In, kwargs, dict, dictionary of parameters characteristic of each metric
      @ Out, value, float, metric result
    """
    if isinstance(x, np.ndarray) and isinstance(y, np.ndarray):
      # NOTE: the original wrapped the condition and the message in one parenthesized tuple
      # (`assert(cond, msg)`), which always evaluates truthy; the form below is the intended check.
      assert x.shape == y.shape, "Input data x, y should have the same shape!"
      # TODO: weights are supported in scipy.spatial.distance for many distance metrics in v1.0.0
      # when we switch to scipy 1.0.0, we can enable weights in our metrics calculations
      sv = str(scipy.__version__).split('.')
      if int(sv[0]) > 0:
        if weights is not None and 'w' not in self.distParams.keys():
          self.distParams['w'] = weights
        # FIXME: In Scipy version 1.1.0, the functions scipy.spatial.distance.canberra and
        # scipy.spatial.distance.sokalmichener will accept the weights, and the calculated results from
        # these functions will be affected by the normalization of the weights. The following is disabled for
        # this purpose --- wangc July 17, 2018
        # For future development, please pay attention to canberra, minkowski, and sokalmichener metrics
        #if 'w' in self.distParams.keys():
          # Normalize weights, since the methods that exist in Scipy use unnormalized weights
          #self.distParams['w'] = np.asarray(self.distParams['w'])/np.sum(self.distParams['w'])
      else:
        if 'w' in self.distParams.keys():
          self.raiseAWarning("Weights will not be used, since weights provided with keyword 'w' are not supported by your current version of scipy!")
          self.distParams.pop('w')
      dictTemp = utils.mergeDictionaries(kwargs, self.distParams)
      try:
        value = self.__class__.availMetrics[self.metricType[0]][self.metricType[1]](x, y, **dictTemp)
      except TypeError as e:
        self.raiseAWarning('There are some unexpected keyword arguments found in Metric with type', self.metricType[1])
        self.raiseAnError(TypeError, 'Input parameters error:\n', str(e), '\n')
    else:
      self.raiseAnError(IOError, "Input data type is not correct!")

    return value
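
# --- Illustration (not part of the RAVEN source above): the <metricType> string
# "paired_distance|minkowski" resolves to availMetrics['paired_distance']['minkowski'],
# i.e. scipy.spatial.distance.minkowski, and extra XML nodes such as <p> are forwarded
# as keyword arguments. A minimal standalone sketch of that lookup and call:
import numpy as np
import scipy.spatial.distance as spatialDistance

x = np.array([0.0, 1.0, 2.0])
y = np.array([1.0, 1.0, 4.0])

metric_type = "paired_distance|minkowski".split('|')
dist_params = {'p': 2.0}  # would come from the <p> node in the XML input

avail = {'paired_distance': {'minkowski': spatialDistance.minkowski}}
func = avail[metric_type[0]][metric_type[1]]
print(func(x, y, **dist_params))  # Euclidean distance: sqrt(1 + 0 + 4) ~= 2.236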
def DetectCollision(characterPosX, characterPosY, character, obstaclePosX, obstaclePosY, obstacle):
    MARGIN_ERROR = 18  # tolerance, in image pixels, to forgive transparent sprite borders
    collision = False

    print("Character:", characterPosX, characterPosY, "\tObstacle", obstaclePosX, obstaclePosY)
    # Axis-aligned bounding-box overlap test, shrunk by MARGIN_ERROR on each side.
    # The Y check now mirrors the X check: the original compared against obstaclePosY
    # alone, ignoring the obstacle's height (assumed here to be exposed by
    # obstacle.getHeight(), symmetric with the getWidth() call above).
    if (characterPosX + MARGIN_ERROR < obstaclePosX + obstacle.getWidth()
            and characterPosX + character[0] > obstaclePosX + MARGIN_ERROR
            and characterPosY + MARGIN_ERROR < obstaclePosY + obstacle.getHeight()
            and characterPosY + character[1] > obstaclePosY + MARGIN_ERROR):
        collision = True

    return collision
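
# --- Quick check (illustrative, not from the original file): with the 18 px
# margin, a 40x40 character at (100, 100) should overlap a 50x50 obstacle at
# (120, 110) but not one at (200, 200). The sizes are hypothetical, and _Box
# is a stand-in for whatever object exposes getWidth()/getHeight().
class _Box:
    def __init__(self, w, h):
        self._w, self._h = w, h

    def getWidth(self):
        return self._w

    def getHeight(self):
        return self._h


print(DetectCollision(100, 100, (40, 40), 120, 110, _Box(50, 50)))  # True
print(DetectCollision(100, 100, (40, 40), 200, 200, _Box(50, 50)))  # False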
import matplotlib.pyplot as plt


def analyze(context, perf):
    ax1 = plt.subplot(211)
    perf.portfolio_value.plot(ax=ax1)
    ax2 = plt.subplot(212, sharex=ax1)
    perf.AAPL.plot(ax=ax2)
    plt.gcf().set_size_inches(18, 8)
    plt.show()
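
# --- Usage sketch (assumed context, not in the original file): this analyze()
# signature matches the hook zipline calls with the algorithm context and the
# performance DataFrame after a backtest. The dates, symbol, and strategy are
# placeholders; assumes zipline is installed and a data bundle has been ingested.
import pandas as pd
from zipline import run_algorithm
from zipline.api import order_target_percent, record, symbol


def initialize(context):
    context.asset = symbol('AAPL')


def handle_data(context, data):
    order_target_percent(context.asset, 1.0)
    record(AAPL=data.current(context.asset, 'price'))  # makes perf.AAPL available


run_algorithm(
    start=pd.Timestamp('2016-01-04', tz='utc'),
    end=pd.Timestamp('2016-12-30', tz='utc'),
    initialize=initialize,
    handle_data=handle_data,
    capital_base=100_000,
    analyze=analyze,  # the function above
)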
"""NEWLINEModule: 'ujson' on micropython-v1.15-esp8266NEWLINE"""NEWLINE# MCU: {'ver': 'v1.15', 'port': 'esp8266', 'arch': 'xtensa', 'sysname': 'esp8266', 'release': '1.15', 'name': 'micropython', 'mpy': 9733, 'version': '1.15', 'machine': 'ESP module with ESP8266', 'build': '', 'nodename': 'esp8266', 'platform': 'esp8266', 'family': 'micropython'}NEWLINE# Stubber: 1.5.4NEWLINEfrom typing import AnyNEWLINENEWLINENEWLINEdef dump(*args, **kwargs) -> Any:NEWLINE ...NEWLINENEWLINENEWLINEdef dumps(*args, **kwargs) -> Any:NEWLINE ...NEWLINENEWLINENEWLINEdef load(*args, **kwargs) -> Any:NEWLINE ...NEWLINENEWLINENEWLINEdef loads(*args, **kwargs) -> Any:NEWLINE ...NEWLINE
import torch.nn as nn
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import cv2


def color2hist(color):
    color = np.asarray(color).flatten()
    padding_width = 255 - np.max(color)
    color = plt.hist(color, bins=np.max(color))[0]
    color /= sum(color)
    color = np.asarray(color)
    color = np.pad(color, [0, padding_width], mode='constant', constant_values=0)

    return color


def img2hist(path, space='RGB'):
    # NOTE: the original assert only allowed 'RGB' and 'LAB' even though the branches
    # below also handle 'HSV' and 'YCbCr'/'YCrCb'; the check now matches all of them.
    assert space in ['RGB', 'LAB', 'HSV', 'YCbCr', 'YCrCb']
    image = cv2.imread(path)

    if space == 'RGB':
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        r, g, b = cv2.split(image)
        r = color2hist(r)
        g = color2hist(g)
        b = color2hist(b)

        return r, g, b

    elif space == 'LAB':
        image = cv2.cvtColor(image, cv2.COLOR_BGR2LAB)
        l, a, b = cv2.split(image)
        l = color2hist(l)
        a = color2hist(a)
        b = color2hist(b)

        return l, a, b

    elif space == 'HSV':
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        h, s, v = cv2.split(image)
        h = color2hist(h)
        s = color2hist(s)
        v = color2hist(v)

        return h, s, v

    elif space in ('YCbCr', 'YCrCb'):  # fixed: `space == 'YCbCr' or 'YCrCb'` was always truthy
        image = cv2.cvtColor(image, cv2.COLOR_BGR2YCrCb)
        Y, Cr, Cb = cv2.split(image)
        Y = color2hist(Y)
        Cr = color2hist(Cr)
        Cb = color2hist(Cb)

        return Y, Cr, Cb


def weight_init(module):
    class_name = module.__class__.__name__
    if class_name.find('Conv') != -1:
        module.weight.detach().normal_(0.0, 0.02)

    elif class_name.find('BatchNorm') != -1:
        module.weight.detach().normal_(1.0, 0.02)
        module.bias.detach().fill_(0.0)


def block(module, normalization=True, transpose=False, relu=True, dropout=False):
    layers = []

    if relu:
        layers.append(nn.ReLU(inplace=True))
    elif not relu:
        layers.append(nn.LeakyReLU(0.2, inplace=True))

    layers.append(module)

    if normalization:
        if transpose:
            layers.append(nn.BatchNorm2d(module.weight.size()[1]))
        elif not transpose:
            layers.append(nn.BatchNorm2d(module.weight.size()[0]))

    if dropout:
        layers.append(nn.Dropout2d(0.5, inplace=True))

    return nn.Sequential(*layers)


def get_grid_shape(opt):
    assert opt.patch_size in [1, 16, 70, 286]
    patch_to_grid_dict = {1: (256, 256), 16: (62, 62), 70: (30, 30), 286: (6, 6)}

    return (opt.batch_size, 1, *patch_to_grid_dict[opt.patch_size])


def adjust_dynamic_range(data, drange_in, drange_out):  # define a function for rescaling data
    if drange_in != drange_out:  # if the pixel-value ranges differ between input and output
        scale = (np.float32(drange_out[1]) - np.float32(drange_out[0]))/(np.float32(drange_in[1]) - np.float32(drange_in[0]))
        # calculate a scaling factor
        bias = (np.float32(drange_out[0]) - np.float32(drange_in[0])*scale)
        # calculate a bias
        data = data*scale + bias
        # change the input data based on the scaling factor and bias
    return data  # return the scaled data whose pixel values are within drange_out


def tensor2image(image_tensor):  # define a function for changing a torch.tensor to a numpy array before saving an image
    np_image = image_tensor.squeeze().cpu().float().numpy()
    # squeeze the input tensor (i.e. delete dimensions with value 1) and convert it to a cpu tensor (this is for
    # the case you use a GPU during training). Ensure the pixel values have float type and finally convert to a numpy array.
    if len(np_image.shape) == 2:  # if the array has only two dimensions (which means it is a gray image)
        pass  # pass without changing the order of the axes
    elif len(np_image.shape) == 3:  # if the array has three dimensions (which means it is a color (RGB) image)
        np_image = np.transpose(np_image, (1, 2, 0))  # change the order of the axes from (C, H, W) to (H, W, C)

    np_image = adjust_dynamic_range(np_image, drange_in=[-1., 1.], drange_out=[0, 255])  # scale the pixel values
    np_image = np.clip(np_image, 0, 255).astype(np.uint8)  # make its type uint8 so that you can save the image
    return np_image  # return the processed image


def save_image(image_tensor, path):  # define a function for saving a processed image
    np_image = tensor2image(image_tensor)  # change a torch.tensor to a numpy image
    pil_image = Image.fromarray(np_image)  # convert the numpy image to an Image object
    pil_image.save(path + '.png', mode='PNG')  # save the image with the given path and mode
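
# --- Usage sketch (not part of the original file): round-trip a fake generator
# output through tensor2image()/save_image(). The tensor shape and value range
# mimic a typical GAN output normalized to [-1, 1]; 'sample' is a placeholder path.
import torch

fake = torch.rand(1, 3, 64, 64) * 2.0 - 1.0  # values in [-1, 1], NCHW layout
np_img = tensor2image(fake)                  # -> (64, 64, 3) uint8 in [0, 255]
print(np_img.shape, np_img.dtype, np_img.min(), np_img.max())
save_image(fake, 'sample')                   # writes ./sample.png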
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import math

import torch
import torch.nn.functional as F

from fairseq import utils

from fairseq.criterions import FairseqCriterion, register_criterion


@register_criterion('ngram_language_loss')
class NgramLmLoss(FairseqCriterion):
    """
    Implementation for the loss used in masked language model (MLM) training.
    """
    def __init__(self, args, task):
        super().__init__(args, task)
        self.eps = args.label_smoothing
        self.disable_ngram_loss = args.disable_ngram_loss

    @staticmethod
    def add_args(parser):
        """Add criterion-specific arguments to the parser."""
        parser.add_argument(
            '--label-smoothing',
            default=0.,
            type=float,
            metavar='D',
            help='epsilon for label smoothing, 0 means no label smoothing')
        parser.add_argument('--disable-ngram-loss',
                            action='store_true',
                            help='only compute basic stat')

    def forward(self, model, sample, reduce=True):
        """Compute the loss for the given sample.
        Returns a tuple with three elements:
        1) the loss
        2) the sample size, which is used as the denominator for the gradient
        3) logging outputs to display while training
        """
        # compute MLM loss
        logits_list = model(**sample['net_input'], return_all_hiddens=False)[0]
        targets = model.get_targets(sample, [logits_list[0]])

        ngram = len(logits_list)
        # [B, ngram, T]
        expend_targets = targets.new_zeros(
            ngram, targets.size(0), targets.size(1)).fill_(self.padding_idx)
        for i in range(ngram):
            if i > 0 and self.disable_ngram_loss:
                break

            padding_targets = torch.zeros_like(targets).fill_(self.padding_idx)
            if 'target_idx' in sample:
                expend_targets[i, :, :] = torch.where(
                    sample['target_idx'] >= i, targets, padding_targets)
            else:
                expend_targets[i, :, :] = targets
        targets = expend_targets

        logits = torch.cat(logits_list, dim=0)

        lprobs = F.log_softmax(
            logits.view(-1, logits.size(-1)),
            dim=-1,
            dtype=torch.float32,
        )

        loss = F.nll_loss(
            lprobs,
            targets.view(-1),
            reduction='sum',
            ignore_index=self.padding_idx,
        )

        if self.eps > 0.:
            smooth_loss = -lprobs.sum(dim=-1, keepdim=True)
            non_pad_mask = targets.ne(self.padding_idx).view(-1)
            smooth_loss = smooth_loss[non_pad_mask]
            smooth_loss = smooth_loss.sum()

            eps_i = self.eps / lprobs.size(-1)
            loss = (1. - self.eps) * loss + eps_i * smooth_loss

        sample_size = targets.ne(self.padding_idx).int().sum().item()

        logging_output = {
            'loss': utils.item(loss.data) if reduce else loss.data,
            'ntokens': sample['ntokens'],
            'nsentences': sample['nsentences'],
            'sample_size': sample_size,
        }
        return loss, sample_size, logging_output

    @staticmethod
    def aggregate_logging_outputs(logging_outputs):
        """Aggregate logging outputs from data parallel training."""
        loss = sum(log.get('loss', 0) for log in logging_outputs)
        ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)
        nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)
        sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)

        agg_output = {
            'loss': loss / sample_size / math.log(2),
            'ntokens': ntokens,
            'nsentences': nsentences,
            'sample_size': sample_size,
        }
        return agg_output
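
# --- Standalone illustration (not from the fairseq source above): the label
# smoothing branch combines the NLL loss with the sum of all log-probabilities,
# loss = (1 - eps) * nll + (eps / V) * smooth, where V is the vocabulary size.
# Toy numbers, no padding tokens:
import torch
import torch.nn.functional as F

logits = torch.tensor([[2.0, 0.5, -1.0], [0.1, 1.5, 0.3]])  # [T, V], V = 3
targets = torch.tensor([0, 1])
eps = 0.1

lprobs = F.log_softmax(logits, dim=-1)
nll = F.nll_loss(lprobs, targets, reduction='sum')
smooth = -lprobs.sum()          # sum over all classes and positions (no pad mask needed here)
eps_i = eps / lprobs.size(-1)   # eps / V
loss = (1.0 - eps) * nll + eps_i * smooth
print(float(nll), float(loss))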
__all__ = [
    'dates',
    'dictionary',
    'greetings',
    'ircbot',
    'js',
    'listcommands',
    'locator',
    'pip',
    'repeats',
    'scheme',
    'storedresponses',
]
from unittest import TestCaseNEWLINEimport numpy as npNEWLINEimport osNEWLINEimport pickleNEWLINEimport loggingNEWLINENEWLINEfrom qcodes.data.data_array import DataArrayNEWLINEfrom qcodes.data.io import DiskIONEWLINEfrom qcodes.data.data_set import load_data, new_data, DataSetNEWLINEfrom qcodes.utils.helpers import LogCaptureNEWLINENEWLINEfrom .data_mocks import (MockFormatter, MatchIO,NEWLINE DataSet2D, DataSet1D,NEWLINE DataSetCombined, RecordingMockFormatter)NEWLINENEWLINEfrom .common import strip_qcNEWLINENEWLINENEWLINEclass TestDataArray(TestCase):NEWLINENEWLINE def test_attributes(self):NEWLINE pname = 'Betty Sue'NEWLINE plabel = 'The best apple pie this side of Wenatchee'NEWLINE pfullname = 'bert'NEWLINENEWLINE class MockParam:NEWLINE name = pnameNEWLINE label = plabelNEWLINENEWLINE def __init__(self, full_name=None):NEWLINE self.full_name = full_nameNEWLINENEWLINE name = 'Oscar'NEWLINE label = 'The grouch. GRR!'NEWLINE fullname = 'ernie'NEWLINE array_id = 24601NEWLINE set_arrays = ('awesomeness', 'chocolate content')NEWLINE shape = 'Ginornous'NEWLINE action_indices = (1, 2, 3, 4, 5)NEWLINENEWLINE p_data = DataArray(parameter=MockParam(pfullname), name=name,NEWLINE label=label, full_name=fullname)NEWLINE p_data2 = DataArray(parameter=MockParam(pfullname))NEWLINENEWLINE # explicitly given name and label override parameter valsNEWLINE self.assertEqual(p_data.name, name)NEWLINE self.assertEqual(p_data.label, label)NEWLINE self.assertEqual(p_data.full_name, fullname)NEWLINE self.assertEqual(p_data2.name, pname)NEWLINE self.assertEqual(p_data2.label, plabel)NEWLINE self.assertEqual(p_data2.full_name, pfullname)NEWLINE # test default valuesNEWLINE self.assertIsNone(p_data.array_id)NEWLINE self.assertEqual(p_data.shape, ())NEWLINE self.assertEqual(p_data.action_indices, ())NEWLINE self.assertEqual(p_data.set_arrays, ())NEWLINE self.assertIsNone(p_data.ndarray)NEWLINENEWLINE np_data = DataArray(name=name, label=label, array_id=array_id,NEWLINE set_arrays=set_arrays, shape=shape,NEWLINE action_indices=action_indices)NEWLINE self.assertEqual(np_data.name, name)NEWLINE self.assertEqual(np_data.label, label)NEWLINE # no full name or parameter - use nameNEWLINE self.assertEqual(np_data.full_name, name)NEWLINE # test simple assignmentsNEWLINE self.assertEqual(np_data.array_id, array_id)NEWLINE self.assertEqual(np_data.set_arrays, set_arrays)NEWLINE self.assertEqual(np_data.shape, shape)NEWLINE self.assertEqual(np_data.action_indices, action_indices)NEWLINENEWLINE name_data = DataArray(name=name)NEWLINE self.assertEqual(name_data.label, name)NEWLINENEWLINE blank_data = DataArray()NEWLINE self.assertIsNone(blank_data.name)NEWLINENEWLINE def test_preset_data(self):NEWLINE onetwothree = [NEWLINE # lists and tuples workNEWLINE [1.0, 2.0, 3.0],NEWLINE (1.0, 2.0, 3.0),NEWLINENEWLINE # iterators get automatically cast to floatsNEWLINE (i + 1 for i in range(3)),NEWLINE map(float, range(1, 4)),NEWLINENEWLINE # and of course numpy arrays themselves workNEWLINE np.array([1.0, 2.0, 3.0]),NEWLINE ]NEWLINENEWLINE expected123 = [1.0, 2.0, 3.0]NEWLINENEWLINE for item in onetwothree:NEWLINE data = DataArray(preset_data=item)NEWLINE self.assertEqual(data.ndarray.tolist(), expected123)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # you can re-initialize a DataArray with the same shape data,NEWLINE # but not with a different shapeNEWLINE list456 = [4, 5, 6]NEWLINE data.init_data(data=list456)NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE with self.assertRaises(ValueError):NEWLINE 
data.init_data([1, 2])NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # you can call init_data again with no data, and nothing changesNEWLINE data.init_data()NEWLINE self.assertEqual(data.ndarray.tolist(), list456)NEWLINE self.assertEqual(data.shape, (3, ))NEWLINENEWLINE # multidimensional works tooNEWLINE list2d = [[1, 2], [3, 4]]NEWLINE data2 = DataArray(preset_data=list2d)NEWLINE self.assertEqual(data2.ndarray.tolist(), list2d)NEWLINE self.assertEqual(data2.shape, (2, 2))NEWLINENEWLINE def test_init_data_error(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.shape = (3, )NEWLINENEWLINE # not sure when this would happen... but if you call init_dataNEWLINE # and it notices an inconsistency between shape and the actualNEWLINE # data that's already there, it raises an errorNEWLINE with self.assertRaises(ValueError):NEWLINE data.init_data()NEWLINENEWLINE def test_clear(self):NEWLINE nan = float('nan')NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.clear()NEWLINE # sometimes it's annoying that nan != nanNEWLINE self.assertEqual(repr(data.ndarray.tolist()), repr([nan, nan]))NEWLINENEWLINE def test_edit_and_mark(self):NEWLINE data = DataArray(preset_data=[[1, 2], [3, 4]])NEWLINE self.assertEqual(data[0].tolist(), [1, 2])NEWLINE self.assertEqual(data[0, 1], 2)NEWLINENEWLINE data.modified_range = NoneNEWLINE self.assertIsNone(data.last_saved_index)NEWLINENEWLINE self.assertEqual(len(data), 2)NEWLINE data[0] = np.array([5, 6])NEWLINE data[1, 0] = 7NEWLINE self.assertEqual(data.ndarray.tolist(), [[5, 6], [7, 4]])NEWLINENEWLINE self.assertEqual(data.modified_range, (0, 2))NEWLINENEWLINE # as if we saved the first two points... the third should stillNEWLINE # show as modifiedNEWLINE data.mark_saved(1)NEWLINE self.assertEqual(data.last_saved_index, 1)NEWLINE self.assertEqual(data.modified_range, (2, 2))NEWLINENEWLINE # now we save the third point... 
no modifications left.NEWLINE data.mark_saved(2)NEWLINE self.assertEqual(data.last_saved_index, 2)NEWLINE self.assertEqual(data.modified_range, None)NEWLINENEWLINE data.clear_save()NEWLINE self.assertEqual(data.last_saved_index, None)NEWLINE self.assertEqual(data.modified_range, (0, 2))NEWLINENEWLINE def test_edit_and_mark_slice(self):NEWLINE data = DataArray(preset_data=[[1] * 5] * 6)NEWLINENEWLINE self.assertEqual(data.shape, (6, 5))NEWLINE data.modified_range = NoneNEWLINENEWLINE data[:4:2, 2:] = 2NEWLINE self.assertEqual(data.tolist(), [NEWLINE [1, 1, 2, 2, 2],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 2, 2, 2],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 1, 1, 1],NEWLINE [1, 1, 1, 1, 1]NEWLINE ])NEWLINE self.assertEqual(data.modified_range, (2, 14))NEWLINENEWLINE def test_repr(self):NEWLINE array2d = [[1, 2], [3, 4]]NEWLINE arrayrepr = repr(np.array(array2d))NEWLINE array_id = (3, 4)NEWLINE data = DataArray(preset_data=array2d)NEWLINENEWLINE self.assertEqual(repr(data), 'DataArray[2,2]:\n' + arrayrepr)NEWLINENEWLINE data.array_id = array_idNEWLINE self.assertEqual(repr(data), 'DataArray[2,2]: ' + str(array_id) +NEWLINE '\n' + arrayrepr)NEWLINENEWLINE def test_nest_empty(self):NEWLINE data = DataArray()NEWLINENEWLINE self.assertEqual(data.shape, ())NEWLINENEWLINE mock_set_array = 'not really an array but we don\'t check'NEWLINE mock_set_array2 = 'another one'NEWLINENEWLINE data.nest(2, action_index=44, set_array=mock_set_array)NEWLINE data.nest(3, action_index=66, set_array=mock_set_array2)NEWLINENEWLINE # the array doesn't exist until you initialize itNEWLINE self.assertIsNone(data.ndarray)NEWLINENEWLINE # but other attributes are setNEWLINE self.assertEqual(data.shape, (3, 2))NEWLINE self.assertEqual(data.action_indices, (66, 44))NEWLINE self.assertEqual(data.set_arrays, (mock_set_array2, mock_set_array))NEWLINENEWLINE data.init_data()NEWLINE self.assertEqual(data.ndarray.shape, (3, 2))NEWLINENEWLINE # after initializing data, you can't nest anymore because this isn'tNEWLINE # a preset arrayNEWLINE with self.assertRaises(RuntimeError):NEWLINE data.nest(4)NEWLINENEWLINE def test_nest_preset(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE data.nest(3)NEWLINE self.assertEqual(data.shape, (3, 2))NEWLINE self.assertEqual(data.ndarray.tolist(), [[1, 2]] * 3)NEWLINE self.assertEqual(data.action_indices, ())NEWLINE self.assertEqual(data.set_arrays, (data,))NEWLINENEWLINE # test that the modified range gets correctly set toNEWLINE # (0, 2*3-1 = 5)NEWLINE self.assertEqual(data.modified_range, (0, 5))NEWLINENEWLINE # you need a set array for all but the inner nestingNEWLINE with self.assertRaises(TypeError):NEWLINE data.nest(4)NEWLINENEWLINE def test_data_set_property(self):NEWLINE data = DataArray(preset_data=[1, 2])NEWLINE self.assertIsNone(data.data_set)NEWLINENEWLINE mock_data_set = 'pretend this is a DataSet, we don\'t check type'NEWLINE mock_data_set2 = 'you can only assign to another after first clearing'NEWLINE data.data_set = mock_data_setNEWLINE self.assertEqual(data.data_set, mock_data_set)NEWLINENEWLINE with self.assertRaises(RuntimeError):NEWLINE data.data_set = mock_data_set2NEWLINENEWLINE data.data_set = NoneNEWLINE self.assertIsNone(data.data_set)NEWLINE data.data_set = mock_data_set2NEWLINE self.assertEqual(data.data_set, mock_data_set2)NEWLINENEWLINE def test_fraction_complete(self):NEWLINE data = DataArray(shape=(5, 10))NEWLINE self.assertIsNone(data.ndarray)NEWLINE self.assertEqual(data.fraction_complete(), 0.0)NEWLINENEWLINE data.init_data()NEWLINE 
self.assertEqual(data.fraction_complete(), 0.0)NEWLINENEWLINE # index = 1 * 10 + 7 - add 1 (for index 0) and you get 18NEWLINE # each index is 2% of the total, so this is 36%NEWLINE data[1, 7] = 1NEWLINE self.assertEqual(data.fraction_complete(), 18/50)NEWLINENEWLINE # add a last_saved_index but modified_range is still biggerNEWLINE data.mark_saved(13)NEWLINE self.assertEqual(data.fraction_complete(), 18/50)NEWLINENEWLINE # now last_saved_index winsNEWLINE data.mark_saved(19)NEWLINE self.assertEqual(data.fraction_complete(), 20/50)NEWLINENEWLINE # now pretend we get more info from syncingNEWLINE data.synced_index = 22NEWLINE self.assertEqual(data.fraction_complete(), 23/50)NEWLINENEWLINENEWLINEclass TestLoadData(TestCase):NEWLINENEWLINE def test_no_saved_data(self):NEWLINE with self.assertRaises(IOError):NEWLINE load_data('_no/such/file_')NEWLINENEWLINE def test_load_false(self):NEWLINE with self.assertRaises(ValueError):NEWLINE load_data(False)NEWLINENEWLINE def test_get_read(self):NEWLINE data = load_data(formatter=MockFormatter(), location='here!')NEWLINE self.assertEqual(data.has_read_data, True)NEWLINE self.assertEqual(data.has_read_metadata, True)NEWLINENEWLINENEWLINEclass TestDataSetMetaData(TestCase):NEWLINENEWLINE def test_snapshot(self):NEWLINE data = new_data(location=False)NEWLINE expected_snap = {NEWLINE '__class__': 'qcodes.data.data_set.DataSet',NEWLINE 'location': False,NEWLINE 'arrays': {},NEWLINE 'formatter': 'qcodes.data.gnuplot_format.GNUPlotFormat',NEWLINE }NEWLINE snap = strip_qc(data.snapshot())NEWLINENEWLINE # handle io separately so we don't need to figure out our pathNEWLINE self.assertIn('DiskIO', snap['io'])NEWLINE del snap['io']NEWLINE self.assertEqual(snap, expected_snap)NEWLINENEWLINE # even though we removed io from the snapshot, it's still in .metadataNEWLINE self.assertIn('io', data.metadata)NEWLINENEWLINE # then do the same transformations to metadata to check it tooNEWLINE del data.metadata['io']NEWLINE strip_qc(data.metadata)NEWLINE self.assertEqual(data.metadata, expected_snap)NEWLINENEWLINE # location is False so read_metadata should be a noopNEWLINE data.metadata = {'food': 'Fried chicken'}NEWLINE data.read_metadata()NEWLINE self.assertEqual(data.metadata, {'food': 'Fried chicken'})NEWLINENEWLINE # snapshot should never delete things from metadata, only add or updateNEWLINE data.metadata['location'] = 'Idaho'NEWLINE snap = strip_qc(data.snapshot())NEWLINE expected_snap['food'] = 'Fried chicken'NEWLINE del snap['io']NEWLINE self.assertEqual(snap, expected_snap)NEWLINENEWLINENEWLINEclass TestNewData(TestCase):NEWLINENEWLINE @classmethodNEWLINE def setUpClass(cls):NEWLINE cls.original_lp = DataSet.location_providerNEWLINENEWLINE @classmethodNEWLINE def tearDownClass(cls):NEWLINE DataSet.location_provider = cls.original_lpNEWLINENEWLINE def test_overwrite(self):NEWLINE io = MatchIO([1])NEWLINENEWLINE with self.assertRaises(FileExistsError):NEWLINE new_data(location='somewhere', io=io)NEWLINENEWLINE data = new_data(location='somewhere', io=io, overwrite=True,)NEWLINE self.assertEqual(data.location, 'somewhere')NEWLINENEWLINE def test_location_functions(self):NEWLINE def my_location(io, record):NEWLINE return 'data/{}'.format((record or {}).get('name') or 'LOOP!')NEWLINENEWLINE def my_location2(io, record):NEWLINE name = (record or {}).get('name') or 'loop?'NEWLINE return 'data/{}/folder'.format(name)NEWLINENEWLINE DataSet.location_provider = my_locationNEWLINENEWLINE self.assertEqual(new_data().location, 'data/LOOP!')NEWLINE 
self.assertEqual(new_data(name='cheese').location, 'data/cheese')NEWLINENEWLINE data = new_data(location=my_location2)NEWLINE self.assertEqual(data.location, 'data/loop?/folder')NEWLINE data = new_data(location=my_location2, name='iceCream')NEWLINE self.assertEqual(data.location, 'data/iceCream/folder')NEWLINENEWLINENEWLINEclass TestDataSet(TestCase):NEWLINENEWLINE def test_constructor_errors(self):NEWLINE # no location - only allowed with load_dataNEWLINE with self.assertRaises(ValueError):NEWLINE DataSet()NEWLINE # wrong typeNEWLINE with self.assertRaises(ValueError):NEWLINE DataSet(location=42)NEWLINENEWLINE def test_write_copy(self):NEWLINE data = DataSet1D(location=False)NEWLINE mockbase = os.path.abspath('some_folder')NEWLINE data.io = DiskIO(mockbase)NEWLINENEWLINE mr = (2, 3)NEWLINE mr_full = (0, 4)NEWLINE lsi = 1NEWLINE data.x_set.modified_range = mrNEWLINE data.y.modified_range = mrNEWLINE data.x_set.last_saved_index = lsiNEWLINE data.y.last_saved_index = lsiNEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy()NEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy(path='some/path', io_manager=DiskIO('.'))NEWLINENEWLINE with self.assertRaises(TypeError):NEWLINE data.write_copy(path='some/path', location='something/else')NEWLINENEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(path='/some/abs/path')NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(None, '/some/abs/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(None, '/some/abs/path', False)])NEWLINE # check that the formatter gets called as if nothing has been savedNEWLINE self.assertEqual(data.formatter.modified_ranges,NEWLINE [{'x_set': mr_full, 'y': mr_full}])NEWLINE self.assertEqual(data.formatter.last_saved_indices,NEWLINE [{'x_set': None, 'y': None}])NEWLINE # but the dataset afterward has its original mods backNEWLINE self.assertEqual(data.x_set.modified_range, mr)NEWLINE self.assertEqual(data.y.modified_range, mr)NEWLINE self.assertEqual(data.x_set.last_saved_index, lsi)NEWLINE self.assertEqual(data.y.last_saved_index, lsi)NEWLINENEWLINE # recreate the formatter to clear the calls attributesNEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(location='some/rel/path')NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(mockbase, 'some/rel/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(mockbase, 'some/rel/path', False)])NEWLINENEWLINE mockbase2 = os.path.abspath('some/other/folder')NEWLINE io2 = DiskIO(mockbase2)NEWLINENEWLINE with self.assertRaises(ValueError):NEWLINE # if location=False we need to specify it in write_copyNEWLINE data.write_copy(io_manager=io2)NEWLINENEWLINE data.location = 'yet/another/path'NEWLINE data.formatter = RecordingMockFormatter()NEWLINE data.write_copy(io_manager=io2)NEWLINE self.assertEqual(data.formatter.write_calls,NEWLINE [(mockbase2, 'yet/another/path')])NEWLINE self.assertEqual(data.formatter.write_metadata_calls,NEWLINE [(mockbase2, 'yet/another/path', False)])NEWLINENEWLINE def test_pickle_dataset(self):NEWLINE # Test pickling of DataSet objectNEWLINE # If the data_manager is set to None, then the object should pickle.NEWLINE m = DataSet2D()NEWLINE pickle.dumps(m)NEWLINENEWLINE def test_default_parameter(self):NEWLINE # Test whether the default_array function worksNEWLINE m = DataSet2D()NEWLINENEWLINE # test we can run with default argumentsNEWLINE name = m.default_parameter_name()NEWLINENEWLINE # test with paramnameNEWLINE 
name = m.default_parameter_name(paramname='z')NEWLINE self.assertEqual(name, 'z')NEWLINE # test we can get the array instead of the nameNEWLINE array = m.default_parameter_array(paramname='z')NEWLINE self.assertEqual(array, m.z)NEWLINENEWLINE # first non-setpoint arrayNEWLINE array = m.default_parameter_array()NEWLINE self.assertEqual(array, m.z)NEWLINENEWLINE # test with metadataNEWLINE m.metadata = dict({'default_parameter_name': 'x_set'})NEWLINE name = m.default_parameter_name()NEWLINE self.assertEqual(name, 'x_set')NEWLINENEWLINE # test the fallback: no name matches, no non-setpoint arrayNEWLINE x = DataArray(name='x', label='X', preset_data=(1., 2., 3., 4., 5.), is_setpoint=True)NEWLINE m= new_data(arrays=(x,), name='onlysetpoint')NEWLINE name=m.default_parameter_name(paramname='dummy')NEWLINE self.assertEqual(name, 'x_set')NEWLINENEWLINE def test_fraction_complete(self):NEWLINE empty_data = new_data(arrays=(), location=False)NEWLINE self.assertEqual(empty_data.fraction_complete(), 0.0)NEWLINENEWLINE data = DataSetCombined(location=False)NEWLINE self.assertEqual(data.fraction_complete(), 1.0)NEWLINENEWLINE # alter only the measured arrays, check that only these are usedNEWLINE # to calculate fraction_completeNEWLINE data.y1.modified_range = (0, 0) # 1 of 2NEWLINE data.y2.modified_range = (0, 0) # 1 of 2NEWLINE data.z1.modified_range = (0, 2) # 3 of 6NEWLINE data.z2.modified_range = (0, 2) # 3 of 6NEWLINE self.assertEqual(data.fraction_complete(), 0.5)NEWLINENEWLINE # mark more things complete using last_saved_index and synced_indexNEWLINE data.y1.last_saved_index = 1 # 2 of 2NEWLINE data.z1.synced_index = 5 # 6 of 6NEWLINE self.assertEqual(data.fraction_complete(), 0.75)NEWLINENEWLINE def mock_sync(self):NEWLINE i = self.sync_indexNEWLINE self.syncing_array[i] = iNEWLINE self.sync_index = i + 1NEWLINE return self.sync_index < self.syncing_array.sizeNEWLINENEWLINE def failing_func(self):NEWLINE raise RuntimeError('it is called failing_func for a reason!')NEWLINENEWLINE def logging_func(self):NEWLINE logging.info('background at index {}'.format(self.sync_index))NEWLINENEWLINE def test_complete(self):NEWLINE array = DataArray(name='y', shape=(5,))NEWLINE array.init_data()NEWLINE data = new_data(arrays=(array,), location=False)NEWLINE self.syncing_array = arrayNEWLINE self.sync_index = 0NEWLINE data.sync = self.mock_syncNEWLINE bf = DataSet.background_functionsNEWLINE bf['fail'] = self.failing_funcNEWLINE bf['log'] = self.logging_funcNEWLINENEWLINE with LogCapture() as logs:NEWLINE # grab info and warnings but not debug messagesNEWLINE logging.getLogger().setLevel(logging.INFO)NEWLINE data.complete(delay=0.001)NEWLINENEWLINE logs = logs.valueNEWLINENEWLINE expected_logs = [NEWLINE 'waiting for DataSet <False> to complete',NEWLINE 'DataSet: 0% complete',NEWLINE 'RuntimeError: it is called failing_func for a reason!',NEWLINE 'background at index 1',NEWLINE 'DataSet: 20% complete',NEWLINE 'RuntimeError: it is called failing_func for a reason!',NEWLINE 'background function fail failed twice in a row, removing it',NEWLINE 'background at index 2',NEWLINE 'DataSet: 40% complete',NEWLINE 'background at index 3',NEWLINE 'DataSet: 60% complete',NEWLINE 'background at index 4',NEWLINE 'DataSet: 80% complete',NEWLINE 'background at index 5',NEWLINE 'DataSet <False> is complete'NEWLINE ]NEWLINENEWLINE log_index = 0NEWLINE for line in expected_logs:NEWLINE self.assertIn(line, logs, logs)NEWLINE try:NEWLINE log_index_new = logs.index(line, log_index)NEWLINE except ValueError:NEWLINE raise 
ValueError('line {} not found after {} in: \n {}'.format(NEWLINE line, log_index, logs))NEWLINE self.assertTrue(log_index_new >= log_index, logs)NEWLINE log_index = log_index_new + len(line) + 1 # +1 for \nNEWLINE self.assertEqual(log_index, len(logs), logs)NEWLINE
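The fraction_complete assertions above encode a simple model: each measured array reports how far it has been written (the furthest of modified_range, last_saved_index and synced_index), and the DataSet averages those per-array fractions. A minimal sketch of that arithmetic, using a hypothetical plain dict in place of the real qcodes arrays:

# Sketch only: 'arrays' maps array name -> (highest_written_index, size).
# In the real DataSet that index comes from modified_range,
# last_saved_index or synced_index, whichever is furthest along.
def fraction_complete(arrays):
    if not arrays:
        return 0.0
    fractions = [(idx + 1) / size for idx, size in arrays.values()]
    return sum(fractions) / len(fractions)

# Mirrors the test: y arrays 1 of 2 written, z arrays 3 of 6 -> 0.5
print(fraction_complete({'y1': (0, 2), 'y2': (0, 2),
                         'z1': (2, 6), 'z2': (2, 6)}))  # 0.5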
import wxNEWLINENEWLINEfrom meerk40t.gui.scene.sceneconst import (NEWLINE    RESPONSE_ABORT,NEWLINE    RESPONSE_CHAIN,NEWLINE    RESPONSE_CONSUME,NEWLINE)NEWLINEfrom meerk40t.gui.toolwidgets.toolwidget import ToolWidgetNEWLINEfrom meerk40t.svgelements import EllipseNEWLINENEWLINENEWLINEclass EllipseTool(ToolWidget):NEWLINE    """NEWLINE    Ellipse Drawing Tool.NEWLINENEWLINE    Adds an ellipse with click and drag.NEWLINE    """NEWLINENEWLINE    def __init__(self, scene):NEWLINE        ToolWidget.__init__(self, scene)NEWLINE        self.start_position = NoneNEWLINE        self.p1 = NoneNEWLINE        self.p2 = NoneNEWLINENEWLINE    def process_draw(self, gc: wx.GraphicsContext):NEWLINE        if self.p1 is not None and self.p2 is not None:NEWLINE            x0 = min(self.p1.real, self.p2.real)NEWLINE            y0 = min(self.p1.imag, self.p2.imag)NEWLINE            x1 = max(self.p1.real, self.p2.real)NEWLINE            y1 = max(self.p1.imag, self.p2.imag)NEWLINE            gc.SetPen(self.pen)NEWLINE            gc.SetBrush(wx.TRANSPARENT_BRUSH)NEWLINE            gc.DrawEllipse(x0, y0, x1 - x0, y1 - y0)NEWLINENEWLINE    def event(self, window_pos=None, space_pos=None, event_type=None):NEWLINE        response = RESPONSE_CHAINNEWLINE        if event_type == "leftdown":NEWLINE            self.scene.tool_active = TrueNEWLINE            self.p1 = complex(space_pos[0], space_pos[1])NEWLINE            response = RESPONSE_CONSUMENEWLINE        elif event_type == "move":NEWLINE            self.p2 = complex(space_pos[0], space_pos[1])NEWLINE            self.scene.request_refresh()NEWLINE            response = RESPONSE_CONSUMENEWLINE        elif event_type == "leftup":NEWLINE            self.scene.tool_active = FalseNEWLINE            try:NEWLINE                if self.p1 is None:NEWLINE                    # no drag in progress; let other widgets handle the eventNEWLINE                    return responseNEWLINE                self.p2 = complex(space_pos[0], space_pos[1])NEWLINE                x0 = min(self.p1.real, self.p2.real)NEWLINE                y0 = min(self.p1.imag, self.p2.imag)NEWLINE                x1 = max(self.p1.real, self.p2.real)NEWLINE                y1 = max(self.p1.imag, self.p2.imag)NEWLINE                ellipse = Ellipse(NEWLINE                    (x1 + x0) / 2.0,NEWLINE                    (y1 + y0) / 2.0,NEWLINE                    abs(x0 - x1) / 2,NEWLINE                    abs(y0 - y1) / 2,NEWLINE                    stroke="blue",NEWLINE                    stroke_width=1000,NEWLINE                )NEWLINE                if not ellipse.is_degenerate():NEWLINE                    elements = self.scene.context.elementsNEWLINE                    node = elements.elem_branch.add(shape=ellipse, type="elem ellipse")NEWLINE                    elements.classify([node])NEWLINE                self.p1 = NoneNEWLINE                self.p2 = NoneNEWLINE            except IndexError:NEWLINE                passNEWLINE            self.scene.request_refresh()NEWLINE            response = RESPONSE_ABORTNEWLINE        elif event_type == "lost":NEWLINE            self.scene.tool_active = FalseNEWLINE        return responseNEWLINE
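The leftup branch converts the two drag corners into the centre/radius form that Ellipse expects. The same arithmetic as a standalone, hypothetical helper:

def corners_to_ellipse_params(p1: complex, p2: complex):
    """Return (cx, cy, rx, ry) for the ellipse inscribed in the
    axis-aligned rectangle spanned by two drag corners."""
    x0, x1 = min(p1.real, p2.real), max(p1.real, p2.real)
    y0, y1 = min(p1.imag, p2.imag), max(p1.imag, p2.imag)
    return (x0 + x1) / 2.0, (y0 + y1) / 2.0, (x1 - x0) / 2, (y1 - y0) / 2

# Dragging from (0, 0) to (10, 4) gives centre (5.0, 2.0) and radii (5.0, 2.0)
print(corners_to_ellipse_params(complex(0, 0), complex(10, 4)))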
'''NEWLINEData structure of the input .npz:NEWLINEthe data is saved in Python dictionary format with keys: 'acs', 'ep_rets', 'rews', 'obs'NEWLINEthe value of each item is a list storing the expert trajectory sequentiallyNEWLINEa transition can be: (data['obs'][t], data['acs'][t], data['obs'][t+1]) and gets reward data['rews'][t]NEWLINE'''NEWLINENEWLINEfrom baselines import loggerNEWLINEimport numpy as npNEWLINENEWLINENEWLINEclass Dset(object):NEWLINE    def __init__(self, inputs, labels, randomize):NEWLINE        self.inputs = inputsNEWLINE        self.labels = labelsNEWLINE        assert len(self.inputs) == len(self.labels)NEWLINE        self.randomize = randomizeNEWLINE        self.num_pairs = len(inputs)NEWLINE        self.init_pointer()NEWLINENEWLINE    def init_pointer(self):NEWLINE        self.pointer = 0NEWLINE        if self.randomize:NEWLINE            idx = np.arange(self.num_pairs)NEWLINE            np.random.shuffle(idx)NEWLINE            self.inputs = self.inputs[idx, :]NEWLINE            self.labels = self.labels[idx, :]NEWLINENEWLINE    def get_next_batch(self, batch_size):NEWLINE        # if batch_size is negative -> return allNEWLINE        if batch_size < 0:NEWLINE            return self.inputs, self.labelsNEWLINE        if self.pointer + batch_size >= self.num_pairs:NEWLINE            self.init_pointer()NEWLINE        end = self.pointer + batch_sizeNEWLINE        inputs = self.inputs[self.pointer:end, :]NEWLINE        labels = self.labels[self.pointer:end, :]NEWLINE        self.pointer = endNEWLINE        return inputs, labelsNEWLINENEWLINENEWLINEclass Mujoco_Dset(object):NEWLINE    def __init__(self, expert_path, gen_f, train_fraction=0.7, traj_limitation=-1, randomize=True):NEWLINE        if isinstance(expert_path, str):NEWLINE            expert_path = [expert_path]NEWLINE        self.gen_f = gen_fNEWLINE        traj_data = np.load(expert_path.pop(), allow_pickle=True)NEWLINE        traj_data = traj_data[()]NEWLINE        while len(expert_path) and (traj_limitation < 0 or len(traj_data['obs']) < traj_limitation):NEWLINE            next_f = expert_path.pop()NEWLINE            try:NEWLINE                next_data = np.load(next_f, allow_pickle=True)NEWLINE                next_data = next_data[()]NEWLINE                for key in traj_data:NEWLINE                    traj_data[key].extend(next_data[key])NEWLINE            except Exception as e:NEWLINE                logger.log('Could not load pickled dataset from {0}: {1}'.format(next_f, e))NEWLINE        if traj_limitation < 0:NEWLINE            traj_limitation = len(traj_data['obs'])NEWLINE        obs = np.array(traj_data['obs'][:traj_limitation])NEWLINE        acs = np.array(traj_data['acs'][:traj_limitation])NEWLINENEWLINE        # obs, acs: shape (N, L, ) + S where N = # episodes, L = episode lengthNEWLINE        # and S is the environment observation/action space.NEWLINE        # Flatten to (N * L, prod(S))NEWLINE        if len(obs.shape) > 2:NEWLINE            self.obs = np.reshape(obs, [-1, np.prod(obs.shape[2:])])NEWLINE            self.acs = np.reshape(acs, [-1, np.prod(acs.shape[2:])])NEWLINE        else:NEWLINE            self.obs = np.vstack(obs)NEWLINE            self.acs = np.vstack(acs)NEWLINENEWLINE        self.rets = traj_data['ep_rets'][:traj_limitation]NEWLINE        self.avg_ret = np.sum([np.sum(r) for r in self.rets])/len(self.rets)NEWLINE        self.std_ret = np.std(np.array([np.sum(r) for r in self.rets]))NEWLINE        if len(self.acs) > 2:NEWLINE            self.acs = np.squeeze(self.acs)NEWLINE        assert len(self.obs) == len(self.acs)NEWLINE        self.num_traj = min(traj_limitation, len(traj_data['obs']))NEWLINE        self.num_transition = len(self.obs)NEWLINE        self.randomize = randomizeNEWLINE        self.dset = Dset(self.obs, self.acs, self.randomize)NEWLINE        # for behavior cloningNEWLINE        self.train_set = Dset(self.obs[:int(self.num_transition*train_fraction), :],NEWLINE                              self.acs[:int(self.num_transition*train_fraction), :],NEWLINE                              self.randomize)NEWLINE        self.val_set = 
Dset(self.obs[int(self.num_transition*train_fraction):, :],NEWLINE                            self.acs[int(self.num_transition*train_fraction):, :],NEWLINE                            self.randomize)NEWLINE        self.log_info()NEWLINENEWLINE    def log_info(self):NEWLINE        logger.log("Total trajectories: %d" % self.num_traj)NEWLINE        logger.log("Total transitions: %d" % self.num_transition)NEWLINE        logger.log("Average returns: %f" % self.avg_ret)NEWLINE        logger.log("Std for returns: %f" % self.std_ret)NEWLINENEWLINE    def get_next_batch(self, batch_size, split=None):NEWLINE        if split is None:NEWLINE            return self.dset.get_next_batch(batch_size)NEWLINE        elif split == 'train':NEWLINE            return self.train_set.get_next_batch(batch_size)NEWLINE        elif split == 'val':NEWLINE            return self.val_set.get_next_batch(batch_size)NEWLINE        else:NEWLINE            raise NotImplementedErrorNEWLINENEWLINE    def plot(self):NEWLINE        import matplotlib.pyplot as pltNEWLINE        plt.hist(self.rets)NEWLINE        plt.savefig("histogram_rets.png")NEWLINE        plt.close()NEWLINENEWLINENEWLINEdef test(expert_path, traj_limitation, plot):NEWLINE    # gen_f is only stored by Mujoco_Dset, so None is safe for this smoke testNEWLINE    dset = Mujoco_Dset(expert_path, gen_f=None, traj_limitation=traj_limitation)NEWLINE    if plot:NEWLINE        dset.plot()NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    import argparseNEWLINE    parser = argparse.ArgumentParser()NEWLINE    parser.add_argument("--expert_path", type=str, default="../data/deterministic.trpo.Hopper.0.00.npz")NEWLINE    # default of -1 (not None) means "use all trajectories"; Mujoco_DsetNEWLINE    # compares traj_limitation < 0, which would fail on NoneNEWLINE    parser.add_argument("--traj_limitation", type=int, default=-1)NEWLINE    # store_true instead of type=bool: argparse's type=bool treats anyNEWLINE    # non-empty string (including "False") as TrueNEWLINE    parser.add_argument("--plot", action="store_true")NEWLINE    args = parser.parse_args()NEWLINE    test(args.expert_path, args.traj_limitation, args.plot)NEWLINE
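Note that the loader indexes the loaded object with [()], which implies each expert file is a pickled Python dict saved as a 0-d object array (what np.save does with a dict), despite the .npz suffix in the default path. A hedged sketch of writing a compatible file with dummy data (names and shapes here are illustrative assumptions, not the real Hopper data):

import numpy as np

n_episodes, ep_len, obs_dim, act_dim = 3, 5, 11, 3
traj_data = {
    'obs': [np.zeros((ep_len, obs_dim)) for _ in range(n_episodes)],
    'acs': [np.zeros((ep_len, act_dim)) for _ in range(n_episodes)],
    'rews': [np.zeros(ep_len) for _ in range(n_episodes)],
    'ep_rets': [0.0] * n_episodes,
}
np.save('expert_demo', traj_data)  # writes expert_demo.npy, a pickled dict
loaded = np.load('expert_demo.npy', allow_pickle=True)[()]  # same [()] access
assert sorted(loaded) == ['acs', 'ep_rets', 'obs', 'rews']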
"""numpy.distutils.fcompilerNEWLINENEWLINEContains FCompiler, an abstract base class that defines the interfaceNEWLINEfor the numpy.distutils Fortran compiler abstraction model.NEWLINENEWLINETerminology:NEWLINENEWLINETo be consistent, where the term 'executable' is used, it means the singleNEWLINEfile, like 'gcc', that is executed, and should be a string. In contrast,NEWLINE'command' means the entire command line, like ['gcc', '-c', 'file.c'], andNEWLINEshould be a list.NEWLINENEWLINEBut note that FCompiler.executables is actually a dictionary of commands.NEWLINENEWLINE"""NEWLINEfrom __future__ import division, absolute_import, print_functionNEWLINENEWLINE__all__ = ['FCompiler', 'new_fcompiler', 'show_fcompilers',NEWLINE 'dummy_fortran_file']NEWLINENEWLINEimport osNEWLINEimport sysNEWLINEimport reNEWLINEimport typesNEWLINEtry:NEWLINE setNEWLINEexcept NameError:NEWLINE from sets import Set as setNEWLINENEWLINEfrom numpy.compat import open_latin1NEWLINENEWLINEfrom distutils.sysconfig import get_python_libNEWLINEfrom distutils.fancy_getopt import FancyGetoptNEWLINEfrom distutils.errors import DistutilsModuleError, \NEWLINE DistutilsExecError, CompileError, LinkError, DistutilsPlatformErrorNEWLINEfrom distutils.util import split_quoted, strtoboolNEWLINENEWLINEfrom numpy.distutils.ccompiler import CCompiler, gen_lib_optionsNEWLINEfrom numpy.distutils import logNEWLINEfrom numpy.distutils.misc_util import is_string, all_strings, is_sequence, \NEWLINE make_temp_file, get_shared_lib_extensionNEWLINEfrom numpy.distutils.environment import EnvironmentConfigNEWLINEfrom numpy.distutils.exec_command import find_executableNEWLINEfrom numpy.distutils.compat import get_exceptionNEWLINENEWLINE__metaclass__ = typeNEWLINENEWLINEclass CompilerNotFound(Exception):NEWLINE passNEWLINENEWLINEdef flaglist(s):NEWLINE if is_string(s):NEWLINE return split_quoted(s)NEWLINE else:NEWLINE return sNEWLINENEWLINEdef str2bool(s):NEWLINE if is_string(s):NEWLINE return strtobool(s)NEWLINE return bool(s)NEWLINENEWLINEdef is_sequence_of_strings(seq):NEWLINE return is_sequence(seq) and all_strings(seq)NEWLINENEWLINEclass FCompiler(CCompiler):NEWLINE """Abstract base class to define the interface that must be implementedNEWLINE by real Fortran compiler classes.NEWLINENEWLINE Methods that subclasses may redefine:NEWLINENEWLINE update_executables(), find_executables(), get_version()NEWLINE get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()NEWLINE get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),NEWLINE get_flags_debug_f77(), get_flags_f90(), get_flags_opt_f90(),NEWLINE get_flags_arch_f90(), get_flags_debug_f90(),NEWLINE get_flags_fix(), get_flags_linker_so()NEWLINENEWLINE DON'T call these methods (except get_version) afterNEWLINE constructing a compiler instance or inside any other method.NEWLINE All methods, except update_executables() and find_executables(),NEWLINE may call the get_version() method.NEWLINENEWLINE After constructing a compiler instance, always call customize(dist=None)NEWLINE method that finalizes compiler construction and makes the followingNEWLINE attributes available:NEWLINE compiler_f77NEWLINE compiler_f90NEWLINE compiler_fixNEWLINE linker_soNEWLINE archiverNEWLINE ranlibNEWLINE librariesNEWLINE library_dirsNEWLINE """NEWLINENEWLINE # These are the environment variables and distutils keys used.NEWLINE # Each configuration descripition isNEWLINE # (<hook name>, <environment variable>, <key in distutils.cfg>, <convert>)NEWLINE # The hook names are handled by the self._environment_hook 
method.NEWLINE    # - names starting with 'self.' call methods in this classNEWLINE    # - names starting with 'exe.' return the key in the executables dictNEWLINE    # - names like 'flags.YYY' return self.get_flags_YYY()NEWLINE    # convert is either None or a function to convert a string to theNEWLINE    # appropriate type used.NEWLINENEWLINE    distutils_vars = EnvironmentConfig(NEWLINE        distutils_section='config_fc',NEWLINE        noopt = (None, None, 'noopt', str2bool),NEWLINE        noarch = (None, None, 'noarch', str2bool),NEWLINE        debug = (None, None, 'debug', str2bool),NEWLINE        verbose = (None, None, 'verbose', str2bool),NEWLINE    )NEWLINENEWLINE    command_vars = EnvironmentConfig(NEWLINE        distutils_section='config_fc',NEWLINE        compiler_f77 = ('exe.compiler_f77', 'F77', 'f77exec', None),NEWLINE        compiler_f90 = ('exe.compiler_f90', 'F90', 'f90exec', None),NEWLINE        compiler_fix = ('exe.compiler_fix', 'F90', 'f90exec', None),NEWLINE        version_cmd = ('exe.version_cmd', None, None, None),NEWLINE        linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None),NEWLINE        linker_exe = ('exe.linker_exe', 'LD', 'ld', None),NEWLINE        archiver = (None, 'AR', 'ar', None),NEWLINE        ranlib = (None, 'RANLIB', 'ranlib', None),NEWLINE    )NEWLINENEWLINE    flag_vars = EnvironmentConfig(NEWLINE        distutils_section='config_fc',NEWLINE        f77 = ('flags.f77', 'F77FLAGS', 'f77flags', flaglist),NEWLINE        f90 = ('flags.f90', 'F90FLAGS', 'f90flags', flaglist),NEWLINE        free = ('flags.free', 'FREEFLAGS', 'freeflags', flaglist),NEWLINE        fix = ('flags.fix', None, None, flaglist),NEWLINE        opt = ('flags.opt', 'FOPT', 'opt', flaglist),NEWLINE        opt_f77 = ('flags.opt_f77', None, None, flaglist),NEWLINE        opt_f90 = ('flags.opt_f90', None, None, flaglist),NEWLINE        arch = ('flags.arch', 'FARCH', 'arch', flaglist),NEWLINE        arch_f77 = ('flags.arch_f77', None, None, flaglist),NEWLINE        arch_f90 = ('flags.arch_f90', None, None, flaglist),NEWLINE        debug = ('flags.debug', 'FDEBUG', 'fdebug', flaglist),NEWLINE        debug_f77 = ('flags.debug_f77', None, None, flaglist),NEWLINE        debug_f90 = ('flags.debug_f90', None, None, flaglist),NEWLINE        flags = ('self.get_flags', 'FFLAGS', 'fflags', flaglist),NEWLINE        linker_so = ('flags.linker_so', 'LDFLAGS', 'ldflags', flaglist),NEWLINE        linker_exe = ('flags.linker_exe', 'LDFLAGS', 'ldflags', flaglist),NEWLINE        ar = ('flags.ar', 'ARFLAGS', 'arflags', flaglist),NEWLINE    )NEWLINENEWLINE    language_map = {'.f': 'f77',NEWLINE                    '.for': 'f77',NEWLINE                    '.F': 'f77',    # XXX: needs preprocessorNEWLINE                    '.ftn': 'f77',NEWLINE                    '.f77': 'f77',NEWLINE                    '.f90': 'f90',NEWLINE                    '.F90': 'f90',  # XXX: needs preprocessorNEWLINE                    '.f95': 'f90',NEWLINE                    }NEWLINE    language_order = ['f90', 'f77']NEWLINENEWLINENEWLINE    # These will be set by the subclassNEWLINENEWLINE    compiler_type = NoneNEWLINE    compiler_aliases = ()NEWLINE    version_pattern = NoneNEWLINENEWLINE    possible_executables = []NEWLINE    executables = {NEWLINE        'version_cmd': ["f77", "-v"],NEWLINE        'compiler_f77': ["f77"],NEWLINE        'compiler_f90': ["f90"],NEWLINE        'compiler_fix': ["f90", "-fixed"],NEWLINE        'linker_so': ["f90", "-shared"],NEWLINE        'linker_exe': ["f90"],NEWLINE        'archiver': ["ar", "-cr"],NEWLINE        'ranlib': None,NEWLINE    }NEWLINENEWLINE    # If compiler does not support compiling Fortran 90 then it canNEWLINE    # suggest using another compiler. For example, gnu would suggestNEWLINE    # gnu95 compiler type when there are F90 sources.NEWLINE    suggested_f90_compiler = NoneNEWLINENEWLINE    compile_switch = "-c"NEWLINE    object_switch = "-o "  # Ending space matters! 
It will be strippedNEWLINE # but if it is missing then object_switchNEWLINE # will be prefixed to object file name byNEWLINE # string concatenation.NEWLINE library_switch = "-o " # Ditto!NEWLINENEWLINE # Switch to specify where module files are created and searchedNEWLINE # for USE statement. Normally it is a string and also here endingNEWLINE # space matters. See above.NEWLINE module_dir_switch = NoneNEWLINENEWLINE # Switch to specify where module files are searched for USE statement.NEWLINE module_include_switch = '-I'NEWLINENEWLINE pic_flags = [] # Flags to create position-independent codeNEWLINENEWLINE src_extensions = ['.for', '.ftn', '.f77', '.f', '.f90', '.f95', '.F', '.F90']NEWLINE obj_extension = ".o"NEWLINENEWLINE shared_lib_extension = get_shared_lib_extension()NEWLINE static_lib_extension = ".a" # or .libNEWLINE static_lib_format = "lib%s%s" # or %s%sNEWLINE shared_lib_format = "%s%s"NEWLINE exe_extension = ""NEWLINENEWLINE _exe_cache = {}NEWLINENEWLINE _executable_keys = ['version_cmd', 'compiler_f77', 'compiler_f90',NEWLINE 'compiler_fix', 'linker_so', 'linker_exe', 'archiver',NEWLINE 'ranlib']NEWLINENEWLINE # This will be set by new_fcompiler when called inNEWLINE # command/{build_ext.py, build_clib.py, config.py} files.NEWLINE c_compiler = NoneNEWLINENEWLINE # extra_{f77,f90}_compile_args are set by build_ext.build_extension methodNEWLINE extra_f77_compile_args = []NEWLINE extra_f90_compile_args = []NEWLINENEWLINE def __init__(self, *args, **kw):NEWLINE CCompiler.__init__(self, *args, **kw)NEWLINE self.distutils_vars = self.distutils_vars.clone(self._environment_hook)NEWLINE self.command_vars = self.command_vars.clone(self._environment_hook)NEWLINE self.flag_vars = self.flag_vars.clone(self._environment_hook)NEWLINE self.executables = self.executables.copy()NEWLINE for e in self._executable_keys:NEWLINE if e not in self.executables:NEWLINE self.executables[e] = NoneNEWLINENEWLINE # Some methods depend on .customize() being called first, soNEWLINE # this keeps track of whether that's happened yet.NEWLINE self._is_customised = FalseNEWLINENEWLINE def __copy__(self):NEWLINE obj = self.__new__(self.__class__)NEWLINE obj.__dict__.update(self.__dict__)NEWLINE obj.distutils_vars = obj.distutils_vars.clone(obj._environment_hook)NEWLINE obj.command_vars = obj.command_vars.clone(obj._environment_hook)NEWLINE obj.flag_vars = obj.flag_vars.clone(obj._environment_hook)NEWLINE obj.executables = obj.executables.copy()NEWLINE return objNEWLINENEWLINE def copy(self):NEWLINE return self.__copy__()NEWLINENEWLINE # Use properties for the attributes used by CCompiler. 
Setting themNEWLINE    # as attributes from the self.executables dictionary is error-prone,NEWLINE    # so we get them from there each time.NEWLINE    def _command_property(key):NEWLINE        def fget(self):NEWLINE            assert self._is_customisedNEWLINE            return self.executables[key]NEWLINE        return property(fget=fget)NEWLINE    version_cmd = _command_property('version_cmd')NEWLINE    compiler_f77 = _command_property('compiler_f77')NEWLINE    compiler_f90 = _command_property('compiler_f90')NEWLINE    compiler_fix = _command_property('compiler_fix')NEWLINE    linker_so = _command_property('linker_so')NEWLINE    linker_exe = _command_property('linker_exe')NEWLINE    archiver = _command_property('archiver')NEWLINE    ranlib = _command_property('ranlib')NEWLINENEWLINE    # Make our terminology consistent.NEWLINE    def set_executable(self, key, value):NEWLINE        self.set_command(key, value)NEWLINENEWLINE    def set_commands(self, **kw):NEWLINE        for k, v in kw.items():NEWLINE            self.set_command(k, v)NEWLINENEWLINE    def set_command(self, key, value):NEWLINE        if key not in self._executable_keys:NEWLINE            raise ValueError(NEWLINE                "unknown executable '%s' for class %s" %NEWLINE                (key, self.__class__.__name__))NEWLINE        if is_string(value):NEWLINE            value = split_quoted(value)NEWLINE        assert value is None or is_sequence_of_strings(value[1:]), (key, value)NEWLINE        self.executables[key] = valueNEWLINENEWLINE    ######################################################################NEWLINE    ## Methods that subclasses may redefine. But don't call these methods!NEWLINE    ## They are private to FCompiler class and may return unexpectedNEWLINE    ## results if used elsewhere. So, you have been warned..NEWLINENEWLINE    def find_executables(self):NEWLINE        """Go through the self.executables dictionary, and attempt toNEWLINE        find and assign appropriate executables.NEWLINENEWLINE        Executable names are looked for in the environment (environmentNEWLINE        variables, the distutils.cfg, and command line), the 0th-element ofNEWLINE        the command list, and the self.possible_executables list.NEWLINENEWLINE        Also, if the 0th element is "<F77>" or "<F90>", the Fortran 77NEWLINE        or the Fortran 90 compiler executable is used, unless overriddenNEWLINE        by an environment setting.NEWLINENEWLINE        Subclasses should call this if overridden.NEWLINE        """NEWLINE        assert self._is_customisedNEWLINE        exe_cache = self._exe_cacheNEWLINE        def cached_find_executable(exe):NEWLINE            if exe in exe_cache:NEWLINE                return exe_cache[exe]NEWLINE            fc_exe = find_executable(exe)NEWLINE            exe_cache[exe] = exe_cache[fc_exe] = fc_exeNEWLINE            return fc_exeNEWLINE        def verify_command_form(name, value):NEWLINE            if value is not None and not is_sequence_of_strings(value):NEWLINE                raise ValueError(NEWLINE                    "%s value %r is invalid in class %s" %NEWLINE                    (name, value, self.__class__.__name__))NEWLINE        def set_exe(exe_key, f77=None, f90=None):NEWLINE            cmd = self.executables.get(exe_key, None)NEWLINE            if not cmd:NEWLINE                return NoneNEWLINE            # Note that we get cmd[0] here if the environment doesn'tNEWLINE            # have anything setNEWLINE            exe_from_environ = getattr(self.command_vars, exe_key)NEWLINE            if not exe_from_environ:NEWLINE                possibles = [f90, f77] + self.possible_executablesNEWLINE            else:NEWLINE                possibles = [exe_from_environ] + self.possible_executablesNEWLINENEWLINE            seen = set()NEWLINE            unique_possibles = []NEWLINE            for e in possibles:NEWLINE                if e == '<F77>':NEWLINE                    e = f77NEWLINE                elif e == '<F90>':NEWLINE                    e = f90NEWLINE                if not e or e in seen:NEWLINE                    continueNEWLINE                seen.add(e)NEWLINE                unique_possibles.append(e)NEWLINENEWLINE            for exe in unique_possibles:NEWLINE                fc_exe = 
cached_find_executable(exe)NEWLINE                if fc_exe:NEWLINE                    cmd[0] = fc_exeNEWLINE                    return fc_exeNEWLINE            self.set_command(exe_key, None)NEWLINE            return NoneNEWLINENEWLINE        ctype = self.compiler_typeNEWLINE        f90 = set_exe('compiler_f90')NEWLINE        if not f90:NEWLINE            f77 = set_exe('compiler_f77')NEWLINE            if f77:NEWLINE                log.warn('%s: no Fortran 90 compiler found' % ctype)NEWLINE            else:NEWLINE                raise CompilerNotFound('%s: neither f90 nor f77 compiler found' % ctype)NEWLINE        else:NEWLINE            f77 = set_exe('compiler_f77', f90=f90)NEWLINE            if not f77:NEWLINE                log.warn('%s: no Fortran 77 compiler found' % ctype)NEWLINE            set_exe('compiler_fix', f90=f90)NEWLINENEWLINE        set_exe('linker_so', f77=f77, f90=f90)NEWLINE        set_exe('linker_exe', f77=f77, f90=f90)NEWLINE        set_exe('version_cmd', f77=f77, f90=f90)NEWLINE        set_exe('archiver')NEWLINE        set_exe('ranlib')NEWLINENEWLINE    def update_executables(self):NEWLINE        """Called at the beginning of customisation. Subclasses shouldNEWLINE        override this if they need to set up the executables dictionary.NEWLINENEWLINE        Note that self.find_executables() is run afterwards, so theNEWLINE        self.executables dictionary values can contain <F77> or <F90> asNEWLINE        the command, which will be replaced by the found F77 or F90NEWLINE        compiler.NEWLINE        """NEWLINE        passNEWLINENEWLINE    def get_flags(self):NEWLINE        """List of flags common to all compiler types."""NEWLINE        return [] + self.pic_flagsNEWLINENEWLINE    def _get_command_flags(self, key):NEWLINE        cmd = self.executables.get(key, None)NEWLINE        if cmd is None:NEWLINE            return []NEWLINE        return cmd[1:]NEWLINENEWLINE    def get_flags_f77(self):NEWLINE        """List of Fortran 77 specific flags."""NEWLINE        return self._get_command_flags('compiler_f77')NEWLINE    def get_flags_f90(self):NEWLINE        """List of Fortran 90 specific flags."""NEWLINE        return self._get_command_flags('compiler_f90')NEWLINE    def get_flags_free(self):NEWLINE        """List of Fortran 90 free format specific flags."""NEWLINE        return []NEWLINE    def get_flags_fix(self):NEWLINE        """List of Fortran 90 fixed format specific flags."""NEWLINE        return self._get_command_flags('compiler_fix')NEWLINE    def get_flags_linker_so(self):NEWLINE        """List of linker flags to build a shared library."""NEWLINE        return self._get_command_flags('linker_so')NEWLINE    def get_flags_linker_exe(self):NEWLINE        """List of linker flags to build an executable."""NEWLINE        return self._get_command_flags('linker_exe')NEWLINE    def get_flags_ar(self):NEWLINE        """List of archiver flags. 
"""NEWLINE return self._get_command_flags('archiver')NEWLINE def get_flags_opt(self):NEWLINE """List of architecture independent compiler flags."""NEWLINE return []NEWLINE def get_flags_arch(self):NEWLINE """List of architecture dependent compiler flags."""NEWLINE return []NEWLINE def get_flags_debug(self):NEWLINE """List of compiler flags to compile with debugging information."""NEWLINE return []NEWLINENEWLINE get_flags_opt_f77 = get_flags_opt_f90 = get_flags_optNEWLINE get_flags_arch_f77 = get_flags_arch_f90 = get_flags_archNEWLINE get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debugNEWLINENEWLINE def get_libraries(self):NEWLINE """List of compiler libraries."""NEWLINE return self.libraries[:]NEWLINE def get_library_dirs(self):NEWLINE """List of compiler library directories."""NEWLINE return self.library_dirs[:]NEWLINENEWLINE def get_version(self, force=False, ok_status=[0]):NEWLINE assert self._is_customisedNEWLINE version = CCompiler.get_version(self, force=force, ok_status=ok_status)NEWLINE if version is None:NEWLINE raise CompilerNotFound()NEWLINE return versionNEWLINENEWLINE ############################################################NEWLINENEWLINE ## Public methods:NEWLINENEWLINE def customize(self, dist = None):NEWLINE """Customize Fortran compiler.NEWLINENEWLINE This method gets Fortran compiler specific information fromNEWLINE (i) class definition, (ii) environment, (iii) distutils configNEWLINE files, and (iv) command line (later overrides earlier).NEWLINENEWLINE This method should be always called after constructing aNEWLINE compiler instance. But not in __init__ because DistributionNEWLINE instance is needed for (iii) and (iv).NEWLINE """NEWLINE log.info('customize %s' % (self.__class__.__name__))NEWLINENEWLINE self._is_customised = TrueNEWLINENEWLINE self.distutils_vars.use_distribution(dist)NEWLINE self.command_vars.use_distribution(dist)NEWLINE self.flag_vars.use_distribution(dist)NEWLINENEWLINE self.update_executables()NEWLINENEWLINE # find_executables takes care of setting the compiler commands,NEWLINE # version_cmd, linker_so, linker_exe, ar, and ranlibNEWLINE self.find_executables()NEWLINENEWLINE noopt = self.distutils_vars.get('noopt', False)NEWLINE noarch = self.distutils_vars.get('noarch', noopt)NEWLINE debug = self.distutils_vars.get('debug', False)NEWLINENEWLINE f77 = self.command_vars.compiler_f77NEWLINE f90 = self.command_vars.compiler_f90NEWLINENEWLINE f77flags = []NEWLINE f90flags = []NEWLINE freeflags = []NEWLINE fixflags = []NEWLINENEWLINE if f77:NEWLINE f77flags = self.flag_vars.f77NEWLINE if f90:NEWLINE f90flags = self.flag_vars.f90NEWLINE freeflags = self.flag_vars.freeNEWLINE # XXX Assuming that free format is default for f90 compiler.NEWLINE fix = self.command_vars.compiler_fixNEWLINE if fix:NEWLINE fixflags = self.flag_vars.fix + f90flagsNEWLINENEWLINE oflags, aflags, dflags = [], [], []NEWLINE # examine get_flags_<tag>_<compiler> for extra flagsNEWLINE # only add them if the method is different from get_flags_<tag>NEWLINE def get_flags(tag, flags):NEWLINE # note that self.flag_vars.<tag> calls self.get_flags_<tag>()NEWLINE flags.extend(getattr(self.flag_vars, tag))NEWLINE this_get = getattr(self, 'get_flags_' + tag)NEWLINE for name, c, flagvar in [('f77', f77, f77flags),NEWLINE ('f90', f90, f90flags),NEWLINE ('f90', fix, fixflags)]:NEWLINE t = '%s_%s' % (tag, name)NEWLINE if c and this_get is not getattr(self, 'get_flags_' + t):NEWLINE flagvar.extend(getattr(self.flag_vars, t))NEWLINE if not noopt:NEWLINE get_flags('opt', oflags)NEWLINE if not 
noarch:NEWLINE get_flags('arch', aflags)NEWLINE if debug:NEWLINE get_flags('debug', dflags)NEWLINENEWLINE fflags = self.flag_vars.flags + dflags + oflags + aflagsNEWLINENEWLINE if f77:NEWLINE self.set_commands(compiler_f77=[f77]+f77flags+fflags)NEWLINE if f90:NEWLINE self.set_commands(compiler_f90=[f90]+freeflags+f90flags+fflags)NEWLINE if fix:NEWLINE self.set_commands(compiler_fix=[fix]+fixflags+fflags)NEWLINENEWLINENEWLINE #XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGSNEWLINE linker_so = self.linker_soNEWLINE if linker_so:NEWLINE linker_so_flags = self.flag_vars.linker_soNEWLINE if sys.platform.startswith('aix'):NEWLINE python_lib = get_python_lib(standard_lib=1)NEWLINE ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')NEWLINE python_exp = os.path.join(python_lib, 'config', 'python.exp')NEWLINE linker_so = [ld_so_aix] + linker_so + ['-bI:'+python_exp]NEWLINE self.set_commands(linker_so=linker_so+linker_so_flags)NEWLINENEWLINE linker_exe = self.linker_exeNEWLINE if linker_exe:NEWLINE linker_exe_flags = self.flag_vars.linker_exeNEWLINE self.set_commands(linker_exe=linker_exe+linker_exe_flags)NEWLINENEWLINE ar = self.command_vars.archiverNEWLINE if ar:NEWLINE arflags = self.flag_vars.arNEWLINE self.set_commands(archiver=[ar]+arflags)NEWLINENEWLINE self.set_library_dirs(self.get_library_dirs())NEWLINE self.set_libraries(self.get_libraries())NEWLINENEWLINE def dump_properties(self):NEWLINE """Print out the attributes of a compiler instance."""NEWLINE props = []NEWLINE for key in list(self.executables.keys()) + \NEWLINE ['version', 'libraries', 'library_dirs',NEWLINE 'object_switch', 'compile_switch']:NEWLINE if hasattr(self, key):NEWLINE v = getattr(self, key)NEWLINE props.append((key, None, '= '+repr(v)))NEWLINE props.sort()NEWLINENEWLINE pretty_printer = FancyGetopt(props)NEWLINE for l in pretty_printer.generate_help("%s instance properties:" \NEWLINE % (self.__class__.__name__)):NEWLINE if l[:4]==' --':NEWLINE l = ' ' + l[4:]NEWLINE print(l)NEWLINENEWLINE ###################NEWLINENEWLINE def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):NEWLINE """Compile 'src' to product 'obj'."""NEWLINE src_flags = {}NEWLINE if is_f_file(src) and not has_f90_header(src):NEWLINE flavor = ':f77'NEWLINE compiler = self.compiler_f77NEWLINE src_flags = get_f77flags(src)NEWLINE extra_compile_args = self.extra_f77_compile_args or []NEWLINE elif is_free_format(src):NEWLINE flavor = ':f90'NEWLINE compiler = self.compiler_f90NEWLINE if compiler is None:NEWLINE raise DistutilsExecError('f90 not supported by %s needed for %s'\NEWLINE % (self.__class__.__name__, src))NEWLINE extra_compile_args = self.extra_f90_compile_args or []NEWLINE else:NEWLINE flavor = ':fix'NEWLINE compiler = self.compiler_fixNEWLINE if compiler is None:NEWLINE raise DistutilsExecError('f90 (fixed) not supported by %s needed for %s'\NEWLINE % (self.__class__.__name__, src))NEWLINE extra_compile_args = self.extra_f90_compile_args or []NEWLINE if self.object_switch[-1]==' ':NEWLINE o_args = [self.object_switch.strip(), obj]NEWLINE else:NEWLINE o_args = [self.object_switch.strip()+obj]NEWLINENEWLINE assert self.compile_switch.strip()NEWLINE s_args = [self.compile_switch, src]NEWLINENEWLINE if extra_compile_args:NEWLINE log.info('extra %s options: %r' \NEWLINE % (flavor[1:], ' '.join(extra_compile_args)))NEWLINENEWLINE extra_flags = src_flags.get(self.compiler_type, [])NEWLINE if extra_flags:NEWLINE log.info('using compile options from source: %r' \NEWLINE % ' '.join(extra_flags))NEWLINENEWLINE command = compiler 
+ cc_args + extra_flags + s_args + o_args \NEWLINE + extra_postargs + extra_compile_argsNEWLINENEWLINE display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,NEWLINE src)NEWLINE try:NEWLINE self.spawn(command, display=display)NEWLINE except DistutilsExecError:NEWLINE msg = str(get_exception())NEWLINE raise CompileError(msg)NEWLINENEWLINE def module_options(self, module_dirs, module_build_dir):NEWLINE options = []NEWLINE if self.module_dir_switch is not None:NEWLINE if self.module_dir_switch[-1]==' ':NEWLINE options.extend([self.module_dir_switch.strip(), module_build_dir])NEWLINE else:NEWLINE options.append(self.module_dir_switch.strip()+module_build_dir)NEWLINE else:NEWLINE print('XXX: module_build_dir=%r option ignored' % (module_build_dir))NEWLINE print('XXX: Fix module_dir_switch for ', self.__class__.__name__)NEWLINE if self.module_include_switch is not None:NEWLINE for d in [module_build_dir]+module_dirs:NEWLINE options.append('%s%s' % (self.module_include_switch, d))NEWLINE else:NEWLINE print('XXX: module_dirs=%r option ignored' % (module_dirs))NEWLINE print('XXX: Fix module_include_switch for ', self.__class__.__name__)NEWLINE return optionsNEWLINENEWLINE def library_option(self, lib):NEWLINE return "-l" + libNEWLINE def library_dir_option(self, dir):NEWLINE return "-L" + dirNEWLINENEWLINE def link(self, target_desc, objects,NEWLINE output_filename, output_dir=None, libraries=None,NEWLINE library_dirs=None, runtime_library_dirs=None,NEWLINE export_symbols=None, debug=0, extra_preargs=None,NEWLINE extra_postargs=None, build_temp=None, target_lang=None):NEWLINE objects, output_dir = self._fix_object_args(objects, output_dir)NEWLINE libraries, library_dirs, runtime_library_dirs = \NEWLINE self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)NEWLINENEWLINE lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,NEWLINE libraries)NEWLINE if is_string(output_dir):NEWLINE output_filename = os.path.join(output_dir, output_filename)NEWLINE elif output_dir is not None:NEWLINE raise TypeError("'output_dir' must be a string or None")NEWLINENEWLINE if self._need_link(objects, output_filename):NEWLINE if self.library_switch[-1]==' ':NEWLINE o_args = [self.library_switch.strip(), output_filename]NEWLINE else:NEWLINE o_args = [self.library_switch.strip()+output_filename]NEWLINENEWLINE if is_string(self.objects):NEWLINE ld_args = objects + [self.objects]NEWLINE else:NEWLINE ld_args = objects + self.objectsNEWLINE ld_args = ld_args + lib_opts + o_argsNEWLINE if debug:NEWLINE ld_args[:0] = ['-g']NEWLINE if extra_preargs:NEWLINE ld_args[:0] = extra_preargsNEWLINE if extra_postargs:NEWLINE ld_args.extend(extra_postargs)NEWLINE self.mkpath(os.path.dirname(output_filename))NEWLINE if target_desc == CCompiler.EXECUTABLE:NEWLINE linker = self.linker_exe[:]NEWLINE else:NEWLINE linker = self.linker_so[:]NEWLINE command = linker + ld_argsNEWLINE try:NEWLINE self.spawn(command)NEWLINE except DistutilsExecError:NEWLINE msg = str(get_exception())NEWLINE raise LinkError(msg)NEWLINE else:NEWLINE log.debug("skipping %s (up-to-date)", output_filename)NEWLINENEWLINE def _environment_hook(self, name, hook_name):NEWLINE if hook_name is None:NEWLINE return NoneNEWLINE if is_string(hook_name):NEWLINE if hook_name.startswith('self.'):NEWLINE hook_name = hook_name[5:]NEWLINE hook = getattr(self, hook_name)NEWLINE return hook()NEWLINE elif hook_name.startswith('exe.'):NEWLINE hook_name = hook_name[4:]NEWLINE var = self.executables[hook_name]NEWLINE if var:NEWLINE return var[0]NEWLINE 
else:NEWLINE                    return NoneNEWLINE            elif hook_name.startswith('flags.'):NEWLINE                hook_name = hook_name[6:]NEWLINE                hook = getattr(self, 'get_flags_' + hook_name)NEWLINE                return hook()NEWLINE        else:NEWLINE            return hook_name()NEWLINENEWLINE## class FCompilerNEWLINENEWLINE_default_compilers = (NEWLINE    # sys.platform mappingsNEWLINE    ('win32', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95',NEWLINE               'intelvem', 'intelem')),NEWLINE    ('cygwin.*', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95')),NEWLINE    ('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'absoft', 'nag', 'vast', 'compaq',NEWLINE                 'intele', 'intelem', 'gnu', 'g95', 'pathf95')),NEWLINE    ('darwin.*', ('gnu95', 'nag', 'absoft', 'ibm', 'intel', 'gnu', 'g95', 'pg')),NEWLINE    ('sunos.*', ('sun', 'gnu', 'gnu95', 'g95')),NEWLINE    ('irix.*', ('mips', 'gnu', 'gnu95',)),NEWLINE    ('aix.*', ('ibm', 'gnu', 'gnu95',)),NEWLINE    # os.name mappingsNEWLINE    ('posix', ('gnu', 'gnu95',)),NEWLINE    ('nt', ('gnu', 'gnu95',)),NEWLINE    ('mac', ('gnu95', 'gnu', 'pg')),NEWLINE    )NEWLINENEWLINEfcompiler_class = NoneNEWLINEfcompiler_aliases = NoneNEWLINENEWLINEdef load_all_fcompiler_classes():NEWLINE    """Cache all the FCompiler classes found in modules in theNEWLINE    numpy.distutils.fcompiler package.NEWLINE    """NEWLINE    from glob import globNEWLINE    global fcompiler_class, fcompiler_aliasesNEWLINE    if fcompiler_class is not None:NEWLINE        returnNEWLINE    pys = os.path.join(os.path.dirname(__file__), '*.py')NEWLINE    fcompiler_class = {}NEWLINE    fcompiler_aliases = {}NEWLINE    for fname in glob(pys):NEWLINE        module_name, ext = os.path.splitext(os.path.basename(fname))NEWLINE        module_name = 'numpy.distutils.fcompiler.' + module_nameNEWLINE        __import__(module_name)NEWLINE        module = sys.modules[module_name]NEWLINE        if hasattr(module, 'compilers'):NEWLINE            for cname in module.compilers:NEWLINE                klass = getattr(module, cname)NEWLINE                desc = (klass.compiler_type, klass, klass.description)NEWLINE                fcompiler_class[klass.compiler_type] = descNEWLINE                for alias in klass.compiler_aliases:NEWLINE                    if alias in fcompiler_aliases:NEWLINE                        raise ValueError("alias %r defined for both %s and %s"NEWLINE                                         % (alias, klass.__name__,NEWLINE                                            fcompiler_aliases[alias][1].__name__))NEWLINE                    fcompiler_aliases[alias] = descNEWLINENEWLINEdef _find_existing_fcompiler(compiler_types,NEWLINE                             osname=None, platform=None,NEWLINE                             requiref90=False,NEWLINE                             c_compiler=None):NEWLINE    from numpy.distutils.core import get_distributionNEWLINE    dist = get_distribution(always=True)NEWLINE    for compiler_type in compiler_types:NEWLINE        v = NoneNEWLINE        try:NEWLINE            c = new_fcompiler(plat=platform, compiler=compiler_type,NEWLINE                              c_compiler=c_compiler)NEWLINE            c.customize(dist)NEWLINE            v = c.get_version()NEWLINE            if requiref90 and c.compiler_f90 is None:NEWLINE                v = NoneNEWLINE                new_compiler = c.suggested_f90_compilerNEWLINE                if new_compiler:NEWLINE                    # note the argument order: we try new_compiler, asNEWLINE                    # suggested by compiler_typeNEWLINE                    log.warn('Trying %r compiler as suggested by %r 'NEWLINE                             'compiler for f90 support.' % (new_compiler,NEWLINE                                                            compiler_type))NEWLINE                    c = new_fcompiler(plat=platform, compiler=new_compiler,NEWLINE                                      c_compiler=c_compiler)NEWLINE                    c.customize(dist)NEWLINE                    v = c.get_version()NEWLINE                    if v is not None:NEWLINE                        compiler_type = new_compilerNEWLINE            if requiref90 and c.compiler_f90 is None:NEWLINE                raise ValueError('%s does not support compiling f90 codes, 'NEWLINE                                 'skipping.' 
% (c.__class__.__name__))NEWLINE except DistutilsModuleError:NEWLINE log.debug("_find_existing_fcompiler: compiler_type='%s' raised DistutilsModuleError", compiler_type)NEWLINE except CompilerNotFound:NEWLINE log.debug("_find_existing_fcompiler: compiler_type='%s' not found", compiler_type)NEWLINE if v is not None:NEWLINE return compiler_typeNEWLINE return NoneNEWLINENEWLINEdef available_fcompilers_for_platform(osname=None, platform=None):NEWLINE if osname is None:NEWLINE osname = os.nameNEWLINE if platform is None:NEWLINE platform = sys.platformNEWLINE matching_compiler_types = []NEWLINE for pattern, compiler_type in _default_compilers:NEWLINE if re.match(pattern, platform) or re.match(pattern, osname):NEWLINE for ct in compiler_type:NEWLINE if ct not in matching_compiler_types:NEWLINE matching_compiler_types.append(ct)NEWLINE if not matching_compiler_types:NEWLINE matching_compiler_types.append('gnu')NEWLINE return matching_compiler_typesNEWLINENEWLINEdef get_default_fcompiler(osname=None, platform=None, requiref90=False,NEWLINE c_compiler=None):NEWLINE """Determine the default Fortran compiler to use for the givenNEWLINE platform."""NEWLINE matching_compiler_types = available_fcompilers_for_platform(osname,NEWLINE platform)NEWLINE compiler_type = _find_existing_fcompiler(matching_compiler_types,NEWLINE osname=osname,NEWLINE platform=platform,NEWLINE requiref90=requiref90,NEWLINE c_compiler=c_compiler)NEWLINE return compiler_typeNEWLINENEWLINE# Flag to avoid rechecking for Fortran compiler every timeNEWLINEfailed_fcompiler = FalseNEWLINENEWLINEdef new_fcompiler(plat=None,NEWLINE compiler=None,NEWLINE verbose=0,NEWLINE dry_run=0,NEWLINE force=0,NEWLINE requiref90=False,NEWLINE c_compiler = None):NEWLINE """Generate an instance of some FCompiler subclass for the suppliedNEWLINE platform/compiler combination.NEWLINE """NEWLINE global failed_fcompilerNEWLINE if failed_fcompiler:NEWLINE return NoneNEWLINENEWLINE load_all_fcompiler_classes()NEWLINE if plat is None:NEWLINE plat = os.nameNEWLINE if compiler is None:NEWLINE compiler = get_default_fcompiler(plat, requiref90=requiref90,NEWLINE c_compiler=c_compiler)NEWLINE if compiler in fcompiler_class:NEWLINE module_name, klass, long_description = fcompiler_class[compiler]NEWLINE elif compiler in fcompiler_aliases:NEWLINE module_name, klass, long_description = fcompiler_aliases[compiler]NEWLINE else:NEWLINE msg = "don't know how to compile Fortran code on platform '%s'" % platNEWLINE if compiler is not None:NEWLINE msg = msg + " with '%s' compiler." 
% compilerNEWLINE        msg = msg + " Supported compilers are: %s." \NEWLINE              % (','.join(fcompiler_class.keys()))NEWLINE        log.warn(msg)NEWLINE        failed_fcompiler = TrueNEWLINE        return NoneNEWLINENEWLINE    compiler = klass(verbose=verbose, dry_run=dry_run, force=force)NEWLINE    compiler.c_compiler = c_compilerNEWLINE    return compilerNEWLINENEWLINEdef show_fcompilers(dist=None):NEWLINE    """Print list of available compilers (used by the "--help-fcompiler"NEWLINE    option to "config_fc").NEWLINE    """NEWLINE    if dist is None:NEWLINE        from distutils.dist import DistributionNEWLINE        from numpy.distutils.command.config_compiler import config_fcNEWLINE        dist = Distribution()NEWLINE        dist.script_name = os.path.basename(sys.argv[0])NEWLINE        dist.script_args = ['config_fc'] + sys.argv[1:]NEWLINE        try:NEWLINE            dist.script_args.remove('--help-fcompiler')NEWLINE        except ValueError:NEWLINE            passNEWLINE        dist.cmdclass['config_fc'] = config_fcNEWLINE        dist.parse_config_files()NEWLINE        dist.parse_command_line()NEWLINE    compilers = []NEWLINE    compilers_na = []NEWLINE    compilers_ni = []NEWLINE    if not fcompiler_class:NEWLINE        load_all_fcompiler_classes()NEWLINE    platform_compilers = available_fcompilers_for_platform()NEWLINE    for compiler in platform_compilers:NEWLINE        v = NoneNEWLINE        log.set_verbosity(-2)NEWLINE        try:NEWLINE            c = new_fcompiler(compiler=compiler, verbose=dist.verbose)NEWLINE            c.customize(dist)NEWLINE            v = c.get_version()NEWLINE        except (DistutilsModuleError, CompilerNotFound):NEWLINE            e = get_exception()NEWLINE            log.debug("show_fcompilers: %s not found" % (compiler,))NEWLINE            log.debug(repr(e))NEWLINENEWLINE        if v is None:NEWLINE            compilers_na.append(("fcompiler="+compiler, None,NEWLINE                                 fcompiler_class[compiler][2]))NEWLINE        else:NEWLINE            c.dump_properties()NEWLINE            compilers.append(("fcompiler="+compiler, None,NEWLINE                              fcompiler_class[compiler][2] + ' (%s)' % v))NEWLINENEWLINE    compilers_ni = list(set(fcompiler_class.keys()) - set(platform_compilers))NEWLINE    compilers_ni = [("fcompiler="+fc, None, fcompiler_class[fc][2])NEWLINE                    for fc in compilers_ni]NEWLINENEWLINE    compilers.sort()NEWLINE    compilers_na.sort()NEWLINE    compilers_ni.sort()NEWLINE    pretty_printer = FancyGetopt(compilers)NEWLINE    pretty_printer.print_help("Fortran compilers found:")NEWLINE    pretty_printer = FancyGetopt(compilers_na)NEWLINE    pretty_printer.print_help("Compilers available for this "NEWLINE                              "platform, but not found:")NEWLINE    if compilers_ni:NEWLINE        pretty_printer = FancyGetopt(compilers_ni)NEWLINE        pretty_printer.print_help("Compilers not available on this platform:")NEWLINE    print("For compiler details, run 'config_fc --verbose' setup command.")NEWLINENEWLINENEWLINEdef dummy_fortran_file():NEWLINE    fo, name = make_temp_file(suffix='.f')NEWLINE    fo.write("      subroutine dummy()\n      end\n")NEWLINE    fo.close()NEWLINE    return name[:-2]NEWLINENEWLINENEWLINEis_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).matchNEWLINE_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).searchNEWLINE_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).searchNEWLINE_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).searchNEWLINE_free_f90_start = re.compile(r'[^c*!]\s*[^\s\d\t]', re.I).matchNEWLINENEWLINEdef is_free_format(file):NEWLINE    """Check if file is in free format Fortran."""NEWLINE    # f90 allows both fixed and free format, assuming fixed unlessNEWLINE    # signs of free format are detected.NEWLINE    result = 0NEWLINE    f = open_latin1(file, 'r')NEWLINE    line = f.readline()NEWLINE    n = 10000  # the number of non-comment lines to scan for hintsNEWLINE    if _has_f_header(line):NEWLINE        n = 
0NEWLINE elif _has_f90_header(line):NEWLINE n = 0NEWLINE result = 1NEWLINE while n>0 and line:NEWLINE line = line.rstrip()NEWLINE if line and line[0]!='!':NEWLINE n -= 1NEWLINE if (line[0]!='\t' and _free_f90_start(line[:5])) or line[-1:]=='&':NEWLINE result = 1NEWLINE breakNEWLINE line = f.readline()NEWLINE f.close()NEWLINE return resultNEWLINENEWLINEdef has_f90_header(src):NEWLINE f = open_latin1(src, 'r')NEWLINE line = f.readline()NEWLINE f.close()NEWLINE return _has_f90_header(line) or _has_fix_header(line)NEWLINENEWLINE_f77flags_re = re.compile(r'(c|)f77flags\s*\(\s*(?P<fcname>\w+)\s*\)\s*=\s*(?P<fflags>.*)', re.I)NEWLINEdef get_f77flags(src):NEWLINE """NEWLINE Search the first 20 lines of fortran 77 code for line patternNEWLINE `CF77FLAGS(<fcompiler type>)=<f77 flags>`NEWLINE Return a dictionary {<fcompiler type>:<f77 flags>}.NEWLINE """NEWLINE flags = {}NEWLINE f = open_latin1(src, 'r')NEWLINE i = 0NEWLINE for line in f:NEWLINE i += 1NEWLINE if i>20: breakNEWLINE m = _f77flags_re.match(line)NEWLINE if not m: continueNEWLINE fcname = m.group('fcname').strip()NEWLINE fflags = m.group('fflags').strip()NEWLINE flags[fcname] = split_quoted(fflags)NEWLINE f.close()NEWLINE return flagsNEWLINENEWLINE# TODO: implement get_f90flags and use it in _compile similarly to get_f77flagsNEWLINENEWLINEif __name__ == '__main__':NEWLINE show_fcompilers()NEWLINE
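As a usage sketch of the public entry points defined above (assuming numpy.distutils is importable and some Fortran compiler is installed), the documented flow is: build an instance with new_fcompiler(), call customize() to finalise commands and flags, then query it:

from numpy.distutils.fcompiler import new_fcompiler

fc = new_fcompiler(requiref90=True)  # pick a default for this platform
if fc is None:
    print('no Fortran compiler found')
else:
    fc.customize()                   # must be called before using the instance
    print(fc.compiler_type, fc.get_version())
    print(fc.compiler_f90)           # full command list, e.g. ['gfortran', ...]

customize() is what reads the environment (F90, FFLAGS, ...) and config_fc settings, which is why the command/flag attributes are only valid afterwards.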
formatter = "%r %r %r %r"NEWLINEprint (formatter % (1 ,2 ,3 , 4))NEWLINEprint (formatter % ("one","two","three","four"))NEWLINEprint (formatter % (True, False, False, True))NEWLINEprint (formatter % (formatter ,formatter, formatter, formatter))NEWLINEprint (formatter % NEWLINE ("I had this thing.",NEWLINE "That you could type up right.",NEWLINE "But it didn't sing.",NEWLINE "So I said goodnight.")NEWLINE )NEWLINE NEWLINE
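For reference, %r interpolates the repr() of each argument, which is why the strings above print with quotes while the numbers do not. The modern equivalent uses f-strings with the !r conversion:

a, b, c, d = 1, 2, 3, 4
print(f"{a!r} {b!r} {c!r} {d!r}")
w, x, y, z = "one", "two", "three", "four"
print(f"{w!r} {x!r} {y!r} {z!r}")  # 'one' 'two' 'three' 'four'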
# simple_xls.py: write simple Excel spreadsheetsNEWLINE# Copyright (C) University of Manchester 2013-2019 Peter BriggsNEWLINE#NEWLINE########################################################################NEWLINE#NEWLINE# simple_xls.pyNEWLINE#NEWLINE#########################################################################NEWLINENEWLINE"""NEWLINESimple spreadsheet module intended to provide a nicer programmatic interfaceNEWLINEto Excel spreadsheet generation.NEWLINENEWLINEIt is currently built on top of Spreadsheet.py, which itself uses the xlwt,NEWLINExlrd and xlutils modules. In future the relevant parts may be rewritten toNEWLINEremove the dependence on Spreadsheet.py and call the appropriate xl* classesNEWLINEand functions directly.NEWLINENEWLINEExample usageNEWLINE-------------NEWLINENEWLINEStart by making a workbook, represented by an XLSWorkBook object:NEWLINENEWLINE>>> wb = XLSWorkBook("Test")NEWLINENEWLINEThen add worksheets to this:NEWLINENEWLINE>>> wb.add_work_sheet('test')NEWLINE>>> wb.add_work_sheet('data',"My Data")NEWLINENEWLINEWorksheets have an id and an optional title. Ids must be unique and canNEWLINEbe used to fetch the XLSWorkSheet object that represents the worksheet:NEWLINENEWLINE>>> data = wb.worksheet['data']NEWLINENEWLINECells can be addressed directly using various notations:NEWLINENEWLINE>>> data['A1'] = "Column 1"NEWLINE>>> data['A']['1'] = "Updated value"NEWLINE>>> data['AZ']['3'] = "Another value"NEWLINENEWLINEThe extent of the sheet is defined by the outermost populated rows andNEWLINEcolumns:NEWLINENEWLINE>>> data.last_column # outermost populated columnNEWLINE>>> data.last_row # outermost populated rowNEWLINENEWLINEThere are various other methods for returning the next row or column; seeNEWLINEthe documentation for the XLSWorkSheet class.NEWLINENEWLINEData can be added cell-wise (i.e. referencing individual cells as above),NEWLINErow-wise, column-wise and block-wise.NEWLINENEWLINEColumn-wise operations include inserting a column (shifting columns aboveNEWLINEit along one to make space):NEWLINENEWLINE>>> data.insert_column('B',data=['hello','goodbye','whatev'])NEWLINENEWLINEAppend a column (writing data to the first empty column at the end ofNEWLINEthe sheet):NEWLINENEWLINE>>> data.append_column(data=['hello','goodbye','whatev'])NEWLINENEWLINEWrite data to a column, overwriting any existing values:NEWLINENEWLINE>>> data.write_column(data=['hello','goodbye','whatev'])NEWLINENEWLINEData can be specified as a list, text or as a single value which isNEWLINErepeated for each cell (i.e. a "fill" value).NEWLINENEWLINESimilar row-wise operations also exist:NEWLINENEWLINE>>> data.insert_row(4,data=['Dozy','Beaky','Mick','Titch'])NEWLINE>>> data.append_row(data=['Dozy','Beaky','Mick','Titch'])NEWLINE>>> data.write_row(4,data=['Dozy','Beaky','Mick','Titch'])NEWLINENEWLINEBlock-wise data can be added via a tab and newline-delimited string:NEWLINENEWLINE>>> data.insert_block_data("This\tis\t\tsome\n\trandom\n\tdata")NEWLINE>>> data.insert_block_data("This\tis\t\tsome\n\tMORE\trandom\n\tdata",NEWLINE...                        col='M',row=7)NEWLINENEWLINEFormulae can be specified by prefixing a '=' symbol to the start of theNEWLINEcell contents, e.g.:NEWLINENEWLINE>>> data['A3'] = '=A1+A2'NEWLINENEWLINE'?' 
and '#' are special characters that can be used to indicate 'currentNEWLINErow' and 'current column' respectively, e.g.:NEWLINENEWLINE>>> data.fill_column('A','=B?+C?') # evaluates to 'B1+C1' (A1), 'B2+C2' (A2) etcNEWLINENEWLINEStyling and formatting information can be associated with a cell, eitherNEWLINEwhen adding column, row or block data or by using the 'set_style' method.NEWLINEIn each case the styling information is passed via an XLSStyle object, e.g.NEWLINENEWLINE>>> data.set_style(XLSStyle(number_format=NumberFormats.PERCENTAGE),'A3')NEWLINENEWLINEThe workbook can be saved to file:NEWLINENEWLINE>>> wb.save_as_xls('test.xls')NEWLINENEWLINEAlternatively the contents of a sheet (or a subset) can be rendered as text:NEWLINENEWLINE>>> data.render_as_text(include_columns_and_rows=True,NEWLINE...                     eval_formulae=True,NEWLINE...                     include_styles=True)NEWLINE>>> data.render_as_text(start='B1',end='C6',include_columns_and_rows=True)NEWLINENEWLINE"""NEWLINENEWLINE#######################################################################NEWLINE# Import modules that this module depends onNEWLINE#######################################################################NEWLINENEWLINEfrom builtins import strNEWLINEimport reNEWLINEtry:NEWLINE    from collections.abc import IteratorNEWLINEexcept ImportError:NEWLINE    from collections import IteratorNEWLINEimport loggingNEWLINEimport xlsxwriterNEWLINEfrom . import SpreadsheetNEWLINEfrom .utils import OrderedDictionaryNEWLINEfrom builtins import rangeNEWLINENEWLINE#######################################################################NEWLINE# ConstantsNEWLINE#######################################################################NEWLINENEWLINE# Value to assign to failed evaluationsNEWLINEBAD_REF="## !REF ##"NEWLINENEWLINE# Number formatsNEWLINEclass NumberFormats(object):NEWLINE    THOUSAND_SEPARATOR=0NEWLINE    PERCENTAGE=1NEWLINENEWLINE# Spreadsheet limitsNEWLINEclass XLSLimits(object):NEWLINE    """NEWLINE    Limits for XLS filesNEWLINE    """NEWLINE    MAX_LEN_WORKSHEET_TITLE = 31 # Max worksheet title lengthNEWLINE    MAX_LEN_WORKSHEET_CELL_VALUE = 250 # Maximum no. of characters in cellNEWLINE    MAX_NUMBER_ROWS_PER_WORKSHEET = 65536 # Max number of rows per worksheetNEWLINE    MAX_NUMBER_COLS_PER_WORKSHEET = 256 # Max number of columns per worksheetNEWLINENEWLINEclass XLSXLimits(object):NEWLINE    """NEWLINE    Limits for XLSX filesNEWLINE    """NEWLINE    MAX_LEN_WORKSHEET_TITLE = 31 # Max worksheet title lengthNEWLINE    MAX_LEN_WORKSHEET_CELL_VALUE = 1024 # Maximum no. 
of characters in cellNEWLINE    MAX_NUMBER_ROWS_PER_WORKSHEET = 1048576 # Max number of rows per worksheetNEWLINE    MAX_NUMBER_COLS_PER_WORKSHEET = 16384 # Max number of columns per worksheetNEWLINENEWLINEclass Limits(XLSLimits):NEWLINE    """NEWLINE    Limits for XLS files (kept for backwards compatibility)NEWLINE    """NEWLINENEWLINE#######################################################################NEWLINE# Class definitionsNEWLINE#######################################################################NEWLINENEWLINEclass XLSWorkBook(object):NEWLINE    """Class for creating an Excel (xls) spreadsheetNEWLINENEWLINE    An XLSWorkBook instance provides an interface to creating anNEWLINE    Excel spreadsheet.NEWLINENEWLINE    It consists of a collection of XLSWorkSheet objects, eachNEWLINE    of which represents a sheet in the workbook.NEWLINENEWLINE    Sheets are created and appended using the add_work_sheetNEWLINE    method:NEWLINENEWLINE    >>> xls = XLSWorkBook()NEWLINE    >>> sheet = xls.add_work_sheet('example')NEWLINENEWLINE    Sheets are kept in the 'worksheet' property and can be acquiredNEWLINE    by name:NEWLINENEWLINE    >>> sheet = xls.worksheet['example']NEWLINENEWLINE    Once the worksheet(s) have been populated an XLS file can beNEWLINE    created using the 'save_as_xls' method:NEWLINENEWLINE    >>> xls.save_as_xls('example.xls')NEWLINENEWLINE    """NEWLINE    def __init__(self,title=None):NEWLINE        """Create a new XLSWorkBook instanceNEWLINENEWLINE        Arguments:NEWLINE          title: optional, a title for the work bookNEWLINENEWLINE        """NEWLINE        self.title = titleNEWLINE        self.worksheet = OrderedDictionary()NEWLINENEWLINE    def add_work_sheet(self,name,title=None):NEWLINE        """Create and append a new worksheetNEWLINENEWLINE        Creates a new XLSWorkSheet object and appends itNEWLINE        to the workbook.NEWLINENEWLINE        Arguments:NEWLINE          name: unique name for the worksheetNEWLINE          title: optional, title for the worksheet - defaults toNEWLINE            the name.NEWLINENEWLINE        Returns:NEWLINE          New XLSWorkSheet object.NEWLINENEWLINE        """NEWLINE        if name in self.worksheet:NEWLINE            raise KeyError("Worksheet called '%s' already exists" %NEWLINE                           name)NEWLINE        if title is None:NEWLINE            title = nameNEWLINE        self.worksheet[name] = XLSWorkSheet(title)NEWLINE        return self.worksheet[name]NEWLINENEWLINE    def save_as_xls(self,filen):NEWLINE        """Output the workbook contents to an Excel-format fileNEWLINENEWLINE        Arguments:NEWLINE          filen: name of the file to write the workbook to.NEWLINENEWLINE        """NEWLINE        xls = Spreadsheet.Workbook()NEWLINE        for name in self.worksheet:NEWLINE            worksheet = self.worksheet[name]NEWLINE            ws = xls.addSheet(worksheet.title)NEWLINE            ws.addText(worksheet.render_as_text(include_styles=True))NEWLINE            if worksheet.freeze_panes is not None:NEWLINE                col = column_index_to_integer(NEWLINE                    CellIndex(worksheet.freeze_panes).column)NEWLINE                row = CellIndex(worksheet.freeze_panes).row - 1NEWLINE                ws.freezePanes(column=col,row=row)NEWLINE        xls.save(filen)NEWLINENEWLINE    def save_as_xlsx(self,filen):NEWLINE        """Output the workbook contents to an XLSX-format fileNEWLINENEWLINE        Arguments:NEWLINE          filen: name of the file to write the workbook to.NEWLINENEWLINE        """NEWLINE        xlsx = xlsxwriter.Workbook(filen)NEWLINE        styles = {}NEWLINE        default_min_col_width = 7NEWLINE        for name in self.worksheet:NEWLINE            worksheet = self.worksheet[name]NEWLINE            ws = xlsx.add_worksheet(worksheet.title)NEWLINE            # Write content to worksheet cell by cellNEWLINE            start = CellIndex('A1')NEWLINE            end = CellIndex(cell(worksheet.last_column,NEWLINE                                 worksheet.last_row))NEWLINE            for col in ColumnRange(start.column,end.column):NEWLINE                # Maximum column widthNEWLINE                
max_width = default_min_col_widthNEWLINE                for row in range(start.row,end.row+1):NEWLINE                    # Get the valueNEWLINE                    value = worksheet.render_cell(cell(col,row),NEWLINE                                                  eval_formulae=False,NEWLINE                                                  apply_format=False)NEWLINE                    # Handle styles for this cellNEWLINE                    style = worksheet.get_style(cell(col,row))NEWLINE                    if style:NEWLINE                        style_name = style.nameNEWLINE                        try:NEWLINE                            xlsx_fmt = styles[style_name]NEWLINE                        except KeyError:NEWLINE                            xlsx_fmt = xlsx.add_format()NEWLINE                            if style.bold:NEWLINE                                xlsx_fmt.set_bold()NEWLINE                            if style.color is not None:NEWLINE                                xlsx_fmt.set_color(style.color)NEWLINE                            if style.bgcolor is not None:NEWLINE                                xlsx_fmt.set_bg_color(style.bgcolor)NEWLINE                            if style.font_size is not None:NEWLINE                                xlsx_fmt.set_font_size(style.font_size)NEWLINE                            styles[style_name] = xlsx_fmtNEWLINE                    else:NEWLINE                        xlsx_fmt = NoneNEWLINE                    # Deal with cell contentNEWLINE                    if value.startswith('='):NEWLINE                        # Cell contains formulaNEWLINE                        result = eval_formula(value,worksheet)NEWLINE                        ws.write_formula(cell(col,row),value,xlsx_fmt,result)NEWLINE                        col_width = len(str(result))NEWLINE                    else:NEWLINE                        # Deal with a data itemNEWLINE                        # Attempt to convert to a number typeNEWLINE                        # i.e. integer/floatNEWLINE                        try:NEWLINE                            # Try integerNEWLINE                            value = int(str(value))NEWLINE                        except ValueError:NEWLINE                            # Not an integer, try floatNEWLINE                            try:NEWLINE                                value = float(str(value))NEWLINE                            except ValueError:NEWLINE                                # Not a float eitherNEWLINE                                passNEWLINE                        # Write to the worksheetNEWLINE                        ws.write(cell(col,row),value,xlsx_fmt)NEWLINE                        col_width = len(str(value))NEWLINE                    # Handle column widthsNEWLINE                    max_width = max(max_width,col_width)NEWLINE                # Set the column widthNEWLINE                icol = column_index_to_integer(col)NEWLINE                ws.set_column(icol,icol,max_width*1.2)NEWLINE            # Handle freeze panesNEWLINE            if worksheet.freeze_panes is not None:NEWLINE                ws.freeze_panes(worksheet.freeze_panes)NEWLINE        xlsx.close()NEWLINENEWLINEclass XLSWorkSheet(object):NEWLINE    """Class for creating sheets within an XLS workbook.NEWLINENEWLINE    XLSWorkSheet objects represent a sheet within an ExcelNEWLINE    workbook.NEWLINENEWLINE    Cells are addressed within the sheet using Excel notationNEWLINE    i.e. <column><row> (columns start at index 'A' and rows atNEWLINE    '1', examples are 'A1' or 'D19'):NEWLINENEWLINE    >>> ws = XLSWorkSheet('example')NEWLINE    >>> ws['A1'] = 'some data'NEWLINE    >>> value = ws['A1']NEWLINENEWLINE    If there is no data stored for the cell then 'None' isNEWLINE    returned. Any cell can be addressed without errors.NEWLINENEWLINE    Data can also be added column-wise, row-wise or as aNEWLINE    "block" of tab- and new-line delimited data:NEWLINENEWLINE    >>> ws.insert_column_data('B',[1,2,3])NEWLINE    >>> ws.insert_row_data(4,['x','y','z'])NEWLINE    >>> ws.insert_block_data("This\tis\nthe\tdata")NEWLINENEWLINE    A column can be "filled" with a single repeating value:NEWLINENEWLINE    >>> ws.fill_column('D','single value')NEWLINENEWLINE    The extent of the sheet can be determined from theNEWLINE    'last_column' and 'last_row' properties; the 'next_column'NEWLINE    and 'next_row' properties report the next empty columnNEWLINE    and row respectively.NEWLINENEWLINE    Cells can contain Excel-style formulae by adding anNEWLINE    equals sign to the start of the value. 
Typically formulaeNEWLINE reference other cells and perform mathematical operationsNEWLINE on them, e.g.:NEWLINENEWLINE >>> ws['E11'] = "=A1+A2"NEWLINENEWLINE Wildcard characters can be used which will be automaticallyNEWLINE translated into the cell column ('#') or row ('?'), forNEWLINE example:NEWLINENEWLINE >>> ws['F46'] = "=#47+#48"NEWLINENEWLINE will be transformed to "=F47+F48".NEWLINENEWLINE Styles can be applied to cells, using either the 'set_style'NEWLINE method or via the 'style' argument of some methods, toNEWLINE associate an XLSStyle object. Associated XLSStyle objectsNEWLINE can be retrieved using the 'get_style' method.NEWLINENEWLINE The value of an individual cell can be 'rendered' forNEWLINE output using the 'render_cell' method:NEWLINENEWLINE >>> print(ws.render_cell('F46'))NEWLINENEWLINE All or part of the sheet can be rendered as a tab- andNEWLINE newline-delimited string by using the 'render_as_text'NEWLINE method:NEWLINENEWLINE >>> print(ws.render_as_text())NEWLINENEWLINE """NEWLINE def __init__(self,title):NEWLINE """Create new XLSWorkSheet objectNEWLINENEWLINE Arguments:NEWLINE title: title string for the worksheetNEWLINENEWLINE """NEWLINE self.title = str(title)[:Spreadsheet.MAX_LEN_WORKSHEET_TITLE]NEWLINE self.data = {}NEWLINE self.styles = {}NEWLINE self.rows = []NEWLINE self.columns = []NEWLINE self.freeze_panes = NoneNEWLINENEWLINE def __setitem__(self,idx,value):NEWLINE """Implement 'x[idx] = value'NEWLINENEWLINE """NEWLINE idx = CellIndex(idx)NEWLINE if not idx.is_full:NEWLINE raise KeyError("Invalid index: '%s'" % idx)NEWLINE self.data[idx.idx] = valueNEWLINE if idx.column not in self.columns:NEWLINE self.columns.append(idx.column)NEWLINE # Sort by length first so that e.g. 'Z' sorts before 'AA'NEWLINE self.columns = sorted(self.columns,key=lambda x: (len(x),x))NEWLINE if idx.row not in self.rows:NEWLINE self.rows.append(idx.row)NEWLINE self.rows.sort()NEWLINENEWLINE def __getitem__(self,idx):NEWLINE """Implement 'value = x[idx]'NEWLINENEWLINE """NEWLINE if str(idx).isalpha():NEWLINE return XLSColumn(idx,parent=self)NEWLINE else:NEWLINE try:NEWLINE return self.data[idx]NEWLINE except Exception as ex:NEWLINE return NoneNEWLINENEWLINE def __delitem__(self,idx):NEWLINE """Implement 'del(x[idx])'NEWLINENEWLINE """NEWLINE try:NEWLINE del(self.data[idx])NEWLINE except KeyError:NEWLINE passNEWLINE idx = CellIndex(idx)NEWLINE if self.column_is_empty(idx.column):NEWLINE self.columns.remove(idx.column)NEWLINE if self.row_is_empty(idx.row):NEWLINE self.rows.remove(idx.row)NEWLINENEWLINE @propertyNEWLINE def last_column(self):NEWLINE """Return index of last column with dataNEWLINENEWLINE """NEWLINE try:NEWLINE return self.columns[-1]NEWLINE except IndexError:NEWLINE return 'A'NEWLINENEWLINE @propertyNEWLINE def next_column(self):NEWLINE """Index of first empty column after highest index with dataNEWLINENEWLINE """NEWLINE if len(self.columns):NEWLINE return column_integer_to_index(column_index_to_integer(self.last_column)+1)NEWLINE else:NEWLINE return 'A'NEWLINENEWLINE @propertyNEWLINE def last_row(self):NEWLINE """Return index of last row with dataNEWLINENEWLINE """NEWLINE try:NEWLINE return int(self.rows[-1])NEWLINE except IndexError:NEWLINE return 1NEWLINENEWLINE @propertyNEWLINE def next_row(self):NEWLINE """Index of first empty row after highest index with dataNEWLINENEWLINE """NEWLINE if len(self.rows):NEWLINE return self.last_row + 1NEWLINE else:NEWLINE return 1NEWLINENEWLINE def column_is_empty(self,col):NEWLINE """Determine whether a column is emptyNEWLINENEWLINE Returns False if any cells in the column are populated,NEWLINE otherwise returns True.NEWLINENEWLINE """NEWLINE if col not in self.columns:NEWLINE return TrueNEWLINE for row in self.rows:NEWLINE if self[cell(col,row)] is not None:NEWLINE return FalseNEWLINE return TrueNEWLINENEWLINE def row_is_empty(self,row):NEWLINE """Determine whether a row is emptyNEWLINENEWLINE Returns False if any cells in the row are populated,NEWLINE otherwise returns True.NEWLINENEWLINE """NEWLINE if row not in self.rows:NEWLINE return TrueNEWLINE for col in self.columns:NEWLINE if self[cell(col,row)] is not None:NEWLINE return FalseNEWLINE return TrueNEWLINENEWLINE def columnof(self,s,row=1):NEWLINE """Return column index for cell which matches stringNEWLINENEWLINE Return index of first column where the content matchesNEWLINE the specified string 's'.NEWLINENEWLINE Arguments:NEWLINE s: string to search forNEWLINE row: row to search in (defaults to 1)NEWLINENEWLINE Returns:NEWLINE Column index of first matching cell. Raises LookupErrorNEWLINE if no match is found.NEWLINENEWLINE """NEWLINE for col in self.columns:NEWLINE if self[cell(col,row)] == s:NEWLINE return colNEWLINE raise LookupError("No match for '%s' in row %d" % (s,row))NEWLINENEWLINE def insert_column(self,position,data=None,text=None,fill=None,from_row=None,style=None):NEWLINE """Create a new column at the specified column positionNEWLINENEWLINE Inserts a new column at the specified column position,NEWLINE pushing the column currently at that position, plus allNEWLINE columns after it, along one position.NEWLINENEWLINE By default the inserted column is empty, however data canNEWLINE be specified to populate the column.NEWLINENEWLINE Arguments:NEWLINE position: column index specifying position to insert theNEWLINE column atNEWLINE data: optional, list of data items to populate theNEWLINE inserted columnNEWLINE text: optional, newline-delimited string of text to be usedNEWLINE to populate the inserted columnNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted columnNEWLINE from_row: optional, if specified then inserted column isNEWLINE populated from that row onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE Returns:NEWLINE The index of the inserted column.NEWLINENEWLINE """NEWLINE # Get list of all columns we want to move (in reverse order)NEWLINE columns_to_bump = []NEWLINE try:NEWLINE i = self.columns.index(position)NEWLINE columns_to_bump = self.columns[i:][::-1]NEWLINE except ValueError:NEWLINE for col in self.columns:NEWLINE if cmp_column_indices(col,position) > -1:NEWLINE i = self.columns.index(col)NEWLINE columns_to_bump = self.columns[i:][::-1]NEWLINE breakNEWLINE # Shift columns, if requiredNEWLINE for col in columns_to_bump:NEWLINE next_col = column_integer_to_index(column_index_to_integer(col)+1)NEWLINE for row in range(1,self.last_row+1):NEWLINE # Get cell indexNEWLINE idx = cell(col,row)NEWLINE if idx in self.data:NEWLINE # Copy contents to next columnNEWLINE self.data[cell(next_col,row)] = self.data[idx]NEWLINE # Remove this cellNEWLINE del(self.data[idx])NEWLINE # Append a new last column index to list of columnsNEWLINE self.columns.append(self.next_column)NEWLINE # Remove the inserted column index from the list of columnsNEWLINE if position in self.columns:NEWLINE self.columns.remove(position)NEWLINE # Now insert data at the new positionNEWLINE self.write_column(position,data=data,text=text,fill=fill,from_row=from_row,style=style)NEWLINE return positionNEWLINENEWLINE def append_column(self,data=None,text=None,fill=None,from_row=None,style=None):NEWLINE """Create a new column at the end of the sheetNEWLINENEWLINE Appends a new column at the end of the worksheet i.e. in theNEWLINE first available empty column.NEWLINENEWLINE By default the appended column is empty, however data canNEWLINE be specified to populate the column.NEWLINENEWLINE Arguments:NEWLINE data: optional, list of data items to populate theNEWLINE inserted columnNEWLINE text: optional, newline-delimited string of text to be usedNEWLINE to populate the inserted columnNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted columnNEWLINE from_row: optional, if specified then inserted column isNEWLINE populated from that row onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE Returns:NEWLINE The index of the appended column.NEWLINENEWLINE """NEWLINE new_col = self.next_columnNEWLINE # Now insert data into the new positionNEWLINE self.write_column(new_col,data=data,text=text,fill=fill,from_row=from_row,style=style)NEWLINE return new_colNEWLINENEWLINE def write_column(self,col,data=None,text=None,fill=None,from_row=None,style=None):NEWLINE """Write data to rows in a columnNEWLINENEWLINE Data can be specified as a list, a newline-delimited string, orNEWLINE as a single repeated data item.NEWLINENEWLINE Arguments:NEWLINE data: optional, list of data items to populate theNEWLINE inserted columnNEWLINE text: optional, newline-delimited string of text to be usedNEWLINE to populate the inserted columnNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted columnNEWLINE from_row: optional, if specified then inserted column isNEWLINE populated from that row onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE """NEWLINE # Set initial rowNEWLINE if from_row is None:NEWLINE from_row = 1NEWLINE # Write in data from a listNEWLINE if data is not None:NEWLINE items = dataNEWLINE elif text is not None:NEWLINE items = text.split('\n')NEWLINE elif fill is not None:NEWLINE items = [fill for i in range(from_row,self.last_row+1)]NEWLINE else:NEWLINE # Nothing to doNEWLINE returnNEWLINE # Add column index to list of columnsNEWLINE if col not in self.columns:NEWLINE self.columns.append(col)NEWLINE # Write data items to cellsNEWLINE row = from_rowNEWLINE for item in items:NEWLINE self.data[cell(col,row)] = itemNEWLINE if row not in self.rows:NEWLINE self.rows.append(row)NEWLINE if style is not None:NEWLINE self.set_style(style,cell(col,row))NEWLINE row += 1NEWLINE # Sort the column and row indicesNEWLINE # (length-first key so that e.g. 'Z' sorts before 'AA')NEWLINE self.columns = sorted(self.columns,key=lambda x: (len(x),x))NEWLINE self.rows.sort()NEWLINENEWLINE def insert_column_data(self,col,data,start=None,style=None):NEWLINE """Insert list of data into a columnNEWLINENEWLINE Data items are supplied as a list, with each item in the listNEWLINE being inserted into the next row in the column.NEWLINENEWLINE By default items are inserted starting from row 1, unless aNEWLINE starting row is explicitly specified via the 'start' argument.NEWLINENEWLINE *** THIS METHOD IS DEPRECATED ***NEWLINENEWLINE Consider using insert_column, append_column or write_column.NEWLINENEWLINE Arguments:NEWLINE col: index of column to insert the data into (e.g. 
'A','MZ')NEWLINE data: list of data itemsNEWLINE start: (optional) first row to insert data intoNEWLINE style: (optional) XLSStyle object to be associated with eachNEWLINE cell that has data inserted into itNEWLINENEWLINE """NEWLINE # Insert data items from a list into a column in the spreadsheetNEWLINE if start is None:NEWLINE i = 1NEWLINE else:NEWLINE i = int(start)NEWLINE for item in data:NEWLINE self[cell(col,i)] = itemNEWLINE if style is not None:NEWLINE self.set_style(style,cell(col,i))NEWLINE i += 1NEWLINENEWLINE def rowof(self,s,column='A'):NEWLINE """Return row index for cell which matches stringNEWLINENEWLINE Return index of first row where the content matchesNEWLINE the specified string 's'.NEWLINENEWLINE Arguments:NEWLINE s: string to search forNEWLINE column: column to search in (defaults to 'A')NEWLINENEWLINE Returns:NEWLINE Row index of first matching cell. Raises LookupErrorNEWLINE if no match is found.NEWLINENEWLINE """NEWLINE # Get row where cell in row matches 'name'NEWLINE # i.e. look up a row indexNEWLINE for row in range(1,self.last_row+1):NEWLINE if self[cell(column,row)] == s:NEWLINE return rowNEWLINE raise LookupError("No match for '%s' in column '%s'" %NEWLINE (s,column))NEWLINENEWLINE def insert_row(self,position,data=None,text=None,fill=None,from_column=None,style=None):NEWLINE """Create a new row at the specified row positionNEWLINENEWLINE Inserts a new row at the specified row position,NEWLINE pushing the row currently at that position, plus allNEWLINE rows below it, down one position.NEWLINENEWLINE By default the inserted row is empty, however data canNEWLINE be specified to populate the row.NEWLINENEWLINE Arguments:NEWLINE position: row index specifying position to insert theNEWLINE row atNEWLINE data: optional, list of data items to populate theNEWLINE inserted rowNEWLINE text: optional, tab-delimited string of text to be usedNEWLINE to populate the inserted rowNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted rowNEWLINE from_column: optional, if specified then inserted row isNEWLINE populated from that column onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE Returns:NEWLINE The index of the inserted row.NEWLINENEWLINE """NEWLINE # Bump all rows at or below the insertion point down one positionNEWLINE # Get list of all rows we want to move (in reverse order)NEWLINE row_list = list(range(self.last_row,position-1,-1))NEWLINE for row in row_list:NEWLINE next_row = row + 1NEWLINE for col in self.columns:NEWLINE # Get cell indexNEWLINE idx = cell(col,row)NEWLINE if idx in self.data:NEWLINE # Copy contents to next rowNEWLINE self.data[cell(col,next_row)] = self.data[idx]NEWLINE # Remove this cellNEWLINE del(self.data[idx])NEWLINE # Add a new last row index to the list of rowsNEWLINE self.rows.append(self.next_row)NEWLINE # Remove the inserted row index from the listNEWLINE if position in self.rows:NEWLINE self.rows.remove(position)NEWLINE # Now insert data at the new positionNEWLINE self.write_row(position,data=data,text=text,fill=fill,from_column=from_column,style=style)NEWLINE return positionNEWLINENEWLINE def append_row(self,data=None,text=None,fill=None,from_column=None,style=None):NEWLINE """Create a new row at the end of the sheetNEWLINENEWLINE Appends a new row at the end of the worksheet i.e. in theNEWLINE first available empty row.NEWLINENEWLINE By default the appended row is empty, however data canNEWLINE be specified to populate the row.NEWLINENEWLINE Arguments:NEWLINE data: optional, list of data items to populate theNEWLINE inserted rowNEWLINE text: optional, tab-delimited string of text to be usedNEWLINE to populate the inserted rowNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted rowNEWLINE from_column: optional, if specified then inserted row isNEWLINE populated from that column onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE Returns:NEWLINE The index of the appended row.NEWLINENEWLINE """NEWLINE # Create a new row at the end of the sheetNEWLINE new_row = self.next_rowNEWLINE # Now insert data into the new positionNEWLINE self.write_row(new_row,data=data,text=text,fill=fill,from_column=from_column,style=style)NEWLINE return new_rowNEWLINENEWLINE def write_row(self,row,data=None,text=None,fill=None,from_column=None,style=None):NEWLINE """Write data to columns in a rowNEWLINENEWLINE Data can be specified as a list, a tab-delimited string, orNEWLINE as a single repeated data item.NEWLINENEWLINE Arguments:NEWLINE row: row index specifying which rowNEWLINE data: optional, list of data items to populate theNEWLINE inserted rowNEWLINE text: optional, tab-delimited string of text to be usedNEWLINE to populate the inserted rowNEWLINE fill: optional, single data item to be repeated to fillNEWLINE the inserted rowNEWLINE from_column: optional, if specified then inserted row isNEWLINE populated from that column onwardsNEWLINE style: optional, an XLSStyle object to associate with theNEWLINE data being insertedNEWLINENEWLINE """NEWLINE # Set initial columnNEWLINE if from_column is None:NEWLINE from_column = 'A'NEWLINE # Write in data from a listNEWLINE if data is not None:NEWLINE items = dataNEWLINE elif text is not None:NEWLINE items = text.split('\t')NEWLINE elif fill is not None:NEWLINE # One fill item for each column from from_column up toNEWLINE # the last column with dataNEWLINE items = [fill for c in ColumnRange(from_column,self.last_column)]NEWLINE else:NEWLINE # Nothing to doNEWLINE returnNEWLINE # Add row index to list of rowsNEWLINE if row not in self.rows:NEWLINE self.rows.append(row)NEWLINE # Write data items to cellsNEWLINE col = from_columnNEWLINE for item in items:NEWLINE self.data[cell(col,row)] = itemNEWLINE if col not in self.columns:NEWLINE self.columns.append(col)NEWLINE if style is not None:NEWLINE self.set_style(style,cell(col,row))NEWLINE col = incr_col(col)NEWLINE # Sort the column and row indicesNEWLINE # (length-first key so that e.g. 'Z' sorts before 'AA')NEWLINE self.columns = sorted(self.columns,key=lambda x: (len(x),x))NEWLINE self.rows.sort()NEWLINENEWLINE def insert_row_data(self,row,data,start=None,style=None):NEWLINE """Insert list of data into a rowNEWLINENEWLINE Data items are supplied as a list, with each item in the listNEWLINE being inserted into the next column in the row.NEWLINENEWLINE By default items are inserted starting from column 'A', unless aNEWLINE starting column is explicitly specified via the 'start' argument.NEWLINENEWLINE *** THIS METHOD IS DEPRECATED ***NEWLINENEWLINE Consider using insert_row, append_row or write_row.NEWLINENEWLINE Arguments:NEWLINE row: index of row to insert the data into (e.g. 1, 112)NEWLINE data: list of data itemsNEWLINE start: (optional) first column to insert data intoNEWLINE style: (optional) XLSStyle object to be associated with eachNEWLINE cell that has data inserted into itNEWLINENEWLINE """NEWLINE # Insert data items from a list into a row in the spreadsheetNEWLINE if start is None:NEWLINE i = column_index_to_integer('A')NEWLINE else:NEWLINE i = column_index_to_integer(start)NEWLINE for item in data:NEWLINE self[cell(column_integer_to_index(i),row)] = itemNEWLINE if style is not None:NEWLINE self.set_style(style,cell(column_integer_to_index(i),row))NEWLINE i += 1NEWLINENEWLINE def insert_block_data(self,data,col=None,row=None,style=None):NEWLINE """Insert data items from a block of textNEWLINENEWLINE Data items are supplied via a block of tab- and newline-delimitedNEWLINE text. Each tab-delimited item is inserted into the next column inNEWLINE a row; newlines indicate that subsequent items are inserted intoNEWLINE the next row.NEWLINENEWLINE By default items are inserted starting from cell 'A1'; a differentNEWLINE starting cell can be explicitly specified via the 'col' and 'row'NEWLINE arguments.NEWLINENEWLINE Arguments:NEWLINE data: block of tab- and newline-delimited dataNEWLINE col: (optional) first column to insert data intoNEWLINE row: (optional) first row to insert data intoNEWLINE style: (optional) XLSStyle object to be associated with eachNEWLINE cell that has data inserted into itNEWLINENEWLINE """NEWLINE # Insert data items from a block of text into the spreadsheetNEWLINE # Text must be tab and newline delimitedNEWLINE if row is None:NEWLINE j = 1NEWLINE else:NEWLINE j = int(row)NEWLINE for line in data.split('\n'):NEWLINE if col is None:NEWLINE i = column_index_to_integer('A')NEWLINE else:NEWLINE i = column_index_to_integer(col)NEWLINE for item in line.strip('\n').split('\t'):NEWLINE icol = column_integer_to_index(i)NEWLINE if not item:NEWLINE item = NoneNEWLINE self[cell(icol,j)] = itemNEWLINE if style is not None:NEWLINE self.set_style(style,cell(icol,j))NEWLINE i += 1NEWLINE j += 1NEWLINENEWLINE def fill_column(self,column,item,start=None,end=None,style=None):NEWLINE """Fill a column with a single repeated data itemNEWLINENEWLINE A single data item is inserted into all rows in the specifiedNEWLINE column which have at least one data item already in any columnNEWLINE in the worksheet. A different range of rows can be specifiedNEWLINE via the 'start' and 'end' arguments.NEWLINENEWLINE *** THIS METHOD IS DEPRECATED ***NEWLINENEWLINE Consider using insert_column, append_column or write_column.NEWLINENEWLINE Arguments:NEWLINE column: index of column to insert the item into (e.g. 
'A','MZ')NEWLINE item: data item to be repeatedNEWLINE start: (optional) first row to insert data intoNEWLINE end: (optional) last row to insert data intoNEWLINE style: (optional) XLSStyle object to be associated with eachNEWLINE cell that has data inserted into itNEWLINENEWLINE """NEWLINE # Fill a column with the same data itemNEWLINE if (start is None or end is None) and (not self.columns and not self.rows):NEWLINE # Empty sheet, nothing to fillNEWLINE returnNEWLINE if start is None:NEWLINE i = 1NEWLINE else:NEWLINE i = int(start)NEWLINE if end is None:NEWLINE j = self.last_rowNEWLINE else:NEWLINE j = int(end)NEWLINE for row in range(i,j+1):NEWLINE self[cell(column,row)] = itemNEWLINE if style is not None:NEWLINE self.set_style(style,cell(column,row))NEWLINENEWLINE def set_style(self,cell_style,start,end=None):NEWLINE """Associate style information with one or more cellsNEWLINE NEWLINE Associates a specified XLSStyle object with a singleNEWLINE cell, or with a range of cells (if a second cell indexNEWLINE is supplied).NEWLINENEWLINE The style associated with a cell can be fetched usingNEWLINE the 'get_style' method.NEWLINENEWLINE Arguments:NEWLINE cell_style: XLSStyle objectNEWLINE start: cell index e.g. 'A1'NEWLINE end: (optional) second cell index; together withNEWLINE 'start' this defines a range of cells to associateNEWLINE the style with.NEWLINE NEWLINE """NEWLINE if end is None:NEWLINE # Specified a single cell targetNEWLINE self.styles[start] = cell_styleNEWLINE returnNEWLINE # Specify a range of cellsNEWLINE start_cell = CellIndex(start)NEWLINE end_cell = CellIndex(end)NEWLINE for col in ColumnRange(start_cell.column,end_cell.column):NEWLINE for row in range(start_cell.row,end_cell.row+1):NEWLINE self.styles[cell(col,row)] = cell_styleNEWLINENEWLINE def get_style(self,idx):NEWLINE """Return the style information associated with a cellNEWLINENEWLINE Returns an XLSStyle object associated with the specificNEWLINE cell.NEWLINENEWLINE If no style was previously associated then return a newNEWLINE XLSStyle object.NEWLINENEWLINE Arguments:NEWLINE idx: cell index e.g 'A1'NEWLINENEWLINE Returns:NEWLINE XLSStyle object.NEWLINENEWLINE """NEWLINE try:NEWLINE return self.styles[idx]NEWLINE except KeyError:NEWLINE # Return empty style objectNEWLINE return XLSStyle()NEWLINENEWLINE def render_cell(self,idx,eval_formulae=False,apply_format=False):NEWLINE """Text representation of value stored in a cellNEWLINENEWLINE Create a text representation of a cell's contents. If the cellNEWLINE contains a formula then '?'s will be replaced with the row indexNEWLINE and '#'s with the column index. Optionally the formula can alsoNEWLINE be evaluated, and any style information associated with the cellNEWLINE can also be rendered.NEWLINENEWLINE Arguments:NEWLINE idx: cell index e.g. 
'A1'NEWLINE eval_formulae: (optional) if True then if the cell containsNEWLINE a formula, attempt to evaluate it and return the result.NEWLINE Otherwise return the formula itself (this is the default)NEWLINE apply_format: (optional) if True then format numbers accordingNEWLINE to the formatting information associated with the cellNEWLINE (default is not to apply formatting).NEWLINENEWLINE Returns:NEWLINE String representing the cell contents.NEWLINENEWLINE """NEWLINE item = self[idx]NEWLINE if item is None:NEWLINE # Empty itemNEWLINE return ''NEWLINE try:NEWLINE if item.startswith('='):NEWLINE # FormulaNEWLINE item = item.replace('?',NEWLINE str(CellIndex(idx).row)).replace('#',NEWLINE str(CellIndex(idx).column))NEWLINE if eval_formulae:NEWLINE logging.debug("Evaluating %s from %s" % (item,idx))NEWLINE item = eval_formula(item,self)NEWLINE except AttributeError:NEWLINE passNEWLINE if apply_format:NEWLINE style = self.get_style(idx)NEWLINE if style is not None:NEWLINE try:NEWLINE return format_value(item,style.number_format)NEWLINE except Exception as ex:NEWLINE logging.debug("Exception: %s" % ex)NEWLINE raise exNEWLINE else:NEWLINE return str(item)NEWLINENEWLINE def render_as_text(self,include_columns_and_rows=False,NEWLINE include_styles=False,NEWLINE eval_formulae=False,NEWLINE apply_format=False,NEWLINE start=None,end=None):NEWLINE """Text representation of all or part of the worksheetNEWLINENEWLINE All or part of the sheet can be rendered as a tab- andNEWLINE newline-delimited string.NEWLINENEWLINE Arguments:NEWLINE include_columns_and_rows: (optional) if True then also outputNEWLINE a header row of column indices, and a column of row indicesNEWLINE (default is to not output columns and rows).NEWLINE include_styles: (optional) if True then also render the stylingNEWLINE information associated with the cell (default is not to applyNEWLINE styling).NEWLINE apply_format: (optional) if True then format numbers accordingNEWLINE to the formatting information associated with the cellNEWLINE (default is not to apply formatting).NEWLINE eval_formulae: (optional) if True then if the cell containsNEWLINE a formula, attempt to evaluate it and return the result.NEWLINE Otherwise return the formula itself (this is the default)NEWLINE start: (optional) specify the top-lefthand most cell index toNEWLINE start rendering from (default is 'A1').NEWLINE end: (optional) specify the bottom-righthand most cell indexNEWLINE to finish rendering at (default is the cell corresponding toNEWLINE the highest column and row indices. Note that this cell mayNEWLINE be empty.)NEWLINENEWLINE Returns:NEWLINE String containing the rendered sheet or sheet subset, with itemsNEWLINE within a row separated by tabs, and rows separated by newlines.NEWLINENEWLINE """NEWLINE # Output worksheet as text (i.e. 
string)NEWLINE if start is None:NEWLINE start = CellIndex('A1')NEWLINE else:NEWLINE start = CellIndex(start)NEWLINE if end is None:NEWLINE end = CellIndex(cell(self.last_column,self.last_row))NEWLINE else:NEWLINE end = CellIndex(end)NEWLINE text = []NEWLINE if include_columns_and_rows:NEWLINE line = ['']NEWLINE for col in ColumnRange(start.column,end.column):NEWLINE line.append(col)NEWLINE text.append('\t'.join(line))NEWLINE for row in range(start.row,end.row+1):NEWLINE line = []NEWLINE if include_columns_and_rows:NEWLINE line.append(u'%s' % row)NEWLINE for col in ColumnRange(start.column,end.column):NEWLINE value = self.render_cell(cell(col,row),NEWLINE eval_formulae=eval_formulae,NEWLINE apply_format=apply_format)NEWLINE if include_styles:NEWLINE value = self.get_style(cell(col,row)).style(value)NEWLINE line.append(u"%s" % value)NEWLINE text.append('\t'.join(line))NEWLINE return '\n'.join(text)NEWLINENEWLINEclass XLSStyle(object):NEWLINE """Class representing a set of styling and formatting dataNEWLINENEWLINE An XLSStyle object represents a collection of data used forNEWLINE styling and formatting cell values on output to an Excel file.NEWLINENEWLINE The style attributes can be set on instantiation, or queriedNEWLINE and modified afterwards.NEWLINENEWLINE The attributes are:NEWLINENEWLINE bold: whether text is bold or not (boolean)NEWLINE color: text color (name)NEWLINE bgcolor: background color (name)NEWLINE wrap: whether text in a cell should wrap (boolean)NEWLINE border: style of cell border (thick, medium, thin etc)NEWLINE number_format: a format code from the NumberFormats classNEWLINE font_size: font size in points (integer)NEWLINE centre: whether text is centred in the cell (boolean)NEWLINE shrink_to_fit: whether to shrink cell to fit the contents.NEWLINENEWLINE The 'name' property can be used to generate a name for the styleNEWLINE based on the attributes that have been set, for example:NEWLINENEWLINE >>> XLSStyle(bold=True).nameNEWLINE ... 
'__bold__'NEWLINENEWLINE """NEWLINE def __init__(self,bold=False,color=None,bgcolor=None,wrap=False,NEWLINE border=None,number_format=None,font_size=None,centre=False,NEWLINE shrink_to_fit=False):NEWLINE """Create a new XLSStyle objectNEWLINENEWLINE The available arguments are the same as the attributes.NEWLINENEWLINE """NEWLINE self.bold=boldNEWLINE self.color=colorNEWLINE self.bgcolor=bgcolorNEWLINE self.wrap=wrapNEWLINE self.border=borderNEWLINE self.number_format=number_formatNEWLINE self.font_size = font_sizeNEWLINE self.centre = centreNEWLINE self.shrink_to_fit = shrink_to_fitNEWLINENEWLINE def __nonzero__(self):NEWLINE return self.__bool__()NEWLINENEWLINE def __bool__(self):NEWLINE return \NEWLINE (self.bold) or \NEWLINE (self.color is not None) or \NEWLINE (self.bgcolor is not None) or \NEWLINE (self.wrap) or \NEWLINE (self.border is not None) or \NEWLINE (self.number_format is not None) or \NEWLINE (self.font_size is not None) or \NEWLINE (self.centre) or \NEWLINE (self.shrink_to_fit)NEWLINENEWLINE @propertyNEWLINE def name(self):NEWLINE """Return a name based on the attributesNEWLINE """NEWLINE name = []NEWLINE if self.bold:NEWLINE name.append('bold')NEWLINE if self.color is not None:NEWLINE name.append('color=%s' % self.color)NEWLINE if self.bgcolor is not None:NEWLINE name.append('bgcolor=%s' % self.bgcolor)NEWLINE if self.wrap:NEWLINE name.append('wrap')NEWLINE if self.border is not None:NEWLINE name.append('border=%s' % self.border)NEWLINE if self.number_format is not None:NEWLINE name.append('number_format=%s' % self.number_format)NEWLINE if self.font_size is not None:NEWLINE name.append('font_size=%s' % self.font_size)NEWLINE if self.centre:NEWLINE name.append('centre')NEWLINE if self.shrink_to_fit:NEWLINE name.append('shrink_to_fit')NEWLINE name = '__'.join(name)NEWLINE if name:NEWLINE return '__%s__' % nameNEWLINE else:NEWLINE return ''NEWLINENEWLINE def style(self,item):NEWLINE """Wrap 'item' with <style...>...</style> tagsNEWLINENEWLINE Given a string (or object that can be rendered as a string)NEWLINE return the string representation surrounded by <style...>NEWLINE </style> tags, where the tag attributes describe the styleNEWLINE information stored in the XLSStyle object:NEWLINENEWLINE font=boldNEWLINE color=(color)NEWLINE bgcolor=(color)NEWLINE wrapNEWLINE border=(border)NEWLINE number_format=(format)NEWLINE font_size=(size)NEWLINE centreNEWLINE shrink_to_fitNEWLINENEWLINE """NEWLINE style = []NEWLINE if self.bold:NEWLINE style.append("font=bold")NEWLINE if self.color is not None:NEWLINE style.append("color=%s" % self.color)NEWLINE if self.bgcolor is not None:NEWLINE style.append("bgcolor=%s" % self.bgcolor)NEWLINE if self.wrap:NEWLINE style.append("wrap")NEWLINE if self.border is not None:NEWLINE style.append("border=%s" % self.border)NEWLINE if self.number_format is not None:NEWLINE style.append("number_format=%s" % self.excel_number_format)NEWLINE if self.font_size is not None:NEWLINE style.append("font_size=%s" % self.font_size)NEWLINE if self.centre:NEWLINE style.append("centre")NEWLINE if self.shrink_to_fit:NEWLINE style.append("shrink_to_fit")NEWLINE if style:NEWLINE return "<style %s>%s</style>" % (' '.join(style),item)NEWLINE else:NEWLINE return itemNEWLINENEWLINE @propertyNEWLINE def excel_number_format(self):NEWLINE """Return an Excel-style equivalent of the stored number formatNEWLINENEWLINE Returns an Excel-style number format, or None if the formatNEWLINE isn't set or is unrecognised.NEWLINENEWLINE """NEWLINE if self.number_format == 
NumberFormats.THOUSAND_SEPARATOR:NEWLINE return "#,###"NEWLINE elif self.number_format == NumberFormats.PERCENTAGE:NEWLINE return "0.0%"NEWLINE return NoneNEWLINENEWLINEclass ColumnRange(Iterator):NEWLINE """Iterator for a range of column indicesNEWLINENEWLINE Range-style iterator for iterating over alphabetical columnNEWLINE indices, e.g.NEWLINENEWLINE >>> for c in ColumnRange('A','Z'):NEWLINE ... print(c)NEWLINENEWLINE """NEWLINE def __init__(self,i,j=None,include_end=True,reverse=False):NEWLINE """Create an iterator for a range of column indicesNEWLINENEWLINE Acts like 'range' i.e.:NEWLINENEWLINE ColumnRange('C'): equivalent to ['A','B','C']NEWLINE ColumnRange('C',include_end=False): ['A','B']NEWLINE ColumnRange('C','F'): ['C','D','E','F']NEWLINE ColumnRange('C','F',include_end=False): ['C','D','E']NEWLINENEWLINE Arguments:NEWLINE i: defines start column if j is not None, orNEWLINE end column if j is None (in which caseNEWLINE start column will be 'A')NEWLINE j: defines end column (if not None)NEWLINE include_end: if True then the end column is alsoNEWLINE included; otherwise it is omitted.NEWLINE reverse: if True then the columns are returned inNEWLINE descending orderNEWLINENEWLINE """NEWLINE self.incr = 1NEWLINE if j is None:NEWLINE self.start = column_index_to_integer('A')NEWLINE self.end = column_index_to_integer(i)NEWLINE else:NEWLINE self.start = column_index_to_integer(i)NEWLINE self.end = column_index_to_integer(j)NEWLINE if reverse:NEWLINE self.end,self.start = self.start,self.endNEWLINE self.incr = -1NEWLINE self.column = self.start-self.incrNEWLINE if include_end:NEWLINE self.end += self.incrNEWLINENEWLINE def next(self):NEWLINE """Implements Iterator subclass 'next' method (Python 2 only)NEWLINENEWLINE """NEWLINE return self.__next__()NEWLINENEWLINE def __next__(self):NEWLINE """Implements Iterator subclass '__next__' methodNEWLINENEWLINE """NEWLINE self.column = self.column + self.incrNEWLINE if self.column == self.end:NEWLINE raise StopIterationNEWLINE return column_integer_to_index(self.column)NEWLINENEWLINEclass CellIndex(object):NEWLINE """Convenience class for handling XLS-style cell indicesNEWLINENEWLINE The CellIndex class provides a way of handling XLS-styleNEWLINE cell indices i.e. 'A1', 'BZ112' etc.NEWLINENEWLINE Given a putative cell index it extracts the column andNEWLINE row which can then be accessed via the 'column' andNEWLINE 'row' attributes respectively.NEWLINENEWLINE The 'is_full' property reports whether the suppliedNEWLINE index is actually a 'full' index with both column andNEWLINE row specifiers. If it is just a column or just a rowNEWLINE then only the appropriate 'column' or 'row' attributesNEWLINE will be set.NEWLINENEWLINE """NEWLINE def __init__(self,idx):NEWLINE """Create a new CellIndex instanceNEWLINENEWLINE ArgumentsNEWLINE idx: cell index e.g. 'A1', 'BZ112'NEWLINENEWLINE """NEWLINE self.idx = str(idx)NEWLINE try:NEWLINE r = re.compile(r'^([A-Z]+)([0-9]+)$').match(idx)NEWLINE self.column = r.group(1)NEWLINE self.row = int(r.group(2))NEWLINE except:NEWLINE self.column = NoneNEWLINE self.row = NoneNEWLINE if str(idx).isalpha():NEWLINE self.column = idxNEWLINE elif str(idx).isdigit():NEWLINE self.row = int(idx)NEWLINENEWLINE @propertyNEWLINE def is_full(self):NEWLINE """Return True if index has both column and row informationNEWLINENEWLINE """NEWLINE return not (self.column is None or self.row is None)NEWLINENEWLINE def __repr__(self):NEWLINE """Implement __repr__ built-in; returns cell indexNEWLINENEWLINE """NEWLINE return "%s%s" % ('' if self.column is None else self.column,NEWLINE '' if self.row is None else self.row)NEWLINENEWLINEclass XLSColumn(object):NEWLINE """Class representing a column in an XLSWorkSheetNEWLINENEWLINE An XLSColumn object provides access to data in a columnNEWLINE from an XLSWorkSheet object. Typically one can be returnedNEWLINE by doing something like:NEWLINENEWLINE >>> colA = ws['A']NEWLINE NEWLINE and individual cell values then accessed by row numberNEWLINE alone, e.g.:NEWLINENEWLINE >>> value = colA['1']NEWLINE >>> colA['2'] = "New value"NEWLINE NEWLINE """NEWLINE def __init__(self,column_index,parent=None):NEWLINE """Create a new XLSColumn instanceNEWLINENEWLINE ArgumentsNEWLINE column_index: column indexNEWLINE parent: parent XLSWorkSheet objectNEWLINENEWLINE """NEWLINE self.index = column_indexNEWLINE self.parent = parentNEWLINENEWLINE def __setitem__(self,idx,value):NEWLINE """Implement set item i.e. x['key'] = valueNEWLINENEWLINE """NEWLINE self.parent[self.full_index(idx)] = valueNEWLINENEWLINE def __getitem__(self,idx):NEWLINE """Implement get item i.e. y['key'] returns valueNEWLINENEWLINE """NEWLINE try:NEWLINE return self.parent[self.full_index(idx)]NEWLINE except Exception as ex:NEWLINE return NoneNEWLINENEWLINE def full_index(self,row):NEWLINE """Return the full index for a cell in the columnNEWLINENEWLINE Given a row index, returns the index of the cellNEWLINE that this addresses within the column (e.g. if theNEWLINE column is 'A' then row 2 addresses cell 'A2').NEWLINENEWLINE """NEWLINE return cell(self.index,row)NEWLINENEWLINE#######################################################################NEWLINE# FunctionsNEWLINE#######################################################################NEWLINENEWLINEdef cmp_column_indices(x,y):NEWLINE """Comparison function for column indicesNEWLINENEWLINE x and y are XLS-style column indices e.g. 'A', 'B', 'AA' etc.NEWLINENEWLINE Returns -1 if x is a column index less than y, 1 if it isNEWLINE greater than y, and 0 if it's equal.NEWLINENEWLINE """NEWLINE # Compare by length first so that e.g. 'Z' sorts before 'AA',NEWLINE # then alphabetically within indices of the same lengthNEWLINE return ((len(x),x) > (len(y),y)) - ((len(x),x) < (len(y),y))NEWLINENEWLINEdef cell(col,row):NEWLINE """Return XLS cell index for column and rowNEWLINENEWLINE E.g. cell('A',3) returns 'A3'NEWLINENEWLINE """NEWLINE return "%s%s" % (col,row)NEWLINENEWLINEdef incr_col(col,incr=1):NEWLINE """Return column index incremented by specific number of positionsNEWLINENEWLINE Arguments:NEWLINE col: index of column to be incrementedNEWLINE incr: optional, number of cells to shift by. Can be negativeNEWLINE to go backwards. Defaults to 1 i.e. 
next column along.NEWLINENEWLINE """NEWLINE return column_integer_to_index(column_index_to_integer(col)+incr)NEWLINENEWLINEdef column_index_to_integer(col):NEWLINE """Convert XLS-style column index into equivalent integerNEWLINENEWLINE Given a column index e.g. 'A', 'BZ' etc, converts itNEWLINE to the integer equivalent using zero-based countingNEWLINE system (so 'A' is equivalent to zero, 'B' to 1 etc).NEWLINENEWLINE """NEWLINE # Convert column index e.g. 'A', 'BZ' etc toNEWLINE # integer equivalentNEWLINE idx = 0NEWLINE i = 0NEWLINE for c in col[::-1]:NEWLINE idx = idx + pow(26,i)*(ord(c)-64)NEWLINE i += 1NEWLINE return idx-1NEWLINENEWLINEdef column_integer_to_index(idx):NEWLINE """Convert integer column index to XLS-style equivalentNEWLINENEWLINE Given an integer index, converts it to the XLS-styleNEWLINE equivalent e.g. 'A', 'BZ' etc, using a zero-basedNEWLINE counting system (so zero is equivalent to 'A', 1 to 'B'NEWLINE etc).NEWLINENEWLINE """NEWLINE # Convert integer column to index equivalentNEWLINE col = ''NEWLINE while idx >= 0:NEWLINE col += chr((idx%26)+65)NEWLINE idx = idx//26-1NEWLINE return col[::-1]NEWLINENEWLINEdef eval_formula(item,worksheet):NEWLINE """Evaluate a formula using the contents of a worksheetNEWLINENEWLINE Given an item, attempts an Excel-style evaluation.NEWLINENEWLINE If the item doesn't start with '=' then it is returned as-is.NEWLINE Otherwise the function attempts to evaluate the formula,NEWLINE including looking up (and if necessary also evaluating) theNEWLINE contents of any cells that are referenced.NEWLINENEWLINE *** Note that the implementation of the evaluation is veryNEWLINE simplistic and cannot handle complex formulae or functionsNEWLINENEWLINE Currently it can only deal with:NEWLINENEWLINE * basic mathematical operations (+-*/)NEWLINENEWLINE """NEWLINE # Evaluate a formula from a cell item and return the computed valueNEWLINE logging.debug("Item is %s" % item)NEWLINE if item.startswith('='):NEWLINE item = item[1:]NEWLINE logging.debug("Item reset to %s" % item)NEWLINE else:NEWLINE logging.debug("Returning %s" % item)NEWLINE return itemNEWLINE ops = "+-/*"NEWLINE formula = ''NEWLINE arg = ''NEWLINE nargs = 0NEWLINE for c in item:NEWLINE logging.debug(c)NEWLINE if c not in ops:NEWLINE arg += cNEWLINE else:NEWLINE logging.debug("-> %s" % arg)NEWLINE if CellIndex(arg).is_full:NEWLINE arg = worksheet.render_cell(arg,eval_formulae=True)NEWLINE logging.debug("-> %s" % arg)NEWLINE try:NEWLINE arg = convert_to_number(arg) NEWLINE if c == '/':NEWLINE arg = float(arg)NEWLINE except ValueError:NEWLINE # Failed to convert to numberNEWLINE logging.debug("Error converting %s to number" % arg)NEWLINE return BAD_REFNEWLINE formula = formula + str(arg) + cNEWLINE nargs += 1NEWLINE arg = ''NEWLINE logging.debug("-> %s" % formula)NEWLINE # End of stringNEWLINE if CellIndex(arg).is_full:NEWLINE arg = worksheet.render_cell(arg,eval_formulae=True)NEWLINE if nargs:NEWLINE try:NEWLINE arg = convert_to_number(arg)NEWLINE except ValueError:NEWLINE # Failed to convert to floatNEWLINE logging.debug("Error converting %s to number" % arg)NEWLINE return BAD_REFNEWLINE else:NEWLINE # Single value was referencedNEWLINE try:NEWLINE return convert_to_number(arg)NEWLINE except ValueError:NEWLINE return argNEWLINE formula = formula + str(arg)NEWLINE logging.debug("Formula '%s'" % formula)NEWLINE if re.compile(r"^[0-9+\-\/\*]+").match(formula):NEWLINE try:NEWLINE item = eval(formula)NEWLINE except Exception as ex:NEWLINE logging.debug("Error processing %s: %s" % (item,ex))NEWLINE 
return BAD_REFNEWLINE else:NEWLINE item = formulaNEWLINE return itemNEWLINENEWLINEdef convert_to_number(s):NEWLINE """Convert a number to float or int as appropriateNEWLINENEWLINE Raises ValueError if neither conversion is possible.NEWLINENEWLINE """NEWLINE if is_int(s):NEWLINE return int(s)NEWLINE elif is_float(s):NEWLINE return float(s)NEWLINE raise ValueError("%s not a number?" % s)NEWLINENEWLINEdef is_float(s):NEWLINE """Test if a number is a floatNEWLINE """NEWLINE try:NEWLINE return str(float(s)) == sNEWLINE except ValueError:NEWLINE return FalseNEWLINENEWLINEdef is_int(s):NEWLINE """Test if a number is an integerNEWLINE """NEWLINE try:NEWLINE return str(int(s)) == sNEWLINE except ValueError:NEWLINE return FalseNEWLINENEWLINEdef format_value(value,number_format=None):NEWLINE """Format a cell value based on the specified number formatNEWLINENEWLINE """NEWLINE logging.debug("format_value: %s (%s) %s" % (value,type(value),number_format))NEWLINE if number_format is None:NEWLINE return str(value)NEWLINE if number_format == NumberFormats.PERCENTAGE:NEWLINE # Convert to percentageNEWLINE logging.debug("Percentage")NEWLINE return "%.1f%%" % (float(value) * 100.0)NEWLINE if number_format == NumberFormats.THOUSAND_SEPARATOR:NEWLINE # Add thousands separatorNEWLINE i = int(value)NEWLINE value = []NEWLINE while i >= 1000:NEWLINE value.append("%03d" % (i%1000))NEWLINE i = i//1000NEWLINE value.append(str(i))NEWLINE value = value[::-1]NEWLINE return ','.join(value)NEWLINE # Unknown, do nothingNEWLINE return str(value)NEWLINENEWLINE#######################################################################NEWLINE# Example usageNEWLINE#######################################################################NEWLINENEWLINEif __name__ == "__main__":NEWLINE wb = XLSWorkBook("Test")NEWLINENEWLINE wb.add_work_sheet('test')NEWLINE wb.add_work_sheet('test2')NEWLINE wb.add_work_sheet('data',"Data")NEWLINE print("%s" % wb.worksheet['test'].title)NEWLINE print("%s" % wb.worksheet['test2'].title)NEWLINE print("%s" % wb.worksheet['data'].title)NEWLINENEWLINE data = wb.worksheet['data']NEWLINE data['A1'] = "Column 1"NEWLINE print("%s" % data['A1'])NEWLINE print("%s" % data['A2'])NEWLINE print("%s" % data['A']['1'])NEWLINE data['A']['1'] = "Updated value"NEWLINE print("%s" % data['A1'])NEWLINENEWLINE data['B']['12'] = "Another value"NEWLINE data['Z']['5'] = "And another"NEWLINE data['AZ']['3'] = "Yet another"NEWLINE data['AB']['3'] = "And another again"NEWLINE NEWLINE print("%s,%s" % (data.columns,data.rows))NEWLINENEWLINE print(data.render_as_text(include_columns_and_rows=True,NEWLINE eval_formulae=True,NEWLINE include_styles=True))NEWLINENEWLINE print(data.render_as_text(start='B1',NEWLINE end='C6',NEWLINE include_columns_and_rows=True))NEWLINENEWLINE # Examples rendering into XLS and XLSX outputNEWLINE wb = XLSWorkBook("Test")NEWLINE ws = wb.add_work_sheet('test','Test')NEWLINE # TextNEWLINE ws['A1'] = "Arbitrary text"NEWLINE # FormulaNEWLINE ws['A3'] = "Formula example:"NEWLINE ws['A5'] = 2NEWLINE ws['B5'] = 5NEWLINE ws['A6'] = "Sum"NEWLINE ws['B6'] = "=A5+B5"NEWLINE # Set styles on formulaNEWLINE ws.set_style(XLSStyle(bold=True),'A6')NEWLINE ws.set_style(XLSStyle(bold=True),'B6')NEWLINE # More style examplesNEWLINE ws['A9'] = "Bold"NEWLINE ws.set_style(XLSStyle(bold=True),'A9')NEWLINE ws['A10'] = "Red"NEWLINE ws.set_style(XLSStyle(color='red'),'A10')NEWLINE ws['A11'] = "White on green"NEWLINE ws.set_style(XLSStyle(bold=True,color='white',bgcolor='green'),'A11')NEWLINE ws['A12'] = "Black on gray"NEWLINE 
ws.set_style(XLSStyle(bold=True,color='black',bgcolor='gray25'),'A12')NEWLINE # Freeze panesNEWLINE ws = wb.add_work_sheet('freeze','Freeze')NEWLINE ws.append_row(data=('X','Y','Z'),NEWLINE style=XLSStyle(color='white',NEWLINE bgcolor='green',NEWLINE bold=True))NEWLINE for i in range(100):NEWLINE ws.append_row(data=(i,i*2,'=A?+B?'))NEWLINE ws.freeze_panes = 'A2'NEWLINE # Save out to XLS(X) filesNEWLINE wb.save_as_xls('test.xls')NEWLINE wb.save_as_xlsx('test.xlsx')NEWLINENEWLINE
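 # The lines below are an editor's illustrative sketch, not part of theNEWLINE # original module: they exercise the '?'/'#' formula wildcards and theNEWLINE # zero-based column index conversions documented above, using onlyNEWLINE # names defined in this module.NEWLINE demo = XLSWorkBook("Wildcards")NEWLINE ws = demo.add_work_sheet('demo')NEWLINE ws['A1'] = 1NEWLINE ws['B1'] = 2NEWLINE # '?' expands to the cell's row and '#' to its column, so in C1NEWLINE # this becomes '=A1+B1' before (optional) evaluationNEWLINE ws['C1'] = "=A?+B?"NEWLINE print(ws.render_cell('C1')) # prints =A1+B1NEWLINE print(ws.render_cell('C1',eval_formulae=True)) # prints 3NEWLINE # Round-trip of the zero-based column index conversions ('A' is 0)NEWLINE assert column_index_to_integer('AB') == 27NEWLINE assert column_integer_to_index(27) == 'AB'NEWLINE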
from __future__ import unicode_literalsNEWLINENEWLINEfrom django.conf import settingsNEWLINEfrom django import templateNEWLINEfrom django.contrib.humanize.templatetags.humanize import intcommaNEWLINEfrom django.template.defaultfilters import stringfilterNEWLINENEWLINEfrom wagtail.wagtailcore import hooksNEWLINEfrom wagtail.wagtailcore.models import get_navigation_menu_items, UserPagePermissionsProxy, PageViewRestrictionNEWLINEfrom wagtail.wagtailcore.utils import camelcase_to_underscore, escape_scriptNEWLINEfrom wagtail.wagtailcore.utils import cautious_slugify as _cautious_slugifyNEWLINEfrom wagtail.wagtailadmin.menu import admin_menuNEWLINENEWLINEfrom wagtail.utils.pagination import DEFAULT_PAGE_KEYNEWLINENEWLINENEWLINEregister = template.Library()NEWLINENEWLINEregister.filter('intcomma', intcomma)NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/explorer_nav.html')NEWLINEdef explorer_nav():NEWLINE return {NEWLINE 'nodes': get_navigation_menu_items()NEWLINE }NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/explorer_nav_child.html')NEWLINEdef explorer_subnav(nodes):NEWLINE return {NEWLINE 'nodes': nodesNEWLINE }NEWLINENEWLINENEWLINE@register.inclusion_tag('wagtailadmin/shared/main_nav.html', takes_context=True)NEWLINEdef main_nav(context):NEWLINE request = context['request']NEWLINENEWLINE return {NEWLINE 'menu_html': admin_menu.render_html(request),NEWLINE 'request': request,NEWLINE }NEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef main_nav_js():NEWLINE return admin_menu.media['js']NEWLINENEWLINENEWLINE@register.filter("ellipsistrim")NEWLINEdef ellipsistrim(value, max_length):NEWLINE if len(value) > max_length:NEWLINE truncd_val = value[:max_length]NEWLINE if not len(value) == (max_length + 1) and value[max_length + 1] != " ":NEWLINE truncd_val = truncd_val[:truncd_val.rfind(" ")]NEWLINE return truncd_val + "..."NEWLINE return valueNEWLINENEWLINENEWLINE@register.filterNEWLINEdef fieldtype(bound_field):NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.field.__class__.__name__)NEWLINE except AttributeError:NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.__class__.__name__)NEWLINE except AttributeError:NEWLINE return ""NEWLINENEWLINENEWLINE@register.filterNEWLINEdef widgettype(bound_field):NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.field.widget.__class__.__name__)NEWLINE except AttributeError:NEWLINE try:NEWLINE return camelcase_to_underscore(bound_field.widget.__class__.__name__)NEWLINE except AttributeError:NEWLINE return ""NEWLINENEWLINENEWLINE@register.assignment_tag(takes_context=True)NEWLINEdef page_permissions(context, page):NEWLINE """NEWLINE Usage: {% page_permissions page as page_perms %}NEWLINE Sets the variable 'page_perms' to a PagePermissionTester object that can be queried to find outNEWLINE what actions the current logged-in user can perform on the given page.NEWLINE """NEWLINE # Create a UserPagePermissionsProxy object to represent the user's global permissions, andNEWLINE # cache it in the context for the duration of the page request, if one does not exist alreadyNEWLINE if 'user_page_permissions' not in context:NEWLINE context['user_page_permissions'] = UserPagePermissionsProxy(context['request'].user)NEWLINENEWLINE # Now retrieve a PagePermissionTester from it, specific to the given pageNEWLINE return context['user_page_permissions'].for_page(page)NEWLINENEWLINENEWLINE@register.assignment_tag(takes_context=True)NEWLINEdef test_page_is_public(context, page):NEWLINE """NEWLINE Usage: {% test_page_is_public page as is_public %}NEWLINE Sets 'is_public' to True iff there are no page view restrictions in place onNEWLINE this page.NEWLINE Caches the list of page view restrictions in the context, to avoid repeatedNEWLINE DB queries on repeated calls.NEWLINE """NEWLINE if 'all_page_view_restriction_paths' not in context:NEWLINE context['all_page_view_restriction_paths'] = PageViewRestriction.objects.select_related('page').values_list('page__path', flat=True)NEWLINENEWLINE is_private = any([NEWLINE page.path.startswith(restricted_path)NEWLINE for restricted_path in context['all_page_view_restriction_paths']NEWLINE ])NEWLINENEWLINE return not is_privateNEWLINENEWLINENEWLINE@register.simple_tagNEWLINEdef hook_output(hook_name):NEWLINE """NEWLINE Example: {% hook_output 'insert_editor_css' %}NEWLINE Whenever we have a hook whose functions take no parameters and return a string, this tag can be usedNEWLINE to output the concatenation of all of those return values onto the page.NEWLINE Note that the output is not escaped - it is the hook function's responsibility to escape unsafe content.NEWLINE """NEWLINE snippets = [fn() for fn in hooks.get_hooks(hook_name)]NEWLINE return ''.join(snippets)NEWLINENEWLINENEWLINE@register.assignment_tagNEWLINEdef usage_count_enabled():NEWLINE return getattr(settings, 'WAGTAIL_USAGE_COUNT_ENABLED', False)NEWLINENEWLINENEWLINE@register.assignment_tagNEWLINEdef base_url_setting():NEWLINE return getattr(settings, 'BASE_URL', None)NEWLINENEWLINENEWLINEclass EscapeScriptNode(template.Node):NEWLINE TAG_NAME = 'escapescript'NEWLINENEWLINE def __init__(self, nodelist):NEWLINE super(EscapeScriptNode, self).__init__()NEWLINE self.nodelist = nodelistNEWLINENEWLINE def render(self, context):NEWLINE out = self.nodelist.render(context)NEWLINE return escape_script(out)NEWLINENEWLINE @classmethodNEWLINE def handle(cls, parser, token):NEWLINE nodelist = parser.parse(('end' + EscapeScriptNode.TAG_NAME,))NEWLINE parser.delete_first_token()NEWLINE return cls(nodelist)NEWLINENEWLINEregister.tag(EscapeScriptNode.TAG_NAME, EscapeScriptNode.handle)NEWLINENEWLINENEWLINE# Helpers for Widget.render_with_errors, our extension to the Django widget API that allows widgets toNEWLINE# take on the responsibility of rendering their own error messagesNEWLINENEWLINENEWLINE@register.filterNEWLINEdef render_with_errors(bound_field):NEWLINE """NEWLINE Usage: {{ field|render_with_errors }} as opposed to {{ field }}.NEWLINE If the field (a BoundField instance) has errors on it, and the associated widget implementsNEWLINE a render_with_errors method, call that; otherwise, call the regular widget rendering mechanism.NEWLINE """NEWLINE widget = bound_field.field.widgetNEWLINE if bound_field.errors and hasattr(widget, 'render_with_errors'):NEWLINE return widget.render_with_errors(bound_field.html_name, bound_field.value(), attrs={'id': bound_field.auto_id}, errors=bound_field.errors)NEWLINE else:NEWLINE return bound_field.as_widget()NEWLINENEWLINENEWLINE@register.filterNEWLINEdef has_unrendered_errors(bound_field):NEWLINE """NEWLINE Return true if this field has errors that were not accounted for by render_with_errors, becauseNEWLINE the widget does not support the render_with_errors methodNEWLINE """NEWLINE return bound_field.errors and not hasattr(bound_field.field.widget, 'render_with_errors')NEWLINENEWLINENEWLINE@register.filter(is_safe=True)NEWLINE@stringfilterNEWLINEdef cautious_slugify(value):NEWLINE return _cautious_slugify(value)NEWLINENEWLINENEWLINE@register.simple_tag(takes_context=True)NEWLINEdef querystring(context, **kwargs):NEWLINE """NEWLINE Print out the current querystring. Any keyword arguments to this templateNEWLINE tag will be added to the querystring before it is printed out.NEWLINENEWLINE <a href="/page/{% querystring key='value' %}">NEWLINENEWLINE Will result in something like:NEWLINENEWLINE <a href="/page/?foo=bar&key=value">NEWLINE """NEWLINE request = context['request']NEWLINE querydict = request.GET.copy()NEWLINE # Can't do querydict.update(kwargs), because QueryDict.update() appends toNEWLINE # the list of values, instead of replacing the values.NEWLINE for key, value in kwargs.items():NEWLINE if value is None:NEWLINE # Remove the key if the value is NoneNEWLINE querydict.pop(key, None)NEWLINE else:NEWLINE # Set the key otherwiseNEWLINE querydict[key] = valueNEWLINENEWLINE return '?' + querydict.urlencode()NEWLINENEWLINENEWLINE@register.simple_tag(takes_context=True)NEWLINEdef pagination_querystring(context, page_number, page_key=DEFAULT_PAGE_KEY):NEWLINE """NEWLINE Print out a querystring with an updated page number:NEWLINENEWLINE {% if page.has_next %}NEWLINE <a href="{% pagination_querystring page.next_page_number %}">Next page</a>NEWLINE {% endif %}NEWLINE """NEWLINE return querystring(context, **{page_key: page_number})NEWLINENEWLINENEWLINE@register.inclusion_tag("wagtailadmin/pages/listing/_pagination.html",NEWLINE takes_context=True)NEWLINEdef paginate(context, page, base_url='', page_key=DEFAULT_PAGE_KEY,NEWLINE classnames=''):NEWLINE """NEWLINE Print pagination previous/next links, and the page count. Take theNEWLINE following arguments:NEWLINENEWLINE pageNEWLINE The current page of results. This should be a Django pagination `Page`NEWLINE instanceNEWLINENEWLINE base_urlNEWLINE The base URL of the next/previous page, with no querystring.NEWLINE This is optional, and defaults to the current page by just printing theNEWLINE querystring for the next/previous page.NEWLINENEWLINE page_keyNEWLINE The name of the page variable in the query string. Defaults to the sameNEWLINE name as used in the :func:`~wagtail.utils.pagination.paginate`NEWLINE function.NEWLINENEWLINE classnamesNEWLINE Extra classes to add to the next/previous links.NEWLINE """NEWLINE request = context['request']NEWLINE return {NEWLINE 'base_url': base_url,NEWLINE 'classnames': classnames,NEWLINE 'request': request,NEWLINE 'page': page,NEWLINE 'page_key': page_key,NEWLINE 'paginator': page.paginator,NEWLINE }NEWLINE
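NEWLINE# The comment block below is an editor's illustrative addition, not partNEWLINE# of the original module: a minimal sketch of how the querystring andNEWLINE# pagination tags above compose in a (hypothetical) admin template.NEWLINE#NEWLINE# {% load wagtailadmin_tags %}NEWLINE# {% if page.has_next %}NEWLINE# <a href="{% pagination_querystring page.next_page_number %}">Next page</a>NEWLINE# {% endif %}NEWLINE#NEWLINE# pagination_querystring simply delegates to querystring() with the pageNEWLINE# number bound to page_key, so links such as '?p=2' (assuming the defaultNEWLINE# page key) preserve any other GET parameters already on the request.NEWLINE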
# Generated by Django 3.0.6 on 2020-05-16 05:00NEWLINENEWLINEfrom django.db import migrations, modelsNEWLINEimport django.db.models.deletionNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE initial = TrueNEWLINENEWLINE dependencies = [NEWLINE ('books', '0002_auto_20200513_0504'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.CreateModel(NEWLINE name='Inventory',NEWLINE fields=[NEWLINE ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),NEWLINE ('for_sale', models.BooleanField(default=True)),NEWLINE ('price', models.FloatField()),NEWLINE ('book', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='books.Book')),NEWLINE ],NEWLINE ),NEWLINE ]NEWLINE
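NEWLINE# The sketch below is an editor's addition, not part of the generatedNEWLINE# migration: it reconstructs the model definition that the CreateModelNEWLINE# operation above corresponds to. Field names and types are taken fromNEWLINE# the operation itself; the app's actual models.py may differ.NEWLINE#NEWLINE# from django.db import modelsNEWLINE# from books.models import BookNEWLINE#NEWLINE# class Inventory(models.Model):NEWLINE#     for_sale = models.BooleanField(default=True)NEWLINE#     price = models.FloatField()NEWLINE#     book = models.ForeignKey(Book, on_delete=models.CASCADE)NEWLINE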
innings_format = '{team} : {total}/{wickets} || {over}|{overs}\n\t\t\t{ball_by}'NEWLINENEWLINENEWLINEclass matchstats:NEWLINE """Track the score state of a limited-overs cricket match and format it for display."""NEWLINENEWLINE def __init__(self, mcode, team1, team2, overs):NEWLINE # 'score1'/'score2' hold [batting team, runs, wickets, current over,NEWLINE # total overs, ball-by-ball string] for the two inningsNEWLINE self.match_stats = {'mcode': mcode, 'date_time': None, 'team1': team1, 'team2': team2, 'overs': overs, 'status': None, 'score1': [None, 0, 0, 0, overs, None], 'score2': [None, 0, 0, 0, overs, None], 'runs_to_win': None, '1st_innings': None, '2nd_innings': None}NEWLINE self.mcode = mcodeNEWLINE self.team1 = team1NEWLINE self.team2 = team2NEWLINE self.truns = 0 # total runs in the current inningsNEWLINE self.twickets = 0 # wickets fallenNEWLINE self.over = 0 # current over, for displayNEWLINE self.overs = oversNEWLINE self.score = '' # key of the active score list ('score1' or 'score2')NEWLINE self.innings = '' # key of the active innings display stringNEWLINE self.target = 0 # runs required by the chasing sideNEWLINE self.balls = 0 # legal deliveries bowledNEWLINENEWLINE def set_score(self, score, innings, bat):NEWLINE """Start an innings: record the batting side and build the display string."""NEWLINE self.match_stats[score][0:4] = bat, self.truns, self.twickets, self.overNEWLINE score_list = self.match_stats[score]NEWLINE self.score = scoreNEWLINE self.innings = inningsNEWLINE self.match_stats[innings] = innings_format.format(team=score_list[0], total=score_list[1], wickets=score_list[2], over=score_list[3], overs=score_list[4], ball_by='')NEWLINENEWLINE def innings_view(self):NEWLINE """Refresh the formatted innings string from the active score list."""NEWLINE score_list = self.match_stats[self.score]NEWLINE self.match_stats[self.innings] = innings_format.format(team=score_list[0], total=score_list[1], wickets=score_list[2], over=score_list[3], overs=score_list[4], ball_by=score_list[5])NEWLINENEWLINE def update_score(self, run, disp_over):NEWLINE """Add runs for a legal delivery and advance the over display."""NEWLINE self.truns += runNEWLINE self.balls += 1NEWLINE self.over = disp_overNEWLINE self.match_stats[self.score][1] = self.trunsNEWLINE self.match_stats[self.score][3] = self.overNEWLINE self.innings_view()NEWLINENEWLINE def update_ball_by(self, ball_by_ball):NEWLINE """Store the ball-by-ball record, e.g. ['1', '4', 'W'] becomes '[ 1 4 W]'."""NEWLINE ball_str = '['NEWLINE for ball in ball_by_ball:NEWLINE ball_str = ball_str + ' ' + ballNEWLINE ball_str += ']'NEWLINE self.match_stats[self.score][5] = ball_strNEWLINE self.innings_view()NEWLINENEWLINE def delete_ball_by(self):NEWLINE """Clear the ball-by-ball record."""NEWLINE self.match_stats[self.score][5] = ''NEWLINE self.innings_view()NEWLINENEWLINE def extras(self, run):NEWLINE """Add extras (wides, no-balls): the runs count but no ball is consumed."""NEWLINE self.truns += runNEWLINE self.match_stats[self.score][1] = self.trunsNEWLINE self.innings_view()NEWLINENEWLINE def score_reset(self):NEWLINE """Reset the innings counters (used between innings)."""NEWLINE self.truns = 0NEWLINE self.over = 0NEWLINE self.twickets = 0NEWLINE self.balls = 0NEWLINENEWLINE def calc_balls_left(self):NEWLINE """Return (balls remaining, runs still required) for the chase."""NEWLINE balls_left = (self.overs * 6) - self.ballsNEWLINE runs_to_win = self.target - self.trunsNEWLINE return balls_left, runs_to_winNEWLINENEWLINE def result_check(self, balls_left):NEWLINE """Return 'WIN', 'DRAW' or 'LOSE' for the chasing side, or None if undecided."""NEWLINE if self.truns >= self.target:NEWLINE return 'WIN'NEWLINE elif self.truns == (self.target - 1) and balls_left == 0:NEWLINE return 'DRAW'NEWLINE elif self.truns < self.target and balls_left == 0:NEWLINE return 'LOSE'NEWLINE else:NEWLINE return NoneNEWLINENEWLINE def update_wicket(self, disp_over):NEWLINE """Record a wicket; return True when the innings is over (all ten down)."""NEWLINE self.twickets += 1NEWLINE self.balls += 1NEWLINE self.over = disp_overNEWLINE self.match_stats[self.score][2] = self.twicketsNEWLINE self.match_stats[self.score][3] = self.overNEWLINE self.innings_view()NEWLINE if self.twickets == 10:NEWLINE return TrueNEWLINE else:NEWLINE return FalseNEWLINE
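NEWLINENEWLINE# Editor's illustrative usage sketch, not part of the original class: itNEWLINE# walks one short innings through the scoring flow above. All values areNEWLINE# invented for demonstration, and 'target' is set directly because theNEWLINE# class exposes it as a plain attribute.NEWLINEif __name__ == '__main__':NEWLINE m = matchstats('M001', 'Team A', 'Team B', overs=2)NEWLINE m.set_score('score1', '1st_innings', bat='Team A')NEWLINE for ball, run in enumerate((1, 4, 0, 6, 2, 1), start=1):NEWLINE # disp_over renders balls bowled as cricket-style overs: 0.1 ... 0.5, 1.0NEWLINE m.update_score(run, disp_over=ball // 6 + (ball % 6) / 10)NEWLINE m.update_ball_by(['1', '4', '0', '6', '2', '1'])NEWLINE print(m.match_stats['1st_innings']) # Team A : 14/0 || 1.0|2 ... [ 1 4 0 6 2 1]NEWLINE # Second innings: the chase target is the first-innings total plus oneNEWLINE m.target = m.truns + 1NEWLINE m.score_reset()NEWLINE m.set_score('score2', '2nd_innings', bat='Team B')NEWLINE balls_left, runs_to_win = m.calc_balls_left()NEWLINE print(balls_left, runs_to_win) # 12 15NEWLINE print(m.result_check(balls_left)) # None (chase still in progress)NEWLINE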
# -*- coding: utf-8 -*-NEWLINE# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis ModulesNEWLINE# Copyright (C) 2020, Yoel Cortes-Pena <[email protected]>NEWLINE#NEWLINE# This module is under the UIUC open-source license. SeeNEWLINE# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txtNEWLINE# for license details.NEWLINE"""NEWLINE"""NEWLINEimport numpy as npNEWLINEfrom ._batch_bioreactor import BatchBioreactorNEWLINEfrom scipy.integrate import odeintNEWLINEfrom thermosteam.reaction import Reaction, ParallelReactionNEWLINENEWLINE__all__ = ('Fermentation',)NEWLINENEWLINEclass Fermentation(BatchBioreactor):NEWLINE    """NEWLINE    Create a Fermentation object which models large-scale batch fermentationNEWLINE    for the production of 1st generation ethanol using yeastNEWLINE    [1]_ [2]_ [3]_ [4]_. A compound with CAS 'Yeast' must be present.NEWLINE    Only sucrose and glucose are taken into account for conversion.NEWLINE    Conversion is based on reaction time, `tau`. Cleaning and unloading time,NEWLINE    `tau_0`, fraction of working volume, `V_wf`, and number of reactors,NEWLINE    `N_reactors`, are attributes that can be changed. Cost of a reactorNEWLINE    is based on the NREL batch fermentation tank cost assuming volumetricNEWLINE    scaling with a 6/10th exponent [5]_.NEWLINENEWLINE    ParametersNEWLINE    ----------NEWLINE    ins : streamsNEWLINE        Inlet fluids to be mixed into the fermentor.NEWLINE    outs : stream sequenceNEWLINE        * [0] VentNEWLINE        * [1] EffluentNEWLINE    tau : floatNEWLINE        Reaction time.NEWLINE    N : int, optionalNEWLINE        Number of batch reactors.NEWLINE    V : float, optionalNEWLINE        Target volume of reactors [m^3].NEWLINE    T=305.15 : floatNEWLINE        Temperature of reactor [K].NEWLINE    P=101325 : floatNEWLINE        Operating pressure of reactor [Pa].NEWLINE    Nmin=2 : intNEWLINE        Minimum number of fermentors.NEWLINE    Nmax=36 : intNEWLINE        Maximum number of fermentors.NEWLINE    efficiency=0.9 : float, optionalNEWLINE        User enforced efficiency.NEWLINE    iskinetic=False : bool, optionalNEWLINE        If True, `Fermentation.kinetic_model` will be used.NEWLINENEWLINE    NotesNEWLINE    -----NEWLINE    Either N or V must be given.NEWLINENEWLINE    ExamplesNEWLINE    --------NEWLINE    Simulate a Fermentation object which models batch fermentation for theNEWLINE    production of 1st generation ethanol using yeast.NEWLINENEWLINE    >>> from biorefineries.lipidcane import chemicalsNEWLINE    >>> from biosteam.units import FermentationNEWLINE    >>> from biosteam import Stream, settingsNEWLINE    >>> settings.set_thermo(chemicals)NEWLINE    >>> feed = Stream('feed',NEWLINE    ...               Water=1.20e+05,NEWLINE    ...               Glucose=1.89e+03,NEWLINE    ...               Sucrose=2.14e+04,NEWLINE    ...               DryYeast=1.03e+04,NEWLINE    ...               units='kg/hr',NEWLINE    ...               T=32+273.15)NEWLINE    >>> F1 = Fermentation('F1',NEWLINE    ...                   ins=feed, outs=('CO2', 'product'),NEWLINE    ...                   
tau=8, efficiency=0.90, N=8)NEWLINE >>> F1.simulate()NEWLINE >>> F1.show()NEWLINE Fermentation: F1NEWLINE ins...NEWLINE [0] feedNEWLINE phase: 'l', T: 305.15 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 6.66e+03NEWLINE Glucose 10.5NEWLINE Sucrose 62.5NEWLINE Yeast 415NEWLINE [1] missing streamNEWLINE outs...NEWLINE [0] CO2NEWLINE phase: 'g', T: 304.19 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 9.48NEWLINE Ethanol 3.52NEWLINE CO2 244NEWLINE [1] productNEWLINE phase: 'l', T: 304.19 K, P: 101325 PaNEWLINE flow (kmol/hr): Water 6.59e+03NEWLINE Ethanol 240NEWLINE Glucose 4.07NEWLINE Yeast 484NEWLINE >>> F1.results()NEWLINE Fermentation Units F1NEWLINE Power Rate kW 66.6NEWLINE Cost USD/hr 5.21NEWLINE Chilled water Duty kJ/hr -7.55e+06NEWLINE Flow kmol/hr 5.06e+03NEWLINE Cost USD/hr 37.8NEWLINE Design Reactor volume m3 247NEWLINE Batch time hr 12.6NEWLINE Loading time hr 1.57NEWLINE Number of reactors 8NEWLINE Recirculation flow rate m3/hr 17.7NEWLINE Reactor duty kJ/hr 7.55e+06NEWLINE Cleaning and unloading time hr 3NEWLINE Working volume fraction 0.9NEWLINE Purchase cost Heat exchangers USD 1.02e+05NEWLINE Reactors USD 1.87e+06NEWLINE Agitators USD 1.17e+05NEWLINE Cleaning in place USD 8.9e+04NEWLINE Recirculation pumps USD 1.26e+05NEWLINE Total purchase cost USD 2.31e+06NEWLINE Utility cost USD/hr 43NEWLINE NEWLINE ReferencesNEWLINE ----------NEWLINE .. [1] Oliveira, Samuel C., et al. "Discrimination between ethanol NEWLINE inhibition models in a continuous alcoholic fermentation process usingNEWLINE flocculating yeast." Applied biochemistry and biotechnology 74.3 (1998): 161-172.NEWLINE NEWLINE .. [2] Oliveira, Samuel C., et al. "Continuous ethanol fermentation in aNEWLINE tower reactor with flocculating yeast recycle: scale-up effects on processNEWLINE performance, kinetic parameters and model predictions." BioprocessNEWLINE Engineering 20.6 (1999): 525-530.NEWLINE NEWLINE .. [3] Oliveira, Samuel C., et al. "Mathematical modeling of a continuousNEWLINE alcoholic fermentation process in a two-stage tower reactor cascade withNEWLINE flocculating yeast recycle." Bioprocess and biosystems engineering 38.3NEWLINE (2015): 469-479.NEWLINE NEWLINE .. [4] Oliveira, Samuel C., et al. "Kinetic Modeling of 1‐G EthanolNEWLINE Fermentations." Fermentation Processes. InTech, 2017.NEWLINE NEWLINE .. [5] D. Humbird, R. Davis, L. Tao, C. Kinchin, D. Hsu, and A. AdenNEWLINE National. Renewable Energy Laboratory Golden, Colorado. P. Schoen,NEWLINE J. Lukas, B. Olthof, M. Worley, D. Sexton, and D. Dudgeon. Harris GroupNEWLINE Inc. Seattle, Washington and Atlanta, Georgia. Process Design and EconomicsNEWLINE for Biochemical Conversion of Lignocellulosic Biomass to Ethanol Dilute-AcidNEWLINE Pretreatment and Enzymatic Hydrolysis of Corn Stover. May 2011. TechnicalNEWLINE Report NREL/TP-5100-47764NEWLINE NEWLINE """NEWLINE line = 'Fermentation'NEWLINE NEWLINE #: tuple[float] Kinetic parameters for the kinetic model. 
Default constants are fitted for Oliveira's model (mu_m1, mu_m2, Ks1, Ks2, Pm1, Pm2, Xm, Y_PS, a)NEWLINE    kinetic_constants = (0.31,   # mu_m1NEWLINE                         1.01,   # mu_m2NEWLINE                         1.88,   # Ks1NEWLINE                         2.81,   # Ks2NEWLINE                         82.8,   # Pm1NEWLINE                         108.2,  # Pm2NEWLINE                         113.4,  # XmNEWLINE                         0.45,   # Y_PSNEWLINE                         0.18)   # aNEWLINENEWLINE    def __init__(self, ID='', ins=None, outs=(), thermo=None, *,NEWLINE                 tau, N=None, V=None, T=305.15, P=101325., Nmin=2, Nmax=36,NEWLINE                 efficiency=0.9, iskinetic=False):NEWLINE        BatchBioreactor.__init__(self, ID, ins, outs, thermo,NEWLINE                                 tau=tau, N=N, V=V, T=T, P=P, Nmin=Nmin, Nmax=Nmax)NEWLINE        self._load_components()NEWLINE        self.iskinetic = iskineticNEWLINE        chemicals = self.chemicalsNEWLINE        self.hydrolysis_reaction = Reaction('Sucrose + Water -> 2Glucose', 'Sucrose', 1.00, chemicals)NEWLINE        self.fermentation_reaction = Reaction('Glucose -> 2Ethanol + 2CO2', 'Glucose', efficiency, chemicals)NEWLINE        self.cell_growth_reaction = cell_growth = Reaction('Glucose -> Yeast', 'Glucose', 0.70, chemicals, basis='wt')NEWLINE        cell_growth.basis = 'mol'NEWLINE        if all([i in self.chemicals for i in ('FFA', 'DAG', 'TAG', 'Glycerol')]):NEWLINE            self.lipid_reaction = self.oil_reaction = ParallelReaction([NEWLINE                Reaction('TAG + 3Water -> 3FFA + Glycerol', 'TAG', 0.23, chemicals),NEWLINE                Reaction('TAG + Water -> FFA + DAG', 'TAG', 0.02, chemicals)NEWLINE            ])NEWLINE        else:NEWLINE            self.lipid_reaction = NoneNEWLINE        self.efficiency = efficiencyNEWLINENEWLINE    def _calc_efficiency(self, feed, tau):  # pragma: no coverNEWLINE        # Get initial concentrationsNEWLINE        y, e, s, w = feed.indices(['Yeast',NEWLINE                                   '64-17-5',NEWLINE                                   '492-61-5',NEWLINE                                   '7732-18-5'])NEWLINE        mass = feed.massNEWLINE        F_vol = feed.F_volNEWLINE        concentration_in = mass/F_volNEWLINE        X0, P0, S0 = (concentration_in[i] for i in (y, e, s))NEWLINENEWLINE        # Integrate to get final concentrationNEWLINE        t = np.linspace(0, tau, 1000)NEWLINE        C_t = odeint(self.kinetic_model, (X0, P0, S0), t,NEWLINE                     args=self.kinetic_constants)NEWLINE        # Cache dataNEWLINE        self._X = C_t[:, 0]NEWLINE        self._P = C_t[:, 1]NEWLINE        self._S = S = C_t[:, 2]NEWLINENEWLINE        # Calculate efficiencyNEWLINE        Sf = S[-1]NEWLINE        Sf = Sf if Sf > 0 else 0NEWLINE        Y_PS = self.kinetic_constants[-2]NEWLINE        eff = (S0 - Sf)/S0 * Y_PS/0.511NEWLINE        return effNEWLINENEWLINE    @staticmethodNEWLINE    def kinetic_model(z, t, *kinetic_constants):  # pragma: no coverNEWLINE        """NEWLINE        Return change of yeast, ethanol, and substrate concentration in kg/m3.NEWLINENEWLINE        ParametersNEWLINE        ----------NEWLINE        z : Iterable with (X, P, S) [-]:NEWLINE            * X: Yeast concentration (kg/m3)NEWLINE            * P: Ethanol concentration (kg/m3)NEWLINE            * S: Substrate concentration (kg/m3)NEWLINENEWLINE        t : floatNEWLINE            Time pointNEWLINENEWLINE        *kinetic_constantsNEWLINE            * mu_m1: Maximum specific growth rate (1/hr)NEWLINE            * mu_m2: Maximum specific ethanol production rate (g-product/g-cell-hr)NEWLINE            * Ks1: Sugar saturation constant for growth (g/L)NEWLINE            * Ks2: Sugar saturation constant for product (g/L)NEWLINE            * Pm1: Maximum product concentration at zero growth [mu_m1=0] (g/L)NEWLINE            * Pm2: Maximum product concentration [mu_m2=0] (g/L)NEWLINE            * Xm: Maximum cell concentration [mu_m1=0] (g/L)NEWLINE            * Y_PS: Ethanol yield based on sugar consumedNEWLINE            * a: Toxic powerNEWLINENEWLINE        """NEWLINE        mu_m1, mu_m2, Ks1, Ks2, Pm1, Pm2, Xm, Y_PS, a = kinetic_constantsNEWLINENEWLINE        # Current yeast, ethanol, and glucose concentration (kg/m3)NEWLINE        X, P, S = zNEWLINENEWLINE        # Compute coefficientsNEWLINE        mu_X = mu_m1 * (S/(Ks1 + S)) * (1 - 
P/Pm1)**a * (1 - X/Xm)NEWLINE        mu_P = mu_m2 * (S/(Ks2 + S)) * (1 - P/Pm2)NEWLINE        mu_S = mu_P/0.45  # sugar consumption rate; 0.45 is Y_PS, the ethanol yield per sugar consumedNEWLINENEWLINE        # Compute derivativesNEWLINE        dXdt = mu_X * XNEWLINE        dPdt = mu_P * XNEWLINE        dSdt = -mu_S * XNEWLINE        return (dXdt, dPdt, dSdt)NEWLINENEWLINE    @propertyNEWLINE    def efficiency(self):NEWLINE        return self.fermentation_reaction.XNEWLINE    @efficiency.setterNEWLINE    def efficiency(self, efficiency):NEWLINE        self.fermentation_reaction.X = efficiencyNEWLINENEWLINE    def _run(self):NEWLINE        vent, effluent = self.outsNEWLINE        effluent.mix_from(self.ins)NEWLINE        self.hydrolysis_reaction(effluent)NEWLINE        if self.iskinetic:NEWLINE            self.fermentation_reaction.X = self._calc_efficiency(effluent, self._tau)NEWLINE        self.fermentation_reaction(effluent)NEWLINE        self.cell_growth_reaction(effluent)NEWLINE        if self.lipid_reaction: self.lipid_reaction(effluent)NEWLINE        vent.empty()NEWLINE        vent.receive_vent(effluent)NEWLINE
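The class's kinetic model can also be exercised on its own, outside a full BioSTEAM flowsheet; a minimal sketch (the initial concentrations below are made-up illustrative values, not taken from the source):
import numpy as npNEWLINEfrom scipy.integrate import odeintNEWLINEfrom biosteam.units import FermentationNEWLINENEWLINE# Standalone integration of Fermentation.kinetic_model -- illustrative values only;NEWLINE# a real run derives X0, P0, S0 from the feed stream as in _calc_efficiency.NEWLINEX0, P0, S0 = 10.0, 0.0, 150.0  # yeast, ethanol, sugar [kg/m3] (assumed)NEWLINEt = np.linspace(0, 8, 1000)  # 8 hr batch, matching the docstring exampleNEWLINEC_t = odeint(Fermentation.kinetic_model, (X0, P0, S0), t, args=Fermentation.kinetic_constants)NEWLINEX, P, S = C_t.TNEWLINEY_PS = Fermentation.kinetic_constants[-2]NEWLINEefficiency = (S0 - max(S[-1], 0.0))/S0 * Y_PS/0.511  # mirrors _calc_efficiencyNEWLINEprint(f"final ethanol: {P[-1]:.1f} kg/m3, efficiency: {efficiency:.2f}")NEWLINE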
#!/usr/bin/env python3NEWLINE# Copyright (c) Meta Platforms, Inc. and affiliates.NEWLINE# All rights reserved.NEWLINE#NEWLINE# This source code is licensed under the BSD-style license found in theNEWLINE# LICENSE file in the root directory of this source tree.NEWLINENEWLINEfrom typing import List, OptionalNEWLINENEWLINEimport torchNEWLINEfrom torch import nnNEWLINEfrom torchrec.modules.embedding_modules import EmbeddingBagCollectionNEWLINEfrom torchrec.modules.mlp import MLPNEWLINEfrom torchrec.sparse.jagged_tensor import (NEWLINE    KeyedJaggedTensor,NEWLINE    KeyedTensor,NEWLINE)NEWLINENEWLINE# Sphinx Documentation Text (for user-facing classes only)NEWLINENEWLINE"""NEWLINE.. fb:display_title::NEWLINE   DLRM APINEWLINE========NEWLINENotation used throughout:NEWLINENEWLINEF: number of sparseFeaturesNEWLINED: embedding_dimension of sparse featuresNEWLINEB: batch_sizeNEWLINEnum_features: number of dense featuresNEWLINENEWLINE"""NEWLINENEWLINENEWLINEdef choose(n: int, k: int) -> int:NEWLINE    """NEWLINE    Simple implementation of math.comb for python 3.7 compatibilityNEWLINE    """NEWLINE    if 0 <= k <= n:NEWLINE        ntok = 1NEWLINE        ktok = 1NEWLINE        for t in range(1, min(k, n - k) + 1):NEWLINE            ntok *= nNEWLINE            ktok *= tNEWLINE            n -= 1NEWLINE        return ntok // ktokNEWLINE    else:NEWLINE        return 0NEWLINENEWLINENEWLINEclass SparseArch(nn.Module):NEWLINE    """NEWLINE    Processes the Sparse Features of DLRM. Does Embedding Lookup for allNEWLINE    EmbeddingBag and Embedding features of each collection.NEWLINENEWLINE    Constructor Args:NEWLINE        embedding_bag_collection: EmbeddingBagCollection,NEWLINENEWLINE    Call Args:NEWLINE        features: KeyedJaggedTensor,NEWLINENEWLINE    Returns:NEWLINE        KeyedTensor - size F * D X BNEWLINENEWLINE    Example:NEWLINE        >>> eb1_config = EmbeddingBagConfig(NEWLINE           name="t1", embedding_dim=3, num_embeddings=10, feature_names=["f1"]NEWLINE        )NEWLINE        eb2_config = EmbeddingBagConfig(NEWLINE            name="t2", embedding_dim=4, num_embeddings=10, feature_names=["f2"]NEWLINE        )NEWLINE        ebc_config = EmbeddingBagCollectionConfig(tables=[eb1_config, eb2_config])NEWLINENEWLINE        ebc = EmbeddingBagCollection(config=ebc_config)NEWLINENEWLINE        #     0       1       2  <-- batchNEWLINE        # 0   [0,1]   None    [2]NEWLINE        # 1   [3]     [4]     [5,6,7]NEWLINE        # ^NEWLINE        # featureNEWLINE        features = KeyedJaggedTensor.from_offsets_sync(NEWLINE            keys=["f1", "f2"],NEWLINE            values=torch.tensor([0, 1, 2, 3, 4, 5, 6, 7]),NEWLINE            offsets=torch.tensor([0, 2, 2, 3, 4, 5, 8]),NEWLINE        )NEWLINENEWLINE        sparse_arch(features)NEWLINE    """NEWLINENEWLINE    def __init__(self, embedding_bag_collection: EmbeddingBagCollection) -> None:NEWLINE        super().__init__()NEWLINE        self.embedding_bag_collection: EmbeddingBagCollection = embedding_bag_collectionNEWLINENEWLINE    def forward(NEWLINE        self,NEWLINE        features: KeyedJaggedTensor,NEWLINE    ) -> KeyedTensor:NEWLINE        return self.embedding_bag_collection(features)NEWLINENEWLINENEWLINEclass DenseArch(nn.Module):NEWLINE    """NEWLINE    Processes the dense features of DLRM model.NEWLINENEWLINE    Constructor Args:NEWLINE        in_features: int - size of the input.NEWLINE        layer_sizes: List[int] - list of layer sizes.NEWLINE        device: (Optional[torch.device]).NEWLINENEWLINE    Call Args:NEWLINE        features: torch.Tensor - size B X num_featuresNEWLINENEWLINE    Returns:NEWLINE        torch.Tensor - size B X DNEWLINENEWLINE    Example:NEWLINE        >>> B = 20NEWLINE        D = 3NEWLINE        dense_arch = DenseArch(10, layer_sizes=[15, D])NEWLINE        dense_embedded = dense_arch(torch.rand((B, 10)))NEWLINE    """NEWLINENEWLINE    def __init__(NEWLINE        self,NEWLINE        in_features: int,NEWLINE        layer_sizes: List[int],NEWLINE        device: Optional[torch.device] = None,NEWLINE    ) -> None:NEWLINE        super().__init__()NEWLINE        self.model: nn.Module = MLP(NEWLINE            in_features, layer_sizes, bias=True, activation="relu", device=deviceNEWLINE        )NEWLINENEWLINE    def forward(self, features: torch.Tensor) -> torch.Tensor:NEWLINE        return self.model(features)NEWLINENEWLINENEWLINEclass InteractionArch(nn.Module):NEWLINE    """NEWLINE    Processes the output of both SparseArch (sparse_features) and DenseArchNEWLINE    (dense_features). Returns the pairwise dot product of each sparse feature pair,NEWLINE    the dot product of each sparse features with the output of the dense layer,NEWLINE    and the dense layer itself (all concatenated).NEWLINENEWLINE    NOTE: The dimensionality of the dense_features (D) is expected to match theNEWLINE    dimensionality of the sparse_features so that the dot products between them can beNEWLINE    computed.NEWLINENEWLINE    Constructor Args:NEWLINE        num_sparse_features: int - size FNEWLINENEWLINE    Call Args:NEWLINE        dense_features: torch.Tensor - size B X DNEWLINE        sparse_features: KeyedTensor - size F * D X BNEWLINENEWLINE    Returns:NEWLINE        torch.Tensor - B X (D + F + F choose 2)NEWLINENEWLINE    Example:NEWLINE        >>> D = 3NEWLINE        B = 10NEWLINE        keys = ["f1", "f2"]NEWLINE        F = len(keys)NEWLINE        inter_arch = InteractionArch(num_sparse_features=F)NEWLINENEWLINE        dense_features = torch.rand((B, D))NEWLINENEWLINE        sparse_features = KeyedTensor(NEWLINE            keys=keys,NEWLINE            length_per_key=[D, D],NEWLINE            values=torch.rand((B, D * F)),NEWLINE        )NEWLINENEWLINE        #  B X (D + F + F choose 2)NEWLINE        concat_dense = inter_arch(dense_features, sparse_features)NEWLINE    """NEWLINENEWLINE    def __init__(self, num_sparse_features: int) -> None:NEWLINE        super().__init__()NEWLINE        self.F = num_sparse_featuresNEWLINE        self.triu_indices: torch.Tensor = torch.triu_indices(NEWLINE            self.F + 1, self.F + 1, offset=1NEWLINE        )NEWLINENEWLINE    def forward(NEWLINE        self, dense_features: torch.Tensor, sparse_features: KeyedTensorNEWLINE    ) -> torch.Tensor:NEWLINE        if self.F <= 0:NEWLINE            return dense_featuresNEWLINE        (B, D) = dense_features.shapeNEWLINENEWLINE        sparse_values = sparse_features.values().reshape(B, self.F, D)NEWLINE        combined_values = torch.cat((dense_features.unsqueeze(1), sparse_values), dim=1)NEWLINENEWLINE        # dense/sparse + sparse/sparse interactionNEWLINE        # size B X (F + F choose 2)NEWLINE        interactions = torch.bmm(NEWLINE            combined_values, torch.transpose(combined_values, 1, 2)NEWLINE        )NEWLINE        interactions_flat = interactions[:, self.triu_indices[0], self.triu_indices[1]]NEWLINENEWLINE        return torch.cat((dense_features, interactions_flat), dim=1)NEWLINENEWLINENEWLINEclass OverArch(nn.Module):NEWLINE    """NEWLINE    Final Arch of DLRM - a simple MLP applied to the output of the InteractionArch.NEWLINENEWLINE    Constructor Args:NEWLINE        in_features: intNEWLINE        layer_sizes: list[int]NEWLINE        device: (Optional[torch.device]).NEWLINENEWLINE    Call Args:NEWLINE        features: torch.TensorNEWLINENEWLINE    Returns:NEWLINE        torch.Tensor - size B X layer_sizes[-1]NEWLINENEWLINE    Example:NEWLINE        >>> B = 20NEWLINE        D = 3NEWLINE        over_arch = OverArch(10, [5, 1])NEWLINE        logits = over_arch(torch.rand((B, 10)))NEWLINE    """NEWLINENEWLINE    def __init__(NEWLINE        self,NEWLINE        in_features: int,NEWLINE        layer_sizes: List[int],NEWLINE        device: Optional[torch.device] = None,NEWLINE    ) -> None:NEWLINE        super().__init__()NEWLINE        if len(layer_sizes) <= 1:NEWLINE            raise ValueError("OverArch must have multiple layers.")NEWLINE        self.model: nn.Module = nn.Sequential(NEWLINE            MLP(NEWLINE                in_features,NEWLINE                layer_sizes[:-1],NEWLINE                bias=True,NEWLINE                activation="relu",NEWLINE                device=device,NEWLINE            ),NEWLINE            nn.Linear(layer_sizes[-2], layer_sizes[-1], bias=True, device=device),NEWLINE        )NEWLINENEWLINE    def forward(self, features: torch.Tensor) -> torch.Tensor:NEWLINE        return self.model(features)NEWLINENEWLINENEWLINEclass DLRM(nn.Module):NEWLINE    """NEWLINE    Recsys model from "Deep Learning Recommendation Model for Personalization andNEWLINE    Recommendation Systems" (https://arxiv.org/abs/1906.00091). Processes sparseNEWLINE    features by learning pooled embeddings for each feature. Learns the relationshipNEWLINE    between dense features and sparse features by projecting dense features into theNEWLINE    same embedding space. Also, learns the pairwise relationships between sparseNEWLINE    features.NEWLINENEWLINE    The module assumes all sparse features have the same embedding dimensionNEWLINE    (i.e., each EmbeddingBagConfig uses the same embedding_dim)NEWLINENEWLINE    Constructor Args:NEWLINE        embedding_bag_collection (EmbeddingBagCollection): collection of embedding bagsNEWLINE            used to define SparseArch.NEWLINE        dense_in_features (int): the dimensionality of the dense input features.NEWLINE        dense_arch_layer_sizes (list[int]): the layer sizes for the DenseArch.NEWLINE        over_arch_layer_sizes (list[int]): the layer sizes for the OverArch. NOTE: TheNEWLINE            output dimension of the InteractionArch should not be manually specifiedNEWLINE            here.NEWLINE        dense_device: (Optional[torch.device]).NEWLINENEWLINE    Call Args:NEWLINE        dense_features: torch.Tensor,NEWLINE        sparse_features: KeyedJaggedTensor,NEWLINENEWLINE    Returns:NEWLINE        torch.Tensor - logits with size B X 1NEWLINENEWLINE    Example:NEWLINE        >>> B = 2NEWLINE        D = 8NEWLINENEWLINE        eb1_config = EmbeddingBagConfig(NEWLINE            name="t1", embedding_dim=D, num_embeddings=100, feature_names=["f1", "f3"]NEWLINE        )NEWLINE        eb2_config = EmbeddingBagConfig(NEWLINE            name="t2",NEWLINE            embedding_dim=D,NEWLINE            num_embeddings=100,NEWLINE            feature_names=["f2"],NEWLINE        )NEWLINE        ebc_config = EmbeddingBagCollectionConfig(tables=[eb1_config, eb2_config])NEWLINENEWLINE        ebc = EmbeddingBagCollection(config=ebc_config)NEWLINE        model = DLRM(NEWLINE            embedding_bag_collection=ebc,NEWLINE            dense_in_features=100,NEWLINE            dense_arch_layer_sizes=[20],NEWLINE            over_arch_layer_sizes=[5, 1],NEWLINE        )NEWLINENEWLINE        features = torch.rand((B, 100))NEWLINENEWLINE        #     0      1NEWLINE        # 0   [1,2]  [4,5]NEWLINE        # 1   [4,3]  [2,9]NEWLINE        # ^NEWLINE        # featureNEWLINE        sparse_features = KeyedJaggedTensor.from_offsets_sync(NEWLINE            keys=["f1", "f3"],NEWLINE            values=torch.tensor([1, 2, 4, 5, 4, 3, 2, 9]),NEWLINE            offsets=torch.tensor([0, 2, 4, 6, 8]),NEWLINE        )NEWLINENEWLINE        logits = model(NEWLINE            dense_features=features,NEWLINE            sparse_features=sparse_features,NEWLINE        )NEWLINE    """NEWLINENEWLINE    def __init__(NEWLINE        self,NEWLINE        embedding_bag_collection: EmbeddingBagCollection,NEWLINE        dense_in_features: int,NEWLINE        dense_arch_layer_sizes: List[int],NEWLINE        over_arch_layer_sizes: List[int],NEWLINE        dense_device: Optional[torch.device] = None,NEWLINE    ) -> None:NEWLINE        super().__init__()NEWLINE        assert (NEWLINE            len(embedding_bag_collection.embedding_bag_configs) > 0NEWLINE        ), "At least one embedding bag is required"NEWLINE        for i in range(1, len(embedding_bag_collection.embedding_bag_configs)):NEWLINE            conf_prev = embedding_bag_collection.embedding_bag_configs[i - 1]NEWLINE            conf = embedding_bag_collection.embedding_bag_configs[i]NEWLINE            assert (NEWLINE                conf_prev.embedding_dim == conf.embedding_dimNEWLINE            ), "All EmbeddingBagConfigs must have the same dimension"NEWLINE        embedding_dim: int = embedding_bag_collection.embedding_bag_configs[NEWLINE            0NEWLINE        ].embedding_dimNEWLINE        if dense_arch_layer_sizes[-1] != embedding_dim:NEWLINE            raise ValueError(NEWLINE                f"embedding_bag_collection dimension ({embedding_dim}) and final dense "NEWLINE                f"arch layer size ({dense_arch_layer_sizes[-1]}) must match."NEWLINE            )NEWLINENEWLINE        num_feature_names = sum(NEWLINE            [NEWLINE                len(conf.feature_names)NEWLINE                for conf in embedding_bag_collection.embedding_bag_configsNEWLINE            ]NEWLINE        )NEWLINENEWLINE        over_in_features = (NEWLINE            embedding_dim + choose(num_feature_names, 2) + num_feature_namesNEWLINE        )NEWLINENEWLINE        self.sparse_arch = SparseArch(embedding_bag_collection)NEWLINE        self.dense_arch = DenseArch(NEWLINE            in_features=dense_in_features,NEWLINE            layer_sizes=dense_arch_layer_sizes,NEWLINE            device=dense_device,NEWLINE        )NEWLINE        self.inter_arch = InteractionArch(num_sparse_features=num_feature_names)NEWLINE        self.over_arch = OverArch(NEWLINE            in_features=over_in_features,NEWLINE            layer_sizes=over_arch_layer_sizes,NEWLINE            device=dense_device,NEWLINE        )NEWLINENEWLINE    def forward(NEWLINE        self,NEWLINE        dense_features: torch.Tensor,NEWLINE        sparse_features: KeyedJaggedTensor,NEWLINE    ) -> torch.Tensor:NEWLINE        embedded_dense = self.dense_arch(dense_features)NEWLINE        embedded_sparse = self.sparse_arch(sparse_features)NEWLINE        concatenated_dense = self.inter_arch(NEWLINE            dense_features=embedded_dense, sparse_features=embedded_sparseNEWLINE        )NEWLINE        logits = self.over_arch(concatenated_dense)NEWLINE        return logitsNEWLINE
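The interaction step is plain PyTorch and can be sanity-checked without torchrec; a minimal shape check mirroring InteractionArch.forward (random inputs and tiny illustrative sizes):
import torchNEWLINENEWLINE# Pure-torch replica of the InteractionArch computation -- random inputs, tiny sizes.NEWLINEB, D, F = 10, 3, 2NEWLINEdense = torch.rand(B, D)NEWLINEsparse = torch.rand(B, F, D)  # stands in for sparse_features.values().reshape(B, F, D)NEWLINEcombined = torch.cat((dense.unsqueeze(1), sparse), dim=1)  # B x (F+1) x DNEWLINEinteractions = torch.bmm(combined, combined.transpose(1, 2))  # B x (F+1) x (F+1)NEWLINEtriu = torch.triu_indices(F + 1, F + 1, offset=1)NEWLINEflat = interactions[:, triu[0], triu[1]]  # B x (F + F choose 2)NEWLINEout = torch.cat((dense, flat), dim=1)NEWLINEassert out.shape == (B, D + F + F * (F - 1) // 2)  # B x (D + F + C(F, 2))NEWLINE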
#!/usr/bin/env python3NEWLINEimport numpy as npNEWLINEimport path_parserNEWLINEfrom mpl_toolkits.mplot3d import Axes3DNEWLINEimport matplotlib.pyplot as pltNEWLINEfrom matplotlib import cmNEWLINEfrom matplotlib.ticker import LinearLocator, FormatStrFormatterNEWLINEfrom scipy.spatial import KDTreeNEWLINENEWLINE# ruta = 'sample_map_origin_map.txt'NEWLINEruta = 'Trayectoria3.txt'  # path to the trajectory fileNEWLINENEWLINENEWLINEdef main():NEWLINE    arr_in = np.array(list(path_parser.read_points(ruta)))NEWLINE    print(arr_in)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    main()
# Copyright 2015 The Chromium Authors. All rights reserved.NEWLINE# Use of this source code is governed by a BSD-style license that can beNEWLINE# found in the LICENSE file.NEWLINE"""Determines support level for different steps for masters."""NEWLINENEWLINEfrom services.deps import GetOSPlatformNameNEWLINEfrom model.wf_config import FinditConfigNEWLINENEWLINE# Explicitly list unsupported masters. Additional work might be needed in orderNEWLINE# to support them.NEWLINE_UNSUPPORTED_MASTERS = [NEWLINE    'chromium.lkgr',  # Disabled as results are not shown on Sheriff-o-Matic.NEWLINE    'chromium.gpu',  # Disabled as too many false positives.NEWLINE    'chromium.memory.fyi',NEWLINE    'chromium.gpu.fyi',NEWLINE    'chromium.perf',NEWLINE]NEWLINENEWLINENEWLINEdef _ConvertOldMastersFormatToNew(masters_to_disallowed_steps):NEWLINE    """Converts the old masters format to the new rules dict.NEWLINENEWLINE    Args:NEWLINE        masters_to_disallowed_steps: A dict in the format:NEWLINE            {NEWLINE                'master1': ['step1', 'step2', ...],NEWLINE                'master2': ['step3', 'step4', ...]NEWLINE            }NEWLINENEWLINE    Returns:NEWLINE        A dict in the latest rules dict format:NEWLINE            {NEWLINE                'supported_masters': {NEWLINE                    'master1': {NEWLINE                        'unsupported_steps': ['step1', 'step2', ...], (if any)NEWLINE                    }NEWLINE                },NEWLINE                'global': {}NEWLINE            }NEWLINE    """NEWLINE    supported_masters = {}NEWLINE    steps_for_masters_rules_in_latest_format = {NEWLINE        'supported_masters': supported_masters,NEWLINE        'global': {}NEWLINE    }NEWLINENEWLINE    for master, unsupported_steps in masters_to_disallowed_steps.iteritems():NEWLINE        supported_masters[master] = {}NEWLINE        if unsupported_steps:NEWLINE            supported_masters[master]['unsupported_steps'] = unsupported_stepsNEWLINENEWLINE    return steps_for_masters_rules_in_latest_formatNEWLINENEWLINENEWLINEdef GetStepsForMastersRules(settings=None, version=None):NEWLINE    if settings is None:NEWLINE        settings = FinditConfig.Get(version)NEWLINE    return (settings.steps_for_masters_rules orNEWLINE            _ConvertOldMastersFormatToNew(settings.masters_to_disallowed_steps))NEWLINENEWLINENEWLINEdef MasterIsSupported(master_name):NEWLINE    """Returns ``True`` if the given master is supported, otherwise ``False``."""NEWLINE    return master_name in GetStepsForMastersRules()['supported_masters']NEWLINENEWLINENEWLINEdef StepIsSupportedForMaster(step_name, master_name):NEWLINE    """Determines whether or not a step is supported for the given build master.NEWLINENEWLINE    Args:NEWLINE        step_name: The name of the step to check.NEWLINE        master_name: The name of the build master to check.NEWLINENEWLINE    Returns:NEWLINE        True if Findit supports analyzing the failure, False otherwise.NEWLINE    Rules:NEWLINE        1. If a master is not supported, then neither are any of its steps.NEWLINE        2. If a master specifies check_global = True, then all of its steps areNEWLINE           supported except those blacklisted under global.NEWLINE        3. If a master specifies check_global = True, but also specifies aNEWLINE           supported_steps, then supported_steps overrides any stepsNEWLINE           blacklisted under global.NEWLINE        4. If a master specifies check_global = True, but also specifies itsNEWLINE           own unsupported_steps, those unsupported_steps are in addition toNEWLINE           those under global.NEWLINE        5. 
If a master specifies check_global = False, then all steps underNEWLINE           'supported_steps' are always supported and nothing else.NEWLINE           'unsupported_steps' is not allowed.NEWLINE    """NEWLINE    if not MasterIsSupported(master_name):NEWLINE        return FalseNEWLINENEWLINE    steps_for_masters_rules = GetStepsForMastersRules()NEWLINE    supported_masters = steps_for_masters_rules['supported_masters']NEWLINENEWLINE    supported_master = supported_masters[master_name]NEWLINE    check_global = supported_master.get('check_global', True)NEWLINENEWLINE    if not check_global:NEWLINE        supported_steps = supported_master['supported_steps']NEWLINE        return step_name in supported_stepsNEWLINENEWLINE    supported_steps = supported_master.get('supported_steps', [])NEWLINE    unsupported_steps = supported_master.get('unsupported_steps', [])NEWLINE    global_unsupported_steps = (NEWLINE        steps_for_masters_rules['global'].get('unsupported_steps', []))NEWLINENEWLINE    return (step_name in supported_steps orNEWLINE            (step_name not in unsupported_steps andNEWLINE             step_name not in global_unsupported_steps))NEWLINENEWLINENEWLINEdef EnableStrictRegexForCompileLinkFailures(wf_mastername, wf_buildername):NEWLINE    """Returns True if strict regex should be used for the given builder."""NEWLINE    trybot_config = FinditConfig.Get().builders_to_trybots.get(NEWLINE        wf_mastername, {}).get(wf_buildername, {})NEWLINE    return trybot_config.get('strict_regex', False)NEWLINENEWLINENEWLINEdef ShouldSkipTestTryJobs(wf_mastername, wf_buildername):NEWLINE    """Returns True if test try jobs should be skipped.NEWLINENEWLINE    By default, test try jobs should be supported unless the master/builderNEWLINE    specifies to bail out.NEWLINENEWLINE    Args:NEWLINE        wf_mastername: The mastername of a waterfall builder.NEWLINE        wf_buildername: The buildername of a waterfall builder.NEWLINENEWLINE    Returns:NEWLINE        True if test try jobs are to be skipped, False otherwise.NEWLINE    """NEWLINE    trybot_config = FinditConfig.Get().builders_to_trybots.get(NEWLINE        wf_mastername, {}).get(wf_buildername, {})NEWLINE    return trybot_config.get('not_run_tests', False)NEWLINENEWLINENEWLINEdef GetTryJobSettings():NEWLINE    return FinditConfig.Get().try_job_settingsNEWLINENEWLINENEWLINEdef GetSwarmingSettings():NEWLINE    return FinditConfig.Get().swarming_settingsNEWLINENEWLINENEWLINEdef GetDownloadBuildDataSettings():NEWLINE    return FinditConfig.Get().download_build_data_settingsNEWLINENEWLINENEWLINEdef GetActionSettings():NEWLINE    return FinditConfig.Get().action_settingsNEWLINENEWLINENEWLINEdef GetCheckFlakeSettings():NEWLINE    return FinditConfig.Get().check_flake_settingsNEWLINENEWLINENEWLINEdef GetFlakeDetectionSettings():NEWLINE    return FinditConfig.Get().flake_detection_settingsNEWLINENEWLINENEWLINEdef GetCodeCoverageSettings():NEWLINE    return FinditConfig.Get().code_coverage_settingsNEWLINENEWLINENEWLINEdef GetCodeReviewSettings():NEWLINE    return FinditConfig.Get().code_review_settingsNEWLINE
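A sketch of a steps_for_masters_rules dict that exercises the rules listed in StepIsSupportedForMaster's docstring (all master and step names are hypothetical, not real waterfall config):
# Hypothetical rules dict illustrating the check_global semantics.NEWLINEexample_rules = {NEWLINE    'supported_masters': {NEWLINE        # Rule 2: check_global defaults to True, so every step except theNEWLINE        # globally blacklisted ones is supported.NEWLINE        'master.a': {},NEWLINE        # Rules 3/4: supported_steps overrides the global blacklist, whileNEWLINE        # unsupported_steps adds to it.NEWLINE        'master.b': {NEWLINE            'supported_steps': ['heavy_step'],NEWLINE            'unsupported_steps': ['flaky_step'],NEWLINE        },NEWLINE        # Rule 5: check_global = False -> only supported_steps count.NEWLINE        'master.c': {NEWLINE            'check_global': False,NEWLINE            'supported_steps': ['compile'],NEWLINE        },NEWLINE    },NEWLINE    'global': {'unsupported_steps': ['heavy_step']},NEWLINE}NEWLINE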
# coding: utf-8NEWLINENEWLINE"""NEWLINE    SendinBlue APINEWLINENEWLINE    SendinBlue provides a RESTful API that can be used with any language. With this API, you will be able to: - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable | # noqa: E501NEWLINENEWLINE    OpenAPI spec version: 3.0.0NEWLINE    Contact: [email protected]NEWLINE    Generated by: https://github.com/swagger-api/swagger-codegen.gitNEWLINE"""NEWLINENEWLINENEWLINEimport pprintNEWLINEimport re  # noqa: F401NEWLINENEWLINEimport sixNEWLINENEWLINENEWLINEclass GetSmsCampaigns(object):NEWLINE    """NOTE: This class is auto generated by the swagger code generator program.NEWLINENEWLINE    Do not edit the class manually.NEWLINE    """NEWLINENEWLINE    """NEWLINE    Attributes:NEWLINE      swagger_types (dict): The key is attribute nameNEWLINE                            and the value is attribute type.NEWLINE      attribute_map (dict): The key is attribute nameNEWLINE                            and the value is json key in definition.NEWLINE    """NEWLINE    swagger_types = {NEWLINE        'campaigns': 'list[object]',NEWLINE        'count': 'int'NEWLINE    }NEWLINENEWLINE    attribute_map = {NEWLINE        'campaigns': 'campaigns',NEWLINE        'count': 'count'NEWLINE    }NEWLINENEWLINE    def __init__(self, campaigns=None, count=None):  # noqa: E501NEWLINE        """GetSmsCampaigns - a model defined in Swagger"""  # noqa: E501NEWLINENEWLINE        self._campaigns = NoneNEWLINE        self._count = NoneNEWLINE        self.discriminator = NoneNEWLINENEWLINE        if campaigns is not None:NEWLINE            self.campaigns = campaignsNEWLINE        if count is not None:NEWLINE            self.count = countNEWLINENEWLINE    @propertyNEWLINE    def campaigns(self):NEWLINE        """Gets the campaigns of this GetSmsCampaigns.  # noqa: E501NEWLINENEWLINENEWLINE        :return: The campaigns of this GetSmsCampaigns.  # noqa: E501NEWLINE        :rtype: list[object]NEWLINE        """NEWLINE        return self._campaignsNEWLINENEWLINE    @campaigns.setterNEWLINE    def campaigns(self, campaigns):NEWLINE        """Sets the campaigns of this GetSmsCampaigns.NEWLINENEWLINENEWLINE        :param campaigns: The campaigns of this GetSmsCampaigns.  # noqa: E501NEWLINE        :type: list[object]NEWLINE        """NEWLINENEWLINE        self._campaigns = campaignsNEWLINENEWLINE    @propertyNEWLINE    def count(self):NEWLINE        """Gets the count of this GetSmsCampaigns.  # noqa: E501NEWLINENEWLINE        Number of SMS campaigns retrieved  # noqa: E501NEWLINENEWLINE        :return: The count of this GetSmsCampaigns.  # noqa: E501NEWLINE        :rtype: intNEWLINE        """NEWLINE        return self._countNEWLINENEWLINE    @count.setterNEWLINE    def count(self, count):NEWLINE        """Sets the count of this GetSmsCampaigns.NEWLINENEWLINE        Number of SMS campaigns retrieved  # noqa: E501NEWLINENEWLINE        :param count: The count of this GetSmsCampaigns. 
# noqa: E501NEWLINE :type: intNEWLINE """NEWLINENEWLINE self._count = countNEWLINENEWLINE def to_dict(self):NEWLINE """Returns the model properties as a dict"""NEWLINE result = {}NEWLINENEWLINE for attr, _ in six.iteritems(self.swagger_types):NEWLINE value = getattr(self, attr)NEWLINE if isinstance(value, list):NEWLINE result[attr] = list(map(NEWLINE lambda x: x.to_dict() if hasattr(x, "to_dict") else x,NEWLINE valueNEWLINE ))NEWLINE elif hasattr(value, "to_dict"):NEWLINE result[attr] = value.to_dict()NEWLINE elif isinstance(value, dict):NEWLINE result[attr] = dict(map(NEWLINE lambda item: (item[0], item[1].to_dict())NEWLINE if hasattr(item[1], "to_dict") else item,NEWLINE value.items()NEWLINE ))NEWLINE else:NEWLINE result[attr] = valueNEWLINE if issubclass(GetSmsCampaigns, dict):NEWLINE for key, value in self.items():NEWLINE result[key] = valueNEWLINENEWLINE return resultNEWLINENEWLINE def to_str(self):NEWLINE """Returns the string representation of the model"""NEWLINE return pprint.pformat(self.to_dict())NEWLINENEWLINE def __repr__(self):NEWLINE """For `print` and `pprint`"""NEWLINE return self.to_str()NEWLINENEWLINE def __eq__(self, other):NEWLINE """Returns true if both objects are equal"""NEWLINE if not isinstance(other, GetSmsCampaigns):NEWLINE return FalseNEWLINENEWLINE return self.__dict__ == other.__dict__NEWLINENEWLINE def __ne__(self, other):NEWLINE """Returns true if both objects are not equal"""NEWLINE return not self == otherNEWLINE
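A minimal usage sketch of the generated model (the field values are made up for illustration):
# Construct the model and round-trip it through to_dict() -- illustrative values only.NEWLINEpage = GetSmsCampaigns(campaigns=[{'id': 1, 'name': 'promo'}], count=1)NEWLINEprint(page.to_dict())  # {'campaigns': [{'id': 1, 'name': 'promo'}], 'count': 1}NEWLINEprint(page)  # __repr__ pretty-prints the same dictNEWLINE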
# coding=utf-8NEWLINE"""NEWLINEThis code was generated byNEWLINE\ / _ _ _| _ _NEWLINE | (_)\/(_)(_|\/| |(/_ v1.0.0NEWLINE / /NEWLINE"""NEWLINENEWLINEfrom twilio.base import deserializeNEWLINEfrom twilio.base import valuesNEWLINEfrom twilio.base.instance_context import InstanceContextNEWLINEfrom twilio.base.instance_resource import InstanceResourceNEWLINEfrom twilio.base.list_resource import ListResourceNEWLINEfrom twilio.base.page import PageNEWLINENEWLINENEWLINEclass KeyList(ListResource):NEWLINE """ """NEWLINENEWLINE def __init__(self, version, account_sid):NEWLINE """NEWLINE Initialize the KeyListNEWLINENEWLINE :param Version version: Version that contains the resourceNEWLINE :param account_sid: A 34 character string that uniquely identifies this resource.NEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyListNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyListNEWLINE """NEWLINE super(KeyList, self).__init__(version)NEWLINENEWLINE # Path SolutionNEWLINE self._solution = {'account_sid': account_sid, }NEWLINE self._uri = '/Accounts/{account_sid}/Keys.json'.format(**self._solution)NEWLINENEWLINE def stream(self, limit=None, page_size=None):NEWLINE """NEWLINE Streams KeyInstance records from the API as a generator stream.NEWLINE This operation lazily loads records as efficiently as possible until the limitNEWLINE is reached.NEWLINE The results are returned as a generator, so this operation is memory efficient.NEWLINENEWLINE :param int limit: Upper limit for the number of records to return. stream()NEWLINE guarantees to never return more than limit. Default is no limitNEWLINE :param int page_size: Number of records to fetch per request, when not set will useNEWLINE the default value of 50 records. If no page_size is definedNEWLINE but a limit is defined, stream() will attempt to read theNEWLINE limit with the most efficient page size, i.e. min(limit, 1000)NEWLINENEWLINE :returns: Generator that will yield up to limit resultsNEWLINE :rtype: list[twilio.rest.api.v2010.account.key.KeyInstance]NEWLINE """NEWLINE limits = self._version.read_limits(limit, page_size)NEWLINENEWLINE page = self.page(page_size=limits['page_size'], )NEWLINENEWLINE return self._version.stream(page, limits['limit'], limits['page_limit'])NEWLINENEWLINE def list(self, limit=None, page_size=None):NEWLINE """NEWLINE Lists KeyInstance records from the API as a list.NEWLINE Unlike stream(), this operation is eager and will load `limit` records intoNEWLINE memory before returning.NEWLINENEWLINE :param int limit: Upper limit for the number of records to return. list() guaranteesNEWLINE never to return more than limit. Default is no limitNEWLINE :param int page_size: Number of records to fetch per request, when not set will useNEWLINE the default value of 50 records. If no page_size is definedNEWLINE but a limit is defined, list() will attempt to read the limitNEWLINE with the most efficient page size, i.e. 
min(limit, 1000)NEWLINENEWLINE :returns: Generator that will yield up to limit resultsNEWLINE :rtype: list[twilio.rest.api.v2010.account.key.KeyInstance]NEWLINE """NEWLINE return list(self.stream(limit=limit, page_size=page_size, ))NEWLINENEWLINE def page(self, page_token=values.unset, page_number=values.unset,NEWLINE page_size=values.unset):NEWLINE """NEWLINE Retrieve a single page of KeyInstance records from the API.NEWLINE Request is executed immediatelyNEWLINENEWLINE :param str page_token: PageToken provided by the APINEWLINE :param int page_number: Page Number, this value is simply for client stateNEWLINE :param int page_size: Number of records to return, defaults to 50NEWLINENEWLINE :returns: Page of KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyPageNEWLINE """NEWLINE params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })NEWLINENEWLINE response = self._version.page(NEWLINE 'GET',NEWLINE self._uri,NEWLINE params=params,NEWLINE )NEWLINENEWLINE return KeyPage(self._version, response, self._solution)NEWLINENEWLINE def get_page(self, target_url):NEWLINE """NEWLINE Retrieve a specific page of KeyInstance records from the API.NEWLINE Request is executed immediatelyNEWLINENEWLINE :param str target_url: API-generated URL for the requested results pageNEWLINENEWLINE :returns: Page of KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyPageNEWLINE """NEWLINE response = self._version.domain.twilio.request(NEWLINE 'GET',NEWLINE target_url,NEWLINE )NEWLINENEWLINE return KeyPage(self._version, response, self._solution)NEWLINENEWLINE def get(self, sid):NEWLINE """NEWLINE Constructs a KeyContextNEWLINENEWLINE :param sid: The unique string that identifies the resourceNEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyContextNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyContextNEWLINE """NEWLINE return KeyContext(self._version, account_sid=self._solution['account_sid'], sid=sid, )NEWLINENEWLINE def __call__(self, sid):NEWLINE """NEWLINE Constructs a KeyContextNEWLINENEWLINE :param sid: The unique string that identifies the resourceNEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyContextNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyContextNEWLINE """NEWLINE return KeyContext(self._version, account_sid=self._solution['account_sid'], sid=sid, )NEWLINENEWLINE def __repr__(self):NEWLINE """NEWLINE Provide a friendly representationNEWLINENEWLINE :returns: Machine friendly representationNEWLINE :rtype: strNEWLINE """NEWLINE return '<Twilio.Api.V2010.KeyList>'NEWLINENEWLINENEWLINEclass KeyPage(Page):NEWLINE """ """NEWLINENEWLINE def __init__(self, version, response, solution):NEWLINE """NEWLINE Initialize the KeyPageNEWLINENEWLINE :param Version version: Version that contains the resourceNEWLINE :param Response response: Response from the APINEWLINE :param account_sid: A 34 character string that uniquely identifies this resource.NEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyPageNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyPageNEWLINE """NEWLINE super(KeyPage, self).__init__(version, response)NEWLINENEWLINE # Path SolutionNEWLINE self._solution = solutionNEWLINENEWLINE def get_instance(self, payload):NEWLINE """NEWLINE Build an instance of KeyInstanceNEWLINENEWLINE :param dict payload: Payload response from the APINEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE return 
KeyInstance(self._version, payload, account_sid=self._solution['account_sid'], )NEWLINENEWLINE def __repr__(self):NEWLINE """NEWLINE Provide a friendly representationNEWLINENEWLINE :returns: Machine friendly representationNEWLINE :rtype: strNEWLINE """NEWLINE return '<Twilio.Api.V2010.KeyPage>'NEWLINENEWLINENEWLINEclass KeyContext(InstanceContext):NEWLINE """ """NEWLINENEWLINE def __init__(self, version, account_sid, sid):NEWLINE """NEWLINE Initialize the KeyContextNEWLINENEWLINE :param Version version: Version that contains the resourceNEWLINE :param account_sid: The SID of the Account that created the resource to fetchNEWLINE :param sid: The unique string that identifies the resourceNEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyContextNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyContextNEWLINE """NEWLINE super(KeyContext, self).__init__(version)NEWLINENEWLINE # Path SolutionNEWLINE self._solution = {'account_sid': account_sid, 'sid': sid, }NEWLINE self._uri = '/Accounts/{account_sid}/Keys/{sid}.json'.format(**self._solution)NEWLINENEWLINE def fetch(self):NEWLINE """NEWLINE Fetch a KeyInstanceNEWLINENEWLINE :returns: Fetched KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE params = values.of({})NEWLINENEWLINE payload = self._version.fetch(NEWLINE 'GET',NEWLINE self._uri,NEWLINE params=params,NEWLINE )NEWLINENEWLINE return KeyInstance(NEWLINE self._version,NEWLINE payload,NEWLINE account_sid=self._solution['account_sid'],NEWLINE sid=self._solution['sid'],NEWLINE )NEWLINENEWLINE def update(self, friendly_name=values.unset):NEWLINE """NEWLINE Update the KeyInstanceNEWLINENEWLINE :param unicode friendly_name: A string to describe the resourceNEWLINENEWLINE :returns: Updated KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE data = values.of({'FriendlyName': friendly_name, })NEWLINENEWLINE payload = self._version.update(NEWLINE 'POST',NEWLINE self._uri,NEWLINE data=data,NEWLINE )NEWLINENEWLINE return KeyInstance(NEWLINE self._version,NEWLINE payload,NEWLINE account_sid=self._solution['account_sid'],NEWLINE sid=self._solution['sid'],NEWLINE )NEWLINENEWLINE def delete(self):NEWLINE """NEWLINE Deletes the KeyInstanceNEWLINENEWLINE :returns: True if delete succeeds, False otherwiseNEWLINE :rtype: boolNEWLINE """NEWLINE return self._version.delete('delete', self._uri)NEWLINENEWLINE def __repr__(self):NEWLINE """NEWLINE Provide a friendly representationNEWLINENEWLINE :returns: Machine friendly representationNEWLINE :rtype: strNEWLINE """NEWLINE context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())NEWLINE return '<Twilio.Api.V2010.KeyContext {}>'.format(context)NEWLINENEWLINENEWLINEclass KeyInstance(InstanceResource):NEWLINE """ """NEWLINENEWLINE def __init__(self, version, payload, account_sid, sid=None):NEWLINE """NEWLINE Initialize the KeyInstanceNEWLINENEWLINE :returns: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE super(KeyInstance, self).__init__(version)NEWLINENEWLINE # Marshaled PropertiesNEWLINE self._properties = {NEWLINE 'sid': payload['sid'],NEWLINE 'friendly_name': payload['friendly_name'],NEWLINE 'date_created': deserialize.rfc2822_datetime(payload['date_created']),NEWLINE 'date_updated': deserialize.rfc2822_datetime(payload['date_updated']),NEWLINE }NEWLINENEWLINE # ContextNEWLINE self._context = NoneNEWLINE self._solution = {'account_sid': account_sid, 'sid': sid or 
self._properties['sid'], }NEWLINENEWLINE @propertyNEWLINE def _proxy(self):NEWLINE """NEWLINE Generate an instance context for the instance, the context is capable ofNEWLINE performing various actions. All instance actions are proxied to the contextNEWLINENEWLINE :returns: KeyContext for this KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyContextNEWLINE """NEWLINE if self._context is None:NEWLINE self._context = KeyContext(NEWLINE self._version,NEWLINE account_sid=self._solution['account_sid'],NEWLINE sid=self._solution['sid'],NEWLINE )NEWLINE return self._contextNEWLINENEWLINE @propertyNEWLINE def sid(self):NEWLINE """NEWLINE :returns: The unique string that identifies the resourceNEWLINE :rtype: unicodeNEWLINE """NEWLINE return self._properties['sid']NEWLINENEWLINE @propertyNEWLINE def friendly_name(self):NEWLINE """NEWLINE :returns: The string that you assigned to describe the resourceNEWLINE :rtype: unicodeNEWLINE """NEWLINE return self._properties['friendly_name']NEWLINENEWLINE @propertyNEWLINE def date_created(self):NEWLINE """NEWLINE :returns: The RFC 2822 date and time in GMT that the resource was createdNEWLINE :rtype: datetimeNEWLINE """NEWLINE return self._properties['date_created']NEWLINENEWLINE @propertyNEWLINE def date_updated(self):NEWLINE """NEWLINE :returns: The RFC 2822 date and time in GMT that the resource was last updatedNEWLINE :rtype: datetimeNEWLINE """NEWLINE return self._properties['date_updated']NEWLINENEWLINE def fetch(self):NEWLINE """NEWLINE Fetch a KeyInstanceNEWLINENEWLINE :returns: Fetched KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE return self._proxy.fetch()NEWLINENEWLINE def update(self, friendly_name=values.unset):NEWLINE """NEWLINE Update the KeyInstanceNEWLINENEWLINE :param unicode friendly_name: A string to describe the resourceNEWLINENEWLINE :returns: Updated KeyInstanceNEWLINE :rtype: twilio.rest.api.v2010.account.key.KeyInstanceNEWLINE """NEWLINE return self._proxy.update(friendly_name=friendly_name, )NEWLINENEWLINE def delete(self):NEWLINE """NEWLINE Deletes the KeyInstanceNEWLINENEWLINE :returns: True if delete succeeds, False otherwiseNEWLINE :rtype: boolNEWLINE """NEWLINE return self._proxy.delete()NEWLINENEWLINE def __repr__(self):NEWLINE """NEWLINE Provide a friendly representationNEWLINENEWLINE :returns: Machine friendly representationNEWLINE :rtype: strNEWLINE """NEWLINE context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())NEWLINE return '<Twilio.Api.V2010.KeyInstance {}>'.format(context)NEWLINE
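In normal use these classes are reached through an authenticated twilio Client rather than constructed directly; a minimal sketch (credentials and SIDs are placeholders, and the top-level `client.keys` shortcut is assumed to proxy the default account's KeyList):
from twilio.rest import ClientNEWLINENEWLINE# Placeholder credentials -- list() loads records eagerly, stream() pages lazily (see above).NEWLINEclient = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')NEWLINEfor key in client.keys.list(limit=20):NEWLINE    print(key.sid, key.friendly_name)NEWLINEclient.keys('SKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update(friendly_name='ci-key')NEWLINE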
#!/usr/bin/env python3NEWLINE# Copyright (c) 2014-2020 The Vadercoin Core developersNEWLINE# Distributed under the MIT software license, see the accompanyingNEWLINE# file COPYING or http://www.opensource.org/licenses/mit-license.php.NEWLINENEWLINE"""NEWLINE ZMQ example using python3's asyncioNEWLINENEWLINE Vadercoin should be started with the command line arguments:NEWLINE vadercoind -testnet -daemon \NEWLINE -zmqpubrawtx=tcp://127.0.0.1:28332 \NEWLINE -zmqpubrawblock=tcp://127.0.0.1:28332 \NEWLINE -zmqpubhashtx=tcp://127.0.0.1:28332 \NEWLINE -zmqpubhashblock=tcp://127.0.0.1:28332 \NEWLINE -zmqpubsequence=tcp://127.0.0.1:28332NEWLINENEWLINE We use the asyncio library here. `self.handle()` installs itself as aNEWLINE future at the end of the function. Since it never returns with the eventNEWLINE loop having an empty stack of futures, this creates an infinite loop. AnNEWLINE alternative is to wrap the contents of `handle` inside `while True`.NEWLINENEWLINE A blocking example using python 2.7 can be obtained from the git history:NEWLINE https://github.com/vadercoin/vadercoin/blob/37a7fe9e440b83e2364d5498931253937abe9294/contrib/zmq/zmq_sub.pyNEWLINE"""NEWLINENEWLINEimport binasciiNEWLINEimport asyncioNEWLINEimport zmqNEWLINEimport zmq.asyncioNEWLINEimport signalNEWLINEimport structNEWLINEimport sysNEWLINENEWLINEif (sys.version_info.major, sys.version_info.minor) < (3, 5):NEWLINE print("This example only works with Python 3.5 and greater")NEWLINE sys.exit(1)NEWLINENEWLINEport = 28332NEWLINENEWLINEclass ZMQHandler():NEWLINE def __init__(self):NEWLINE self.loop = asyncio.get_event_loop()NEWLINE self.zmqContext = zmq.asyncio.Context()NEWLINENEWLINE self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)NEWLINE self.zmqSubSocket.setsockopt(zmq.RCVHWM, 0)NEWLINE self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashblock")NEWLINE self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashtx")NEWLINE self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock")NEWLINE self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx")NEWLINE self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "sequence")NEWLINE self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)NEWLINENEWLINE async def handle(self) :NEWLINE topic, body, seq = await self.zmqSubSocket.recv_multipart()NEWLINE sequence = "Unknown"NEWLINE if len(seq) == 4:NEWLINE sequence = str(struct.unpack('<I', seq)[-1])NEWLINE if topic == b"hashblock":NEWLINE print('- HASH BLOCK ('+sequence+') -')NEWLINE print(binascii.hexlify(body))NEWLINE elif topic == b"hashtx":NEWLINE print('- HASH TX ('+sequence+') -')NEWLINE print(binascii.hexlify(body))NEWLINE elif topic == b"rawblock":NEWLINE print('- RAW BLOCK HEADER ('+sequence+') -')NEWLINE print(binascii.hexlify(body[:80]))NEWLINE elif topic == b"rawtx":NEWLINE print('- RAW TX ('+sequence+') -')NEWLINE print(binascii.hexlify(body))NEWLINE elif topic == b"sequence":NEWLINE hash = binascii.hexlify(body[:32])NEWLINE label = chr(body[32])NEWLINE mempool_sequence = None if len(body) != 32+1+8 else struct.unpack("<Q", body[32+1:])[0]NEWLINE print('- SEQUENCE ('+sequence+') -')NEWLINE print(hash, label, mempool_sequence)NEWLINE # schedule ourselves to receive the next messageNEWLINE asyncio.ensure_future(self.handle())NEWLINENEWLINE def start(self):NEWLINE self.loop.add_signal_handler(signal.SIGINT, self.stop)NEWLINE self.loop.create_task(self.handle())NEWLINE self.loop.run_forever()NEWLINENEWLINE def stop(self):NEWLINE self.loop.stop()NEWLINE self.zmqContext.destroy()NEWLINENEWLINEdaemon = 
ZMQHandler()NEWLINEdaemon.start()NEWLINE
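The `while True` alternative mentioned in the module docstring would look roughly like this (same receive/dispatch body, minus the self-rescheduling ensure_future call):
# Sketch of the alternative loop shape described above.NEWLINEasync def handle(self):NEWLINE    while True:NEWLINE        topic, body, seq = await self.zmqSubSocket.recv_multipart()NEWLINE        # ... dispatch on topic exactly as in ZMQHandler.handle() ...NEWLINE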
# Copyright 2021 UW-IT, University of WashingtonNEWLINE# SPDX-License-Identifier: Apache-2.0NEWLINENEWLINEfrom .utils import MOCK_SETTINGS, get_mock_manifest, generate_mocksNEWLINEfrom webpack_bridge.templatetags.webpack_bridge import render_webpack_entryNEWLINENEWLINEfrom django.test import TestCaseNEWLINEfrom django.test.utils import override_settingsNEWLINENEWLINENEWLINE@override_settings(**MOCK_SETTINGS)NEWLINEclass TestRenderWebpackEntry(TestCase):NEWLINE def setUp(self):NEWLINE self.mock_manifest = get_mock_manifest()NEWLINENEWLINE def test_render(self):NEWLINE mocks = generate_mocks(self.mock_manifest)NEWLINE with mocks[0], mocks[1]:NEWLINE self.assertEqual(NEWLINE render_webpack_entry(NEWLINE 'home',NEWLINE js='async',NEWLINE css='crossorigin'NEWLINE ),NEWLINE '<script src="/static/home-234xz0jk.js" async></script>\n'NEWLINE '<script src="/static/vendor-4t4g534y.js" async></script>\n'NEWLINE '<link rel="stylesheet" type="text/css"'NEWLINE ' href="/static/other-home-89m07yfg.css" crossorigin>\n'NEWLINE )NEWLINE
"""NEWLINE Created by howie.hu at 2022-01-21.NEWLINE Description: 执行分发动作NEWLINE - 执行命令: PIPENV_DOTENV_LOCATION=./pro.env pipenv run python src/sender/action.pyNEWLINE Changelog: all notable changes to this file will be documentedNEWLINE"""NEWLINEimport timeNEWLINENEWLINEfrom src.config import ConfigNEWLINEfrom src.databases import MongodbManagerNEWLINEfrom src.sender.send_factory import send_factoryNEWLINEfrom src.utils.log import LOGGERNEWLINENEWLINENEWLINEdef send_doc(sender_conf: dict):NEWLINE """NEWLINE 对文章进行分发NEWLINE Args:NEWLINE sender_conf (dict): 分发配置NEWLINE """NEWLINE sender_list = sender_conf["sender_list"]NEWLINE query_days = sender_conf.get("query_days", 2)NEWLINE delta_time = sender_conf.get("delta_time", 3)NEWLINE skip_ads = sender_conf.get("skip_ads", False)NEWLINE if sender_list:NEWLINE # 是否启用分发器NEWLINE mongo_base = MongodbManager.get_mongo_base(mongodb_config=Config.MONGODB_CONFIG)NEWLINE coll = mongo_base.get_collection(coll_name="liuli_articles")NEWLINE cur_ts = int(time.time())NEWLINE filter_dict = {NEWLINE # 时间范围,除第一次外后面其实可以去掉NEWLINE "doc_ts": {"$gte": cur_ts - (query_days * 24 * 60 * 60), "$lte": cur_ts},NEWLINE }NEWLINE if skip_ads:NEWLINE filter_dict.update(NEWLINE {NEWLINE # 至少打上一个模型标签NEWLINE "cos_model": {"$exists": True},NEWLINE # 判定结果为非广告NEWLINE "cos_model.result": 1,NEWLINE }NEWLINE )NEWLINE # 查找所有可分发文章NEWLINE for each_data in coll.find(filter_dict):NEWLINE # 分别分发给各个目标NEWLINE for send_type in sender_list:NEWLINE # 暂时固定,测试NEWLINE init_config = sender_conf.get(f"{send_type}_init_config", {})NEWLINE cos_model_resp = each_data.get("cos_model", {})NEWLINE doc_cus_des = ""NEWLINE if cos_model_resp:NEWLINE # 经过模型判断NEWLINE if cos_model_resp["result"] == 1:NEWLINE # 广告标记NEWLINE doc_cus_des = f"👿广告[概率:{cos_model_resp['probability']}]"NEWLINE else:NEWLINE doc_cus_des = "🤓非广告"NEWLINENEWLINE each_data["doc_cus_des"] = doc_cus_desNEWLINE # 每次分发休眠一定时间NEWLINE time.sleep(delta_time)NEWLINE send_factory(NEWLINE send_type=send_type, init_config=init_config, send_data=each_dataNEWLINE )NEWLINE else:NEWLINE LOGGER.warn("未配置分发器!")NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE send_config = {NEWLINE "sender_list": ["wecom"],NEWLINE "query_days": 7,NEWLINE "skip_ads": False,NEWLINE "delta_time": 3,NEWLINE }NEWLINE send_doc(send_config)NEWLINE
import numpy as npNEWLINEfrom sklearn.calibration import CalibratedClassifierCVNEWLINEfrom sklearn.datasets import load_breast_cancerNEWLINEfrom sklearn.ensemble import BaggingClassifierNEWLINEfrom sklearn.linear_model import LogisticRegressionNEWLINEfrom sklearn.linear_model import PerceptronNEWLINEfrom sklearn.model_selection import train_test_splitNEWLINEfrom sklearn.preprocessing import StandardScalerNEWLINEfrom sklearn.svm import SVCNEWLINENEWLINEfrom deslib.dcs.a_posteriori import APosterioriNEWLINE# DCS techniquesNEWLINEfrom deslib.dcs.a_priori import APrioriNEWLINEfrom deslib.dcs.lca import LCANEWLINEfrom deslib.dcs.mcb import MCBNEWLINEfrom deslib.dcs.mla import MLANEWLINEfrom deslib.dcs.ola import OLANEWLINEfrom deslib.dcs.rank import RankNEWLINE# DES techniquesNEWLINEfrom deslib.des.des_clustering import DESClusteringNEWLINEfrom deslib.des.des_knn import DESKNNNEWLINEfrom deslib.des.des_p import DESPNEWLINEfrom deslib.des.knop import KNOPNEWLINEfrom deslib.des.knora_e import KNORAENEWLINEfrom deslib.des.knora_u import KNORAUNEWLINEfrom deslib.des.meta_des import METADESNEWLINEfrom deslib.des.probabilistic import RRC, MinimumDifference, DESKLNEWLINE# Static techniquesNEWLINEfrom deslib.static.oracle import OracleNEWLINEfrom deslib.static.single_best import SingleBestNEWLINEfrom deslib.static.static_selection import StaticSelectionNEWLINENEWLINENEWLINEdef test_label_encoder_integration_list_classifiers():NEWLINE    rng = np.random.RandomState(123456)NEWLINE    X_dsel, X_test, X_train, y_dsel, y_test, y_train = load_dataset(encode_labels=['no', 'yes'], rng=rng)NEWLINENEWLINE    pool_classifiers = [LogisticRegression(), SVC(probability=True)]NEWLINE    [clf.fit(X_train, y_train) for clf in pool_classifiers]NEWLINENEWLINE    knorau = KNORAU(pool_classifiers)NEWLINE    knorau.fit(X_dsel, y_dsel)NEWLINENEWLINE    this_score = knorau.score(X_test, y_test)NEWLINE    assert np.isclose(this_score, 0.9574468085106383)NEWLINENEWLINENEWLINEdef test_label_encoder_integration_sklearn_ensembles():NEWLINE    pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers(encode_labels=['no', 'yes'])NEWLINENEWLINE    knorau = KNORAU(pool_classifiers)NEWLINE    knorau.fit(X_dsel, y_dsel)NEWLINE    assert np.isclose(knorau.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef setup_classifiers(encode_labels=None):NEWLINE    rng = np.random.RandomState(123456)NEWLINENEWLINE    X_dsel, X_test, X_train, y_dsel, y_test, y_train = load_dataset(encode_labels, rng)NEWLINE    model = CalibratedClassifierCV(Perceptron(max_iter=5))NEWLINE    # Train a pool of 10 classifiersNEWLINE    pool_classifiers = BaggingClassifier(model, n_estimators=10, random_state=rng)NEWLINE    pool_classifiers.fit(X_train, y_train)NEWLINE    return pool_classifiers, X_dsel, y_dsel, X_test, y_testNEWLINENEWLINENEWLINEdef load_dataset(encode_labels, rng):NEWLINE    # Generate a classification datasetNEWLINE    data = load_breast_cancer()NEWLINE    X = data.dataNEWLINE    y = data.targetNEWLINE    if encode_labels is not None:NEWLINE        y = np.take(encode_labels, y)NEWLINE    # split the data into training and test dataNEWLINE    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=rng)NEWLINE    # Scale the variables to have 0 mean and unit varianceNEWLINE    scalar = StandardScaler()NEWLINE    X_train = scalar.fit_transform(X_train)NEWLINE    X_test = scalar.transform(X_test)NEWLINE    # Split the data into training and DSEL for DS techniquesNEWLINE    X_train, X_dsel, y_train, y_dsel = train_test_split(X_train, y_train, test_size=0.5, random_state=rng)NEWLINE    # Considering a 
pool composed of 10 base classifiersNEWLINE # Calibrating Perceptrons to estimate probabilitiesNEWLINE return X_dsel, X_test, X_train, y_dsel, y_test, y_trainNEWLINENEWLINENEWLINEdef test_knorau():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE knorau = KNORAU(pool_classifiers)NEWLINE knorau.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(knorau.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_kne():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE kne = KNORAE(pool_classifiers)NEWLINE kne.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(kne.score(X_test, y_test), 0.973404255319148)NEWLINENEWLINENEWLINEdef test_desp():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE desp = DESP(pool_classifiers)NEWLINE desp.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(desp.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_ola():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE ola = OLA(pool_classifiers)NEWLINE ola.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(ola.score(X_test, y_test), 0.96808510638297873)NEWLINENEWLINENEWLINEdef test_lca():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE lca = LCA(pool_classifiers)NEWLINE lca.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(lca.score(X_test, y_test), 0.96808510638297873)NEWLINENEWLINENEWLINEdef test_MLA():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE mla = MLA(pool_classifiers)NEWLINE mla.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(mla.score(X_test, y_test), 0.96808510638297873)NEWLINENEWLINENEWLINEdef test_mcb():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE mcb = MCB(pool_classifiers, rng=rng)NEWLINE mcb.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(mcb.score(X_test, y_test), 0.96276595744680848)NEWLINENEWLINENEWLINEdef test_apriori():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE apriori = APriori(pool_classifiers, rng=rng)NEWLINE apriori.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(apriori.score(X_test, y_test), 0.97872340425531912)NEWLINENEWLINENEWLINEdef test_rank():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE rank = Rank(pool_classifiers)NEWLINE rank.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(rank.score(X_test, y_test), 0.973404255319149)NEWLINENEWLINENEWLINEdef test_aposteriori():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE a_posteriori = APosteriori(pool_classifiers, rng=rng)NEWLINE a_posteriori.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(a_posteriori.score(X_test, y_test), 0.96276595744680848)NEWLINENEWLINENEWLINEdef test_meta():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE meta_des = METADES(pool_classifiers)NEWLINE meta_des.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(meta_des.score(X_test, y_test), 0.973404255319149)NEWLINENEWLINENEWLINEdef test_rrc():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE rrc = RRC(pool_classifiers)NEWLINE rrc.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(rrc.score(X_test, y_test), 
0.97340425531914898)NEWLINENEWLINENEWLINEdef test_deskl():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE deskl = DESKL(pool_classifiers)NEWLINE deskl.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(deskl.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_minimum_diff():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE minimum_diff = MinimumDifference(pool_classifiers)NEWLINE minimum_diff.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(minimum_diff.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_knop():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE knop = KNOP(pool_classifiers)NEWLINE knop.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(knop.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_desknn():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE desknn = DESKNN(pool_classifiers)NEWLINE desknn.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(desknn.score(X_test, y_test), 0.97340425531914898)NEWLINENEWLINENEWLINEdef test_des_clustering():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE des_clustering = DESClustering(pool_classifiers, rng=rng)NEWLINE des_clustering.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(des_clustering.score(X_test, y_test), 0.97872340425531912)NEWLINENEWLINENEWLINEdef test_oracle():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE oracle = Oracle(pool_classifiers)NEWLINE assert np.isclose(oracle.score(X_test, y_test), 0.99468085106382975)NEWLINENEWLINENEWLINEdef test_single_best():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE single_best = SingleBest(pool_classifiers)NEWLINE single_best.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(single_best.score(X_test, y_test), 0.97872340425531912)NEWLINENEWLINENEWLINEdef test_static_selection():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE static_selection = StaticSelection(pool_classifiers)NEWLINE static_selection.fit(X_dsel, y_dsel)NEWLINE assert np.isclose(static_selection.score(X_test, y_test), 0.96808510638297873)NEWLINENEWLINENEWLINE# ------------------------------------------ Testing predict_proba -----------------------------------NEWLINEdef test_kne_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE kne = KNORAE(pool_classifiers)NEWLINE kne.fit(X_dsel, y_dsel)NEWLINE probas = kne.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/kne_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_desp_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE desp = DESP(pool_classifiers)NEWLINE desp.fit(X_dsel, y_dsel)NEWLINE probas = desp.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/desp_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_ola_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE ola = OLA(pool_classifiers)NEWLINE ola.fit(X_dsel, y_dsel)NEWLINE probas = ola.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/ola_proba_integration.npy')NEWLINE assert 
np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_mcb_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE mcb = MCB(pool_classifiers, rng=rng)NEWLINE mcb.fit(X_dsel, y_dsel)NEWLINE probas = mcb.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/mcb_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_desknn_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE desknn = DESKNN(pool_classifiers)NEWLINE desknn.fit(X_dsel, y_dsel)NEWLINE probas = desknn.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/desknn_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_des_clustering_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINE rng = np.random.RandomState(123456)NEWLINENEWLINE des_clustering = DESClustering(pool_classifiers, rng=rng)NEWLINE des_clustering.fit(X_dsel, y_dsel)NEWLINE probas = des_clustering.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/des_clustering_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)NEWLINENEWLINENEWLINEdef test_knop_proba():NEWLINE pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()NEWLINENEWLINE knop = KNOP(pool_classifiers)NEWLINE knop.fit(X_dsel, y_dsel)NEWLINE probas = knop.predict_proba(X_test)NEWLINE expected = np.load('deslib/tests/expected_values/knop_proba_integration.npy')NEWLINE assert np.allclose(probas, expected)
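
# ---------------------------------------------------------------------------
# A minimal sketch (not part of the test suite) of how the expected_values
# .npy fixtures consumed by the predict_proba tests above could be
# regenerated. The fixture path and the use of KNORA-E mirror test_kne_proba;
# treating this as the project's regeneration workflow is an assumption, not
# something the repository documents.
if __name__ == "__main__":
    pool_classifiers, X_dsel, y_dsel, X_test, y_test = setup_classifiers()
    kne = KNORAE(pool_classifiers)
    kne.fit(X_dsel, y_dsel)
    # Overwrites the fixture that test_kne_proba loads with np.load().
    np.save('deslib/tests/expected_values/kne_proba_integration.npy',
            kne.predict_proba(X_test))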
# coding:utf-8
import codecs
import os

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
"""
setup() must be imported for packaging; setuptools is preferred,
with distutils as a fallback.
"""


def read(fname):
    return codecs.open(os.path.join(os.path.dirname(__file__), fname)).read()


NAME = "quantaxis"
"""
The package name.
"""
PACKAGES = ["QUANTAXIS", "QUANTAXIS.QAFetch", "QUANTAXIS.QACmd",
            "QUANTAXIS.QAMarket", "QUANTAXIS.QABacktest", "QUANTAXIS.QASQL",
            "QUANTAXIS.QATask", "QUANTAXIS.QASpider", "QUANTAXIS.QASU",
            "QUANTAXIS.QAUtil", "QUANTAXIS.QAARP", "QUANTAXIS.QASignal",
            "QUANTAXIS.QAMath", "QUANTAXIS.QAIndicator"]
"""
The packages to include; this is a list and may name several packages.
"""

DESCRIPTION = "QUANTAXIS:Quantitative Financial Strategy Framework"

LONG_DESCRIPTION = read("README.rst")
"""
See the read() helper above.
"""

KEYWORDS = ["quantaxis", "quant", "finance"]
"""
Keywords for this package, to help PyPI categorize it.
"""

AUTHOR = "yutiansut"

AUTHOR_EMAIL = "[email protected]"

URL = "http://www.yutiansut.com"

VERSION = "0.3.9b1.dev19"

LICENSE = "MIT"

setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
    ],
    install_requires=['tushare>=0.7.4', 'pymongo>=3.4', 'celery>=4.0.0',
                      'six>=1.10.0'],
    keywords=KEYWORDS,
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    url=URL,
    license=LICENSE,
    packages=PACKAGES,
    include_package_data=True,
    zip_safe=True,
)

## Simply plug the variables above into the setup() call.
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public API for tf.linalg.sparse namespace."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.linalg.sparse.sparse_csr_matrix_grad import *
from tensorflow.python.ops.linalg.sparse.sparse_csr_matrix_ops import *
# pylint: enable=wildcard-import
from django.contrib.auth.models import User
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView

from comics.models import (
    Comic,
    Post,
    Contributor
)


class ProfileView(DetailView):
    template_name = "profile.html"
    model = User

    def dispatch(self, *args, **kwargs):
        if kwargs.get('username'):
            self.user = get_object_or_404(User, username=kwargs.get('username'))
        elif self.request.user:
            self.user = self.request.user
        else:
            raise Http404()
        return super(ProfileView, self).dispatch(*args, **kwargs)

    def get_object(self):
        return self.user

    def get_context_data(self, **kwargs):
        context = super(ProfileView, self).get_context_data(**kwargs)

        contributions = Contributor.objects.filter(contributor=self.user)

        comics = Comic.published_comics.filter(
            post__contributor__in=contributions
        ).order_by('-published')

        posts = Post.published_posts.filter(
            contributor__in=contributions
        ).exclude(
            id__in=comics.values_list('post')
        ).order_by('-published')

        context['display_user'] = self.user
        context['posts'] = posts
        context['comics'] = comics

        return context
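
# A minimal urls.py sketch for wiring up ProfileView (hypothetical: this
# project's URL module and pattern names are not shown). The optional
# username group matches the dispatch() logic above, which falls back to
# request.user when no username is supplied.
from django.conf.urls import url

from .views import ProfileView

urlpatterns = [
    url(r'^profile/$', ProfileView.as_view(), name='profile'),
    url(r'^profile/(?P<username>[\w.@+-]+)/$', ProfileView.as_view(),
        name='profile-detail'),
]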
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .task_step_properties import TaskStepProperties


class DockerBuildStep(TaskStepProperties):
    """The Docker build step.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar base_image_dependencies: List of base image dependencies for a step.
    :vartype base_image_dependencies:
     list[~azure.mgmt.containerregistry.v2019_04_01.models.BaseImageDependency]
    :param context_path: The URL (absolute or relative) of the source context
     for the task step.
    :type context_path: str
    :param context_access_token: The token (git PAT or SAS token of storage
     account blob) associated with the context for a step.
    :type context_access_token: str
    :param type: Required. Constant filled by server.
    :type type: str
    :param image_names: The fully qualified image names including the
     repository and tag.
    :type image_names: list[str]
    :param is_push_enabled: The value of this property indicates whether the
     image built should be pushed to the registry or not. Default value: True .
    :type is_push_enabled: bool
    :param no_cache: The value of this property indicates whether the image
     cache is enabled or not. Default value: False .
    :type no_cache: bool
    :param docker_file_path: Required. The Docker file path relative to the
     source context.
    :type docker_file_path: str
    :param target: The name of the target build stage for the docker build.
    :type target: str
    :param arguments: The collection of override arguments to be used when
     executing this build step.
    :type arguments:
     list[~azure.mgmt.containerregistry.v2019_04_01.models.Argument]
    """

    _validation = {
        'base_image_dependencies': {'readonly': True},
        'type': {'required': True},
        'docker_file_path': {'required': True},
    }

    _attribute_map = {
        'base_image_dependencies': {'key': 'baseImageDependencies', 'type': '[BaseImageDependency]'},
        'context_path': {'key': 'contextPath', 'type': 'str'},
        'context_access_token': {'key': 'contextAccessToken', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'image_names': {'key': 'imageNames', 'type': '[str]'},
        'is_push_enabled': {'key': 'isPushEnabled', 'type': 'bool'},
        'no_cache': {'key': 'noCache', 'type': 'bool'},
        'docker_file_path': {'key': 'dockerFilePath', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[Argument]'},
    }

    def __init__(self, **kwargs):
        super(DockerBuildStep, self).__init__(**kwargs)
        self.image_names = kwargs.get('image_names', None)
        self.is_push_enabled = kwargs.get('is_push_enabled', True)
        self.no_cache = kwargs.get('no_cache', False)
        self.docker_file_path = kwargs.get('docker_file_path', None)
        self.target = kwargs.get('target', None)
        self.arguments = kwargs.get('arguments', None)
        self.type = 'Docker'
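
# A minimal construction sketch for DockerBuildStep; the registry, image and
# context values are illustrative only. The class is keyword-only, matching
# the **kwargs-based __init__ above, and 'type' is always forced to 'Docker'.
step = DockerBuildStep(
    docker_file_path='Dockerfile',  # required per _validation
    image_names=['myregistry.azurecr.io/sample:v1'],
    context_path='https://github.com/example/repo.git',  # hypothetical context
    no_cache=False,
)
assert step.type == 'Docker' and step.is_push_enabled  # defaults applied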
from service import ServiceScraper, ServiceSoundDetector, ServiceLanguageDetector

if __name__ == "__main__":
    services = [ServiceScraper, ServiceSoundDetector, ServiceLanguageDetector]

    for service in services:
        s = service()
        s.process()
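
# The runner above only assumes that each entry in `services` can be
# instantiated with no arguments and exposes a process() method. A minimal
# sketch of that implied interface (hypothetical; the real classes live in
# the `service` module):
class ServiceBase:
    """Base shape for the services iterated over by the runner."""

    def process(self):
        raise NotImplementedError("subclasses perform one unit of work here")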
#
# Copyright (c) 2021 Project CHIP Authors
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Needed to use types in type hints before they are fully defined.
from __future__ import annotations

from asyncio.futures import Future
import ctypes
from dataclasses import dataclass, field
from typing import Tuple, Type, Union, List, Any, Callable, Dict, Set
from ctypes import CFUNCTYPE, c_char_p, c_size_t, c_void_p, c_uint64, c_uint32, c_uint16, c_uint8, py_object
from rich.pretty import pprint

from .ClusterObjects import Cluster, ClusterAttributeDescriptor, ClusterEvent
import chip.exceptions
import chip.interaction_model
import chip.native  # provides GetLibraryHandle(), used below
import chip.tlv
from enum import Enum, unique
import inspect
import sys
import logging
import threading
import builtins


@unique
class EventTimestampType(Enum):
    SYSTEM = 0
    EPOCH = 1


@unique
class EventPriority(Enum):
    DEBUG = 1
    INFO = 2
    CRITICAL = 3


@dataclass
class AttributePath:
    EndpointId: int = None
    ClusterId: int = None
    AttributeId: int = None

    def __init__(self, EndpointId: int = None, Cluster=None, Attribute=None, ClusterId=None, AttributeId=None):
        self.EndpointId = EndpointId
        if Cluster is not None:
            # Wildcard read for a specific cluster
            if (Attribute is not None) or (ClusterId is not None) or (AttributeId is not None):
                raise Warning(
                    "Attribute, ClusterId and AttributeId are ignored when Cluster is specified")
            self.ClusterId = Cluster.id
            return
        if Attribute is not None:
            if (ClusterId is not None) or (AttributeId is not None):
                raise Warning(
                    "ClusterId and AttributeId are ignored when Attribute is specified")
            self.ClusterId = Attribute.cluster_id
            self.AttributeId = Attribute.attribute_id
            return
        self.ClusterId = ClusterId
        self.AttributeId = AttributeId

    def __str__(self) -> str:
        return f"{self.EndpointId}/{self.ClusterId}/{self.AttributeId}"

    def __hash__(self):
        return str(self).__hash__()


@dataclass
class TypedAttributePath:
    ''' Encapsulates an attribute path that has strongly typed references to cluster and attribute
        cluster object types. These types serve as keys into the attribute cache.
    '''
    ClusterType: Cluster = None
    AttributeType: ClusterAttributeDescriptor = None
    AttributeName: str = None
    Path: AttributePath = None

    def __init__(self, ClusterType: Cluster = None, AttributeType: ClusterAttributeDescriptor = None,
                 Path: AttributePath = None):
        ''' Only one of either ClusterType and AttributeType OR Path may be provided.
        '''

        #
        # First, let's populate ClusterType and AttributeType. If they are already provided,
        # we can continue onwards to deriving the label. Otherwise, we'll need to
        # walk the attribute index to find the right type information.
        #
        if (ClusterType is not None and AttributeType is not None):
            self.ClusterType = ClusterType
            self.AttributeType = AttributeType
        else:
            if (Path is None):
                raise ValueError("Path should have a valid value")

            for cluster, attribute in _AttributeIndex:
                attributeType = _AttributeIndex[(cluster, attribute)][0]
                clusterType = _AttributeIndex[(cluster, attribute)][1]

                if (clusterType.id == Path.ClusterId and attributeType.attribute_id == Path.AttributeId):
                    self.ClusterType = clusterType
                    self.AttributeType = attributeType
                    break

            if (self.ClusterType is None or self.AttributeType is None):
                raise Exception("Schema not found")

        # Next, let's figure out the label.
        for field in self.ClusterType.descriptor.Fields:
            if (field.Tag != self.AttributeType.attribute_id):
                continue

            self.AttributeName = field.Label

        if (self.AttributeName is None):
            raise Exception("Schema not found")

        self.Path = Path
        self.ClusterId = self.ClusterType.id
        self.AttributeId = self.AttributeType.attribute_id


@dataclass
class EventPath:
    EndpointId: int = None
    ClusterId: int = None
    EventId: int = None

    def __init__(self, EndpointId: int = None, Cluster=None, Event=None, ClusterId=None, EventId=None):
        self.EndpointId = EndpointId
        if Cluster is not None:
            # Wildcard read for a specific cluster
            if (Event is not None) or (ClusterId is not None) or (EventId is not None):
                raise Warning(
                    "Event, ClusterId and EventId are ignored when Cluster is specified")
            self.ClusterId = Cluster.id
            return
        if Event is not None:
            if (ClusterId is not None) or (EventId is not None):
                raise Warning(
                    "ClusterId and EventId are ignored when Event is specified")
            self.ClusterId = Event.cluster_id
            self.EventId = Event.event_id
            return
        self.ClusterId = ClusterId
        self.EventId = EventId

    def __str__(self) -> str:
        return f"{self.EndpointId}/{self.ClusterId}/{self.EventId}"

    def __hash__(self):
        return str(self).__hash__()


@dataclass
class AttributePathWithListIndex(AttributePath):
    ListIndex: int = None


@dataclass
class EventHeader:
    EndpointId: int = None
    Event: ClusterEvent = None
    EventNumber: int = None
    Priority: EventPriority = None
    Timestamp: int = None
    TimestampType: EventTimestampType = None

    def __init__(self, EndpointId: int = None, Event=None, EventNumber=None, Priority=None, Timestamp=None, TimestampType=None):
        self.EndpointId = EndpointId
        self.Event = Event
        self.EventNumber = EventNumber
        self.Priority = Priority
        self.Timestamp = Timestamp
        self.TimestampType = TimestampType

    def __str__(self) -> str:
        return f"{self.EndpointId}/{self.Event.cluster_id}/{self.Event.event_id}/{self.EventNumber}/{self.Priority}/{self.Timestamp}/{self.TimestampType}"


@dataclass
class AttributeStatus:
    Path: AttributePath
    Status: Union[chip.interaction_model.Status, int]


@dataclass
class EventStatus:
    Header: EventHeader
    Status: chip.interaction_model.Status


AttributeWriteResult = AttributeStatus


@dataclass
class AttributeDescriptorWithEndpoint:
    EndpointId: int
    Attribute: ClusterAttributeDescriptor


@dataclass
class EventDescriptorWithEndpoint:
    EndpointId: int
    Event: ClusterEvent


@dataclass
class AttributeWriteRequest(AttributeDescriptorWithEndpoint):
    Data: Any


AttributeReadRequest = AttributeDescriptorWithEndpoint
EventReadRequest = EventDescriptorWithEndpoint


@dataclass
class AttributeReadResult(AttributeStatus):
    Data: Any = None


@dataclass
class ValueDecodeFailure:
    ''' Encapsulates a failure to decode a TLV value into a cluster object.
        Some exceptions have custom fields, so run str(ReasonException) to get more info.
    '''

    TLVValue: Any = None
    Reason: Exception = None


@dataclass
class EventReadResult(EventStatus):
    Data: Any = None


_AttributeIndex = {}
_EventIndex = {}
_ClusterIndex = {}


def _BuildAttributeIndex():
    ''' Build internal attribute index for locating the corresponding cluster object by path in the future.
        We do this because this operation will take a long time when there are lots of attributes;
        it takes about 300ms for a single query.
        This is acceptable during init, but unacceptable when the server returns lots of attributes at the same time.
    '''
    for clusterName, obj in inspect.getmembers(sys.modules['chip.clusters.Objects']):
        if ('chip.clusters.Objects' in str(obj)) and inspect.isclass(obj):
            for objName, subclass in inspect.getmembers(obj):
                if inspect.isclass(subclass) and (('Attributes') in str(subclass)):
                    for attributeName, attribute in inspect.getmembers(subclass):
                        if inspect.isclass(attribute):
                            base_classes = inspect.getmro(attribute)

                            # Only match on classes that extend the ClusterAttributeDescriptor class
                            matched = [
                                value for value in base_classes if 'ClusterAttributeDescriptor' in str(value)]
                            if (matched == []):
                                continue

                            _AttributeIndex[(attribute.cluster_id, attribute.attribute_id)] = (eval(
                                'chip.clusters.Objects.' + clusterName + '.Attributes.' + attributeName), obj)


def _BuildClusterIndex():
    ''' Build internal cluster index for locating the corresponding cluster object by path in the future.
    '''
    for clusterName, obj in inspect.getmembers(sys.modules['chip.clusters.Objects']):
        if ('chip.clusters.Objects' in str(obj)) and inspect.isclass(obj):
            _ClusterIndex[obj.id] = obj


@dataclass
class SubscriptionParameters:
    MinReportIntervalFloorSeconds: int
    MaxReportIntervalCeilingSeconds: int


@dataclass
class AttributeCache:
    ''' A cache that stores data & errors returned in read/subscribe reports, but organizes it topologically
        in a collection of nested dictionaries. The organization follows the natural data model composition of
        the device: endpoint, then cluster, then attribute.

        TLV data (or ValueDecodeFailure in the case of IM status codes) are stored for each attribute in
        attributeTLVCache[endpoint][cluster][attribute].

        Upon completion of data population, it can be retrieved in a more friendly cluster object format,
        with two options available. In both options, data in the dictionary is keyed not by the raw numeric
        cluster and attribute IDs, but instead by the cluster object descriptor types for each of those generated
        cluster objects.

        E.g. Clusters.TestCluster is the literal key for indexing the test cluster.
        Clusters.TestCluster.Attributes.Int16u is the literal key for indexing an attribute in the test cluster.

        These strongly typed keys permit a more natural and safer form of indexing.
    '''
    returnClusterObject: bool = False
    attributeTLVCache: Dict[int, Dict[int, Dict[int, bytes]]] = field(
        default_factory=lambda: {})
    attributeCache: Dict[int, List[Cluster]] = field(
        default_factory=lambda: {})

    def UpdateTLV(self, path: AttributePath, data: Union[bytes, ValueDecodeFailure]):
        ''' Store data in TLV since that makes it easiest to eventually convert to either the
            cluster or attribute view representations (see below in UpdateCachedData).
        '''
        if (path.EndpointId not in self.attributeTLVCache):
            self.attributeTLVCache[path.EndpointId] = {}

        endpointCache = self.attributeTLVCache[path.EndpointId]
        if (path.ClusterId not in endpointCache):
            endpointCache[path.ClusterId] = {}

        clusterCache = endpointCache[path.ClusterId]
        if (path.AttributeId not in clusterCache):
            clusterCache[path.AttributeId] = None

        clusterCache[path.AttributeId] = data

    def UpdateCachedData(self):
        ''' This converts the raw TLV data into a cluster object format.

            Two formats are available:
                1. Attribute-View (returnClusterObject=False): Dict[EndpointId, Dict[ClusterObjectType, Dict[AttributeObjectType, AttributeValue]]]
                2. Cluster-View (returnClusterObject=True): Dict[EndpointId, Dict[ClusterObjectType, ClusterValue]]

            In the attribute-view, only attributes that match the original path criteria are present in the dictionary.
            The attribute values can either be the actual data for the attribute, or a ValueDecodeFailure in the case of
            non-success IM status codes, or other errors encountered during decode.

            In the cluster-view, a cluster object that corresponds to all attributes on a given cluster instance is returned,
            regardless of the subset of attributes read. For attributes not returned in the report, defaults are used.
            If a cluster cannot be decoded, a ValueDecodeFailure shall be present instead of a cluster object value.
        '''

        tlvCache = self.attributeTLVCache
        attributeCache = self.attributeCache

        for endpoint in tlvCache:
            if (endpoint not in attributeCache):
                attributeCache[endpoint] = {}

            endpointCache = attributeCache[endpoint]

            for cluster in tlvCache[endpoint]:
                clusterType = _ClusterIndex[cluster]
                if (clusterType is None):
                    raise Exception("Cannot find cluster in cluster index")

                if (clusterType not in endpointCache):
                    endpointCache[clusterType] = {}

                clusterCache = endpointCache[clusterType]

                if (self.returnClusterObject):
                    try:
                        # Since the TLV data is already organized by attribute tags, we can trivially convert to a cluster object representation.
                        endpointCache[clusterType] = clusterType.FromDict(
                            data=clusterType.descriptor.TagDictToLabelDict([], tlvCache[endpoint][cluster]))
                    except Exception as ex:
                        logging.error(
                            f"Error converting TLV to Cluster Object for path: Endpoint = {endpoint}, cluster = {str(clusterType)}")
                        logging.error(f"|-- Exception: {repr(ex)}")
                        decodedValue = ValueDecodeFailure(
                            tlvCache[endpoint][cluster], ex)
                        endpointCache[clusterType] = decodedValue
                else:
                    for attribute in tlvCache[endpoint][cluster]:
                        value = tlvCache[endpoint][cluster][attribute]

                        attributeType = _AttributeIndex[(
                            cluster, attribute)][0]
                        if (attributeType is None):
                            raise Exception(
                                "Cannot find attribute in attribute index")

                        if (attributeType not in clusterCache):
                            clusterCache[attributeType] = {}

                        if (type(value) is ValueDecodeFailure):
                            logging.error(
                                f"For path: Endpoint = {endpoint}, Attribute = {str(attributeType)}, got IM Error: {str(value.Reason)}")
                            clusterCache[attributeType] = value
                        else:
                            try:
                                decodedValue = attributeType.FromTagDictOrRawValue(
                                    tlvCache[endpoint][cluster][attribute])
                            except Exception as ex:
                                logging.error(
                                    f"Error converting TLV to Cluster Object for path: Endpoint = {endpoint}, Attribute = {str(attributeType)}")
                                logging.error(f"|-- Exception: {repr(ex)}")
                                decodedValue = ValueDecodeFailure(value, ex)

                            clusterCache[attributeType] = decodedValue


class SubscriptionTransaction:
    def __init__(self, transaction: 'AsyncReadTransaction', subscriptionId, devCtrl):
        self._onAttributeChangeCb = DefaultAttributeChangeCallback
        self._onEventChangeCb = DefaultEventChangeCallback
        self._readTransaction = transaction
        self._subscriptionId = subscriptionId
        self._devCtrl = devCtrl

    def GetAttributes(self):
        ''' Returns the attribute value cache tracking the latest state on the publisher.
        '''
        return self._readTransaction._cache.attributeCache

    def GetAttribute(self, path: TypedAttributePath) -> Any:
        ''' Returns a specific attribute given a TypedAttributePath.
        '''
        data = self._readTransaction._cache.attributeCache

        if (self._readTransaction._cache.returnClusterObject):
            return eval(f'data[path.Path.EndpointId][path.ClusterType].{path.AttributeName}')
        else:
            return data[path.Path.EndpointId][path.ClusterType][path.AttributeType]

    def GetEvents(self):
        return self._readTransaction.GetAllEventValues()

    def SetAttributeUpdateCallback(self, callback: Callable[[TypedAttributePath, SubscriptionTransaction], None]):
        ''' Sets the callback function for attribute value changes; it accepts a Callable
            that takes the typed attribute path and this subscription transaction.
        '''
        if callback is not None:
            self._onAttributeChangeCb = callback

    def SetEventUpdateCallback(self, callback: Callable[[EventReadResult, SubscriptionTransaction], None]):
        if callback is not None:
            self._onEventChangeCb = callback

    @property
    def OnAttributeChangeCb(self) -> Callable[[TypedAttributePath, SubscriptionTransaction], None]:
        return self._onAttributeChangeCb

    @property
    def OnEventChangeCb(self) -> Callable[[EventReadResult, SubscriptionTransaction], None]:
        return self._onEventChangeCb

    def Shutdown(self):
        self._devCtrl.ZCLShutdownSubscription(self._subscriptionId)

    def __repr__(self):
        return f'<Subscription (Id={self._subscriptionId})>'


def DefaultAttributeChangeCallback(path: TypedAttributePath, transaction: SubscriptionTransaction):
    data = transaction.GetAttribute(path)
    value = {
        'Endpoint': path.Path.EndpointId,
        'Attribute': path.AttributeType,
        'Value': data
    }

    print("Attribute Changed:")
    pprint(value, expand_all=True)


def DefaultEventChangeCallback(data: EventReadResult, transaction: SubscriptionTransaction):
    print("Received Event:")
    pprint(data, expand_all=True)


def _BuildEventIndex():
    ''' Build internal event index for locating the corresponding cluster object by path in the future.
        We do this because this operation will take a long time when there are lots of events;
        it takes about 300ms for a single query.
        This is acceptable during init, but unacceptable when the server returns lots of events at the same time.
    '''
    for clusterName, obj in inspect.getmembers(sys.modules['chip.clusters.Objects']):
        if ('chip.clusters.Objects' in str(obj)) and inspect.isclass(obj):
            for objName, subclass in inspect.getmembers(obj):
                if inspect.isclass(subclass) and (('Events' == objName)):
                    for eventName, event in inspect.getmembers(subclass):
                        if inspect.isclass(event):
                            base_classes = inspect.getmro(event)

                            # Only match on classes that extend the ClusterEvent class
                            matched = [
                                value for value in base_classes if 'ClusterEvent' in str(value)]
                            if (matched == []):
                                continue

                            _EventIndex[str(EventPath(ClusterId=event.cluster_id, EventId=event.event_id))] = eval(
                                'chip.clusters.Objects.' + clusterName + '.Events.' + eventName)


class TransactionType(Enum):
    READ_EVENTS = 1
    READ_ATTRIBUTES = 2


class AsyncReadTransaction:
    def __init__(self, future: Future, eventLoop, devCtrl, transactionType: TransactionType, returnClusterObject: bool):
        self._event_loop = eventLoop
        self._future = future
        self._subscription_handler = None
        self._events = []
        self._devCtrl = devCtrl
        self._transactionType = transactionType
        self._cache = AttributeCache(returnClusterObject=returnClusterObject)
        self._changedPathSet = set()

    def GetAllEventValues(self):
        return self._events

    def _handleAttributeData(self, path: AttributePathWithListIndex, status: int, data: bytes):
        try:
            imStatus = status
            try:
                imStatus = chip.interaction_model.Status(status)
            except ValueError:
                pass

            if (imStatus != chip.interaction_model.Status.Success):
                attributeValue = ValueDecodeFailure(
                    None, chip.interaction_model.InteractionModelError(imStatus))
            else:
                tlvData = chip.tlv.TLVReader(data).get().get("Any", {})
                attributeValue = tlvData

            self._cache.UpdateTLV(path, attributeValue)
            self._changedPathSet.add(path)

        except Exception as ex:
            logging.exception(ex)

    def handleAttributeData(self, path: AttributePath, status: int, data: bytes):
        self._handleAttributeData(path, status, data)

    def _handleEventData(self, header: EventHeader, path: EventPath, data: bytes):
        try:
            eventType = _EventIndex.get(str(path), None)
            eventValue = None
            tlvData = chip.tlv.TLVReader(data).get().get("Any", {})
            if eventType is None:
                eventValue = ValueDecodeFailure(
                    tlvData, LookupError("event schema not found"))
            else:
                try:
                    eventValue = eventType.FromTLV(data)
                except Exception as ex:
                    logging.error(
                        f"Error converting TLV to Cluster Object for path: Endpoint = {path.EndpointId}/Cluster = {path.ClusterId}/Event = {path.EventId}")
                    logging.error(
                        f"Failed Cluster Object: {str(eventType)}")
                    logging.error(ex)
                    eventValue = ValueDecodeFailure(
                        tlvData, ex)

                    # If we're in debug mode, raise the exception so that we can better debug what's happening.
                    if (builtins.enableDebugMode):
                        raise

            eventResult = EventReadResult(
                Header=header, Data=eventValue, Status=chip.interaction_model.Status.Success)
            self._events.append(eventResult)

            if (self._subscription_handler is not None):
                self._subscription_handler.OnEventChangeCb(
                    eventResult, self._subscription_handler)

        except Exception as ex:
            logging.exception(ex)

    def handleEventData(self, header: EventHeader, path: EventPath, data: bytes):
        self._handleEventData(header, path, data)

    def _handleError(self, chipError: int):
        self._future.set_exception(
            chip.exceptions.ChipStackError(chipError))

    def handleError(self, chipError: int):
        self._event_loop.call_soon_threadsafe(
            self._handleError, chipError
        )

    def _handleSubscriptionEstablished(self, subscriptionId):
        if not self._future.done():
            self._subscription_handler = SubscriptionTransaction(
                self, subscriptionId, self._devCtrl)
            self._future.set_result(self._subscription_handler)

    def handleSubscriptionEstablished(self, subscriptionId):
        self._event_loop.call_soon_threadsafe(
            self._handleSubscriptionEstablished, subscriptionId)

    def _handleReportBegin(self):
        pass

    def _handleReportEnd(self):
        self._cache.UpdateCachedData()

        if (self._subscription_handler is not None):
            for change in self._changedPathSet:
                self._subscription_handler.OnAttributeChangeCb(
                    TypedAttributePath(Path=change), self._subscription_handler)

            # Clear it out once we've notified of all changes in this transaction.
            self._changedPathSet = set()

    def _handleDone(self):
        if not self._future.done():
            if (self._transactionType == TransactionType.READ_EVENTS):
                self._future.set_result(self._events)
            else:
                self._future.set_result(self._cache.attributeCache)

    def handleDone(self):
        self._event_loop.call_soon_threadsafe(self._handleDone)

    def handleReportBegin(self):
        pass

    def handleReportEnd(self):
        # self._event_loop.call_soon_threadsafe(self._handleReportEnd)
        self._handleReportEnd()


class AsyncWriteTransaction:
    def __init__(self, future: Future, eventLoop):
        self._event_loop = eventLoop
        self._future = future
        self._res = []

    def _handleResponse(self, path: AttributePath, status: int):
        try:
            imStatus = chip.interaction_model.Status(status)
            self._res.append(AttributeWriteResult(Path=path, Status=imStatus))
        except ValueError:
            self._res.append(AttributeWriteResult(Path=path, Status=status))

    def handleResponse(self, path: AttributePath, status: int):
        self._event_loop.call_soon_threadsafe(
            self._handleResponse, path, status)

    def _handleError(self, chipError: int):
        self._future.set_exception(
            chip.exceptions.ChipStackError(chipError))

    def handleError(self, chipError: int):
        self._event_loop.call_soon_threadsafe(
            self._handleError, chipError
        )

    def _handleDone(self):
        if not self._future.done():
            self._future.set_result(self._res)

    def handleDone(self):
        self._event_loop.call_soon_threadsafe(self._handleDone)


_OnReadAttributeDataCallbackFunct = CFUNCTYPE(
    None, py_object, c_uint16, c_uint32, c_uint32, c_uint32, c_void_p, c_size_t)
_OnSubscriptionEstablishedCallbackFunct = CFUNCTYPE(None, py_object, c_uint64)
_OnReadEventDataCallbackFunct = CFUNCTYPE(
    None, py_object, c_uint16, c_uint32, c_uint32, c_uint32, c_uint8, c_uint64, c_uint8, c_void_p, c_size_t)
_OnReadErrorCallbackFunct = CFUNCTYPE(
    None, py_object, c_uint32)
_OnReadDoneCallbackFunct = CFUNCTYPE(
    None, py_object)
_OnReportBeginCallbackFunct = CFUNCTYPE(
    None, py_object)
_OnReportEndCallbackFunct = CFUNCTYPE(
    None, py_object)


@_OnReadAttributeDataCallbackFunct
def _OnReadAttributeDataCallback(closure, endpoint: int, cluster: int, attribute: int, status, data, length):
    dataBytes = ctypes.string_at(data, length)
    closure.handleAttributeData(AttributePath(
        EndpointId=endpoint, ClusterId=cluster, AttributeId=attribute), status, dataBytes[:])


@_OnReadEventDataCallbackFunct
def _OnReadEventDataCallback(closure, endpoint: int, cluster: int, event: int, number: int, priority: int, timestamp: int, timestampType: int, data, length):
    dataBytes = ctypes.string_at(data, length)
    path = EventPath(ClusterId=cluster, EventId=event)
    closure.handleEventData(EventHeader(
        EndpointId=endpoint, EventNumber=number, Priority=EventPriority(priority), Timestamp=timestamp, TimestampType=EventTimestampType(timestampType)), path, dataBytes[:])


@_OnSubscriptionEstablishedCallbackFunct
def _OnSubscriptionEstablishedCallback(closure, subscriptionId):
    closure.handleSubscriptionEstablished(subscriptionId)


@_OnReadErrorCallbackFunct
def _OnReadErrorCallback(closure, chiperror: int):
    closure.handleError(chiperror)


@_OnReportBeginCallbackFunct
def _OnReportBeginCallback(closure):
    closure.handleReportBegin()


@_OnReportEndCallbackFunct
def _OnReportEndCallback(closure):
    closure.handleReportEnd()


@_OnReadDoneCallbackFunct
def _OnReadDoneCallback(closure):
    closure.handleDone()
    ctypes.pythonapi.Py_DecRef(ctypes.py_object(closure))


_OnWriteResponseCallbackFunct = CFUNCTYPE(
    None, py_object, c_uint16, c_uint32, c_uint32, c_uint16)
_OnWriteErrorCallbackFunct = CFUNCTYPE(
    None, py_object, c_uint32)
_OnWriteDoneCallbackFunct = CFUNCTYPE(
    None, py_object)


@_OnWriteResponseCallbackFunct
def _OnWriteResponseCallback(closure, endpoint: int, cluster: int, attribute: int, status):
    closure.handleResponse(AttributePath(
        EndpointId=endpoint, ClusterId=cluster, AttributeId=attribute), status)


@_OnWriteErrorCallbackFunct
def _OnWriteErrorCallback(closure, chiperror: int):
    closure.handleError(chiperror)


@_OnWriteDoneCallbackFunct
def _OnWriteDoneCallback(closure):
    closure.handleDone()
    ctypes.pythonapi.Py_DecRef(ctypes.py_object(closure))


def WriteAttributes(future: Future, eventLoop, device, attributes: List[AttributeWriteRequest]) -> int:
    handle = chip.native.GetLibraryHandle()
    transaction = AsyncWriteTransaction(future, eventLoop)

    writeargs = []
    for attr in attributes:
        path = chip.interaction_model.AttributePathIBstruct.parse(
            b'\x00' * chip.interaction_model.AttributePathIBstruct.sizeof())
        path.EndpointId = attr.EndpointId
        path.ClusterId = attr.Attribute.cluster_id
        path.AttributeId = attr.Attribute.attribute_id
        path = chip.interaction_model.AttributePathIBstruct.build(path)
        tlv = attr.Attribute.ToTLV(None, attr.Data)
        writeargs.append(ctypes.c_char_p(path))
        writeargs.append(ctypes.c_char_p(bytes(tlv)))
        writeargs.append(ctypes.c_int(len(tlv)))

    ctypes.pythonapi.Py_IncRef(ctypes.py_object(transaction))
    res = handle.pychip_WriteClient_WriteAttributes(
        ctypes.py_object(transaction), device, ctypes.c_size_t(len(attributes)), *writeargs)
    if res != 0:
        ctypes.pythonapi.Py_DecRef(ctypes.py_object(transaction))
    return res


def ReadAttributes(future: Future, eventLoop, device, devCtrl, attributes: List[AttributePath], returnClusterObject: bool = True, subscriptionParameters: SubscriptionParameters = None) -> int:
    handle = chip.native.GetLibraryHandle()
    transaction = AsyncReadTransaction(
        future, eventLoop, devCtrl, TransactionType.READ_ATTRIBUTES, returnClusterObject)

    readargs = []
    for attr in attributes:
        path = chip.interaction_model.AttributePathIBstruct.parse(
            b'\xff' * chip.interaction_model.AttributePathIBstruct.sizeof())
        if attr.EndpointId is not None:
            path.EndpointId = attr.EndpointId
        if attr.ClusterId is not None:
            path.ClusterId = attr.ClusterId
        if attr.AttributeId is not None:
            path.AttributeId = attr.AttributeId
        path = chip.interaction_model.AttributePathIBstruct.build(path)
        readargs.append(ctypes.c_char_p(path))

    ctypes.pythonapi.Py_IncRef(ctypes.py_object(transaction))
    minInterval = 0
    maxInterval = 0
    if subscriptionParameters is not None:
        minInterval = subscriptionParameters.MinReportIntervalFloorSeconds
        maxInterval = subscriptionParameters.MaxReportIntervalCeilingSeconds
    res = handle.pychip_ReadClient_ReadAttributes(
        ctypes.py_object(transaction), device,
        ctypes.c_bool(subscriptionParameters is not None),
        ctypes.c_uint32(minInterval), ctypes.c_uint32(maxInterval),
        ctypes.c_size_t(len(attributes)), *readargs)
    if res != 0:
        ctypes.pythonapi.Py_DecRef(ctypes.py_object(transaction))
    return res


def ReadEvents(future: Future, eventLoop, device, devCtrl, events: List[EventPath], subscriptionParameters: SubscriptionParameters = None) -> int:
    handle = chip.native.GetLibraryHandle()
    transaction = AsyncReadTransaction(
        future, eventLoop, devCtrl, TransactionType.READ_EVENTS, False)

    readargs = []
    for event in events:
        path = chip.interaction_model.EventPathIBstruct.parse(
            b'\xff' * chip.interaction_model.EventPathIBstruct.sizeof())
        if event.EndpointId is not None:
            path.EndpointId = event.EndpointId
        if event.ClusterId is not None:
            path.ClusterId = event.ClusterId
        if event.EventId is not None:
            path.EventId = event.EventId
        path = chip.interaction_model.EventPathIBstruct.build(path)
        readargs.append(ctypes.c_char_p(path))

    ctypes.pythonapi.Py_IncRef(ctypes.py_object(transaction))
    minInterval = 0
    maxInterval = 0
    if subscriptionParameters is not None:
        minInterval = subscriptionParameters.MinReportIntervalFloorSeconds
        maxInterval = subscriptionParameters.MaxReportIntervalCeilingSeconds
    res = handle.pychip_ReadClient_ReadEvents(
        ctypes.py_object(transaction), device,
        ctypes.c_bool(subscriptionParameters is not None),
        ctypes.c_uint32(minInterval), ctypes.c_uint32(maxInterval),
        ctypes.c_size_t(len(events)), *readargs)
    if res != 0:
        ctypes.pythonapi.Py_DecRef(ctypes.py_object(transaction))
    return res


def Init():
    handle = chip.native.GetLibraryHandle()

    # Uses one of the type decorators as an indicator for everything being
    # initialized.
    if not handle.pychip_WriteClient_InitCallbacks.argtypes:
        setter = chip.native.NativeLibraryHandleMethodArguments(handle)

        handle.pychip_WriteClient_WriteAttributes.restype = c_uint32
        setter.Set('pychip_WriteClient_InitCallbacks', None, [
                   _OnWriteResponseCallbackFunct, _OnWriteErrorCallbackFunct, _OnWriteDoneCallbackFunct])
        handle.pychip_ReadClient_ReadAttributes.restype = c_uint32
        setter.Set('pychip_ReadClient_InitCallbacks', None, [
                   _OnReadAttributeDataCallbackFunct, _OnReadEventDataCallbackFunct, _OnSubscriptionEstablishedCallbackFunct, _OnReadErrorCallbackFunct, _OnReadDoneCallbackFunct,
                   _OnReportBeginCallbackFunct, _OnReportEndCallbackFunct])

    handle.pychip_WriteClient_InitCallbacks(
        _OnWriteResponseCallback, _OnWriteErrorCallback, _OnWriteDoneCallback)
    handle.pychip_ReadClient_InitCallbacks(
        _OnReadAttributeDataCallback, _OnReadEventDataCallback, _OnSubscriptionEstablishedCallback, _OnReadErrorCallback, _OnReadDoneCallback,
        _OnReportBeginCallback, _OnReportEndCallback)

    _BuildAttributeIndex()
    _BuildClusterIndex()
    _BuildEventIndex()
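
# A minimal sketch of the three ways AttributePath can be constructed,
# following the __init__ logic above. Clusters.Basic and its VendorName
# attribute are used purely as illustrative generated cluster objects; any
# class from chip.clusters.Objects would work the same way.
import chip.clusters as Clusters

# Wildcard over everything on endpoint 0 (ClusterId/AttributeId stay None).
wildcard_path = AttributePath(EndpointId=0)
# All attributes of one cluster; Cluster= expects a cluster class.
cluster_path = AttributePath(EndpointId=0, Cluster=Clusters.Basic)
# A single attribute; Attribute= fills in both ClusterId and AttributeId.
attribute_path = AttributePath(
    EndpointId=0, Attribute=Clusters.Basic.Attributes.VendorName)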
import osNEWLINEimport pickleNEWLINEimport uuidNEWLINENEWLINEimport dagstermillNEWLINEfrom dagstermill.io_managers import local_output_notebook_io_managerNEWLINENEWLINEfrom dagster import (NEWLINE Field,NEWLINE FileHandle,NEWLINE InputDefinition,NEWLINE Int,NEWLINE List,NEWLINE ModeDefinition,NEWLINE OutputDefinition,NEWLINE ResourceDefinition,NEWLINE String,NEWLINE composite_solid,NEWLINE fs_io_manager,NEWLINE job,NEWLINE pipeline,NEWLINE repository,NEWLINE resource,NEWLINE solid,NEWLINE)NEWLINEfrom dagster.core.storage.file_manager import local_file_managerNEWLINEfrom dagster.utils import PICKLE_PROTOCOL, file_relative_pathNEWLINENEWLINEtry:NEWLINE from dagster_pandas import DataFrameNEWLINENEWLINE DAGSTER_PANDAS_PRESENT = TrueNEWLINEexcept ImportError:NEWLINE DAGSTER_PANDAS_PRESENT = FalseNEWLINENEWLINEtry:NEWLINE import sklearn as _NEWLINENEWLINE SKLEARN_PRESENT = TrueNEWLINEexcept ImportError:NEWLINE SKLEARN_PRESENT = FalseNEWLINENEWLINEtry:NEWLINE import matplotlib as _NEWLINENEWLINE MATPLOTLIB_PRESENT = TrueNEWLINEexcept ImportError:NEWLINE MATPLOTLIB_PRESENT = FalseNEWLINENEWLINENEWLINEclass BasicTest:NEWLINE def __init__(self, x):NEWLINE self.x = xNEWLINENEWLINE def __repr__(self):NEWLINE return "BasicTest: {x}".format(x=str(self.x))NEWLINENEWLINENEWLINEdef nb_test_path(name):NEWLINE return file_relative_path(__file__, f"notebooks/{name}.ipynb")NEWLINENEWLINENEWLINEdef test_nb_solid(name, **kwargs):NEWLINE output_defs = kwargs.pop("output_defs", [OutputDefinition(is_required=False)])NEWLINENEWLINE return dagstermill.define_dagstermill_solid(NEWLINE name=name,NEWLINE notebook_path=nb_test_path(name),NEWLINE output_notebook_name="notebook",NEWLINE output_defs=output_defs,NEWLINE **kwargs,NEWLINE )NEWLINENEWLINENEWLINEdef test_nb_op(name, path, **kwargs):NEWLINE output_defs = kwargs.pop("output_defs", [OutputDefinition(is_required=False)])NEWLINENEWLINE return dagstermill.define_dagstermill_op(NEWLINE name=name,NEWLINE notebook_path=path,NEWLINE output_notebook_name="notebook",NEWLINE output_defs=output_defs,NEWLINE **kwargs,NEWLINE )NEWLINENEWLINENEWLINEdefault_mode_defs = [NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE "io_manager": fs_io_manager,NEWLINE }NEWLINE )NEWLINE]NEWLINENEWLINENEWLINEhello_world = test_nb_solid("hello_world", output_defs=[])NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_pipeline():NEWLINE hello_world()NEWLINENEWLINENEWLINEhello_world_op = test_nb_op(NEWLINE "hello_world_op",NEWLINE nb_test_path("hello_world"),NEWLINE output_defs=[],NEWLINE)NEWLINENEWLINENEWLINEdef build_hello_world_job():NEWLINE @job(NEWLINE resource_defs={NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE }NEWLINE )NEWLINE def hello_world_job():NEWLINE hello_world_op()NEWLINENEWLINE return hello_world_jobNEWLINENEWLINENEWLINEhello_world_with_custom_tags_and_description = dagstermill.define_dagstermill_solid(NEWLINE name="hello_world_custom",NEWLINE notebook_path=nb_test_path("hello_world"),NEWLINE output_notebook_name="notebook",NEWLINE tags={"foo": "bar"},NEWLINE description="custom description",NEWLINE)NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_with_custom_tags_and_description_pipeline():NEWLINE hello_world_with_custom_tags_and_description()NEWLINENEWLINENEWLINEhello_world_config = test_nb_solid(NEWLINE "hello_world_config",NEWLINE config_schema={"greeting": Field(String, is_required=False, 
default_value="hello")},NEWLINE)NEWLINENEWLINENEWLINEgoodbye_config = dagstermill.define_dagstermill_solid(NEWLINE name="goodbye_config",NEWLINE notebook_path=nb_test_path("print_dagstermill_context_solid_config"),NEWLINE output_notebook_name="notebook",NEWLINE config_schema={"farewell": Field(String, is_required=False, default_value="goodbye")},NEWLINE)NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_config_pipeline():NEWLINE hello_world_config()NEWLINE goodbye_config()NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef alias_config_pipeline():NEWLINE hello_world_config.alias("aliased_greeting")()NEWLINE goodbye_config.alias("aliased_goodbye")()NEWLINENEWLINENEWLINE@solid(input_defs=[InputDefinition("notebook")])NEWLINEdef load_notebook(notebook):NEWLINE return notebookNEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_with_output_notebook_pipeline():NEWLINE notebook = hello_world()NEWLINE load_notebook(notebook)NEWLINENEWLINENEWLINEhello_world_no_output_notebook_no_file_manager = dagstermill.define_dagstermill_solid(NEWLINE name="hello_world_no_output_notebook_no_file_manager",NEWLINE notebook_path=nb_test_path("hello_world"),NEWLINE)NEWLINENEWLINENEWLINE@pipelineNEWLINEdef hello_world_no_output_notebook_no_file_manager_pipeline():NEWLINE hello_world_no_output_notebook_no_file_manager()NEWLINENEWLINENEWLINEhello_world_no_output_notebook = dagstermill.define_dagstermill_solid(NEWLINE name="hello_world_no_output_notebook",NEWLINE notebook_path=nb_test_path("hello_world"),NEWLINE)NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_no_output_notebook_pipeline():NEWLINE hello_world_no_output_notebook()NEWLINENEWLINENEWLINEhello_world_output = test_nb_solid("hello_world_output", output_defs=[OutputDefinition(str)])NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_output_pipeline():NEWLINE hello_world_output()NEWLINENEWLINENEWLINEhello_world_explicit_yield = test_nb_solid(NEWLINE "hello_world_explicit_yield", output_defs=[OutputDefinition(str)]NEWLINE)NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_world_explicit_yield_pipeline():NEWLINE hello_world_explicit_yield()NEWLINENEWLINENEWLINEhello_logging = test_nb_solid("hello_logging")NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef hello_logging_pipeline():NEWLINE hello_logging()NEWLINENEWLINENEWLINEadd_two_numbers = test_nb_solid(NEWLINE "add_two_numbers",NEWLINE input_defs=[NEWLINE InputDefinition(name="a", dagster_type=Int),NEWLINE InputDefinition(name="b", dagster_type=Int),NEWLINE ],NEWLINE output_defs=[OutputDefinition(Int)],NEWLINE)NEWLINENEWLINENEWLINEmult_two_numbers = test_nb_solid(NEWLINE "mult_two_numbers",NEWLINE input_defs=[NEWLINE InputDefinition(name="a", dagster_type=Int),NEWLINE InputDefinition(name="b", dagster_type=Int),NEWLINE ],NEWLINE output_defs=[OutputDefinition(Int)],NEWLINE)NEWLINENEWLINENEWLINE@solidNEWLINEdef return_one():NEWLINE return 1NEWLINENEWLINENEWLINE@solidNEWLINEdef return_two():NEWLINE return 2NEWLINENEWLINENEWLINE@solidNEWLINEdef return_three():NEWLINE return 3NEWLINENEWLINENEWLINE@solidNEWLINEdef return_four():NEWLINE return 4NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef add_pipeline():NEWLINE add_two_numbers(return_one(), return_two())NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef double_add_pipeline():NEWLINE add_two_numbers.alias("add_two_numbers_1")(return_one(), 
return_two())NEWLINE add_two_numbers.alias("add_two_numbers_2")(return_three(), return_four())NEWLINENEWLINENEWLINE@solid(input_defs=[], config_schema=Int)NEWLINEdef load_constant(context):NEWLINE return context.solid_configNEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef notebook_dag_pipeline():NEWLINE a = load_constant.alias("load_a")()NEWLINE b = load_constant.alias("load_b")()NEWLINE num, _ = add_two_numbers(a, b)NEWLINE mult_two_numbers(num, b)NEWLINENEWLINENEWLINEerror_notebook = test_nb_solid("error_notebook")NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef error_pipeline():NEWLINE error_notebook()NEWLINENEWLINENEWLINEif DAGSTER_PANDAS_PRESENT and SKLEARN_PRESENT and MATPLOTLIB_PRESENT:NEWLINENEWLINE clean_data = test_nb_solid("clean_data", output_defs=[OutputDefinition(DataFrame)])NEWLINENEWLINE # FIXME add an output to thisNEWLINE tutorial_LR = test_nb_solid(NEWLINE "tutorial_LR",NEWLINE input_defs=[InputDefinition(name="df", dagster_type=DataFrame)],NEWLINE )NEWLINENEWLINE tutorial_RF = test_nb_solid(NEWLINE "tutorial_RF",NEWLINE input_defs=[InputDefinition(name="df", dagster_type=DataFrame)],NEWLINE )NEWLINENEWLINE @pipeline(mode_defs=default_mode_defs)NEWLINE def tutorial_pipeline():NEWLINE dfr, _ = clean_data()NEWLINE # FIXME get better names for theseNEWLINE tutorial_LR(dfr)NEWLINE tutorial_RF(dfr)NEWLINENEWLINENEWLINE@solid("resource_solid", required_resource_keys={"list"})NEWLINEdef resource_solid(context):NEWLINE context.resources.list.append("Hello, solid!")NEWLINE return TrueNEWLINENEWLINENEWLINEhello_world_resource = test_nb_solid(NEWLINE "hello_world_resource",NEWLINE input_defs=[InputDefinition("nonce")],NEWLINE required_resource_keys={"list"},NEWLINE)NEWLINENEWLINEhello_world_resource_with_exception = test_nb_solid(NEWLINE "hello_world_resource_with_exception",NEWLINE input_defs=[InputDefinition("nonce")],NEWLINE required_resource_keys={"list"},NEWLINE)NEWLINENEWLINENEWLINEclass FilePickleList:NEWLINE # This is not thread- or anything else-safeNEWLINE def __init__(self, path):NEWLINE self.closed = FalseNEWLINE self.id = str(uuid.uuid4())[-6:]NEWLINE self.path = pathNEWLINE self.list = []NEWLINE if not os.path.exists(self.path):NEWLINE self.write()NEWLINE self.read()NEWLINE self.open()NEWLINENEWLINE def open(self):NEWLINE self.read()NEWLINE self.append("Opened")NEWLINENEWLINE def append(self, obj):NEWLINE self.read()NEWLINE self.list.append(self.id + ": " + obj)NEWLINE self.write()NEWLINENEWLINE def read(self):NEWLINE with open(self.path, "rb") as fd:NEWLINE self.list = pickle.load(fd)NEWLINE return self.listNEWLINENEWLINE def write(self):NEWLINE with open(self.path, "wb") as fd:NEWLINE pickle.dump(self.list, fd, protocol=PICKLE_PROTOCOL)NEWLINENEWLINE def close(self):NEWLINE self.append("Closed")NEWLINE self.closed = TrueNEWLINENEWLINENEWLINE@resource(config_schema=Field(String))NEWLINEdef filepicklelist_resource(init_context):NEWLINE filepicklelist = FilePickleList(init_context.resource_config)NEWLINE try:NEWLINE yield filepicklelistNEWLINE finally:NEWLINE filepicklelist.close()NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE name="test",NEWLINE resource_defs={NEWLINE "list": ResourceDefinition(lambda _: []),NEWLINE "io_manager": fs_io_manager,NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE },NEWLINE ),NEWLINE ModeDefinition(NEWLINE name="prod",NEWLINE resource_defs={NEWLINE "list": filepicklelist_resource,NEWLINE "output_notebook_io_manager": 
local_output_notebook_io_manager,NEWLINE "io_manager": fs_io_manager,NEWLINE },NEWLINE ),NEWLINE ]NEWLINE)NEWLINEdef resource_pipeline():NEWLINE hello_world_resource(resource_solid())NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "list": filepicklelist_resource,NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE "io_manager": fs_io_manager,NEWLINE }NEWLINE )NEWLINE ]NEWLINE)NEWLINEdef resource_with_exception_pipeline():NEWLINE hello_world_resource_with_exception(resource_solid())NEWLINENEWLINENEWLINEbad_kernel = test_nb_solid("bad_kernel")NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef bad_kernel_pipeline():NEWLINE bad_kernel()NEWLINENEWLINENEWLINEreimport = test_nb_solid(NEWLINE "reimport", input_defs=[InputDefinition("l", List[int])], output_defs=[OutputDefinition(int)]NEWLINE)NEWLINENEWLINENEWLINE@solidNEWLINEdef lister():NEWLINE return [1, 2, 3]NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef reimport_pipeline():NEWLINE reimport(lister())NEWLINENEWLINENEWLINEyield_3 = test_nb_solid("yield_3", output_defs=[OutputDefinition(Int)])NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef yield_3_pipeline():NEWLINE yield_3()NEWLINENEWLINENEWLINEyield_obj = test_nb_solid("yield_obj")NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef yield_obj_pipeline():NEWLINE yield_obj()NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef retries_pipeline():NEWLINE test_nb_solid("raise_retry")()NEWLINE test_nb_solid("yield_retry")()NEWLINENEWLINENEWLINE@pipeline(mode_defs=default_mode_defs)NEWLINEdef failure_pipeline():NEWLINE test_nb_solid("raise_failure")()NEWLINE test_nb_solid("yield_failure")()NEWLINENEWLINENEWLINEyield_something = test_nb_solid(NEWLINE "yield_something",NEWLINE input_defs=[InputDefinition("obj", str)],NEWLINE output_defs=[OutputDefinition(str, "result")],NEWLINE)NEWLINENEWLINENEWLINE@solidNEWLINEdef fan_in(a, b):NEWLINE return f"{a} {b}"NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "io_manager": fs_io_manager,NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE }NEWLINE )NEWLINE ]NEWLINE)NEWLINEdef fan_in_notebook_pipeline():NEWLINE val_a, _ = yield_something.alias("solid_1")()NEWLINE val_b, _ = yield_something.alias("solid_2")()NEWLINE fan_in(val_a, val_b)NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE }NEWLINE )NEWLINE ]NEWLINE)NEWLINEdef fan_in_notebook_pipeline_in_mem():NEWLINE val_a, _ = yield_something.alias("solid_1")()NEWLINE val_b, _ = yield_something.alias("solid_2")()NEWLINE fan_in(val_a, val_b)NEWLINENEWLINENEWLINE@composite_solidNEWLINEdef outer():NEWLINE yield_something()NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "io_manager": fs_io_manager,NEWLINE "output_notebook_io_manager": local_output_notebook_io_manager,NEWLINE }NEWLINE )NEWLINE ]NEWLINE)NEWLINEdef composite_pipeline():NEWLINE outer()NEWLINENEWLINENEWLINE###################################################################################################NEWLINE# Back compatNEWLINE###################################################################################################NEWLINENEWLINEhello_world_legacy = dagstermill.define_dagstermill_solid(NEWLINE 
name="hello_world_legacy",NEWLINE notebook_path=nb_test_path("hello_world"),NEWLINE output_notebook="notebook",NEWLINE)NEWLINENEWLINENEWLINE@solid(input_defs=[InputDefinition("notebook", dagster_type=FileHandle)])NEWLINEdef load_notebook_legacy(notebook):NEWLINE return os.path.exists(notebook.path_desc)NEWLINENEWLINENEWLINE@pipeline(NEWLINE mode_defs=[NEWLINE ModeDefinition(NEWLINE resource_defs={NEWLINE "io_manager": fs_io_manager,NEWLINE "file_manager": local_file_manager,NEWLINE }NEWLINE )NEWLINE ]NEWLINE)NEWLINEdef hello_world_with_output_notebook_pipeline_legacy():NEWLINE notebook = hello_world_legacy()NEWLINE load_notebook_legacy(notebook)NEWLINENEWLINENEWLINE@repositoryNEWLINEdef notebook_repo():NEWLINE pipelines = [NEWLINE bad_kernel_pipeline,NEWLINE error_pipeline,NEWLINE hello_world_pipeline,NEWLINE hello_world_with_custom_tags_and_description_pipeline,NEWLINE hello_world_config_pipeline,NEWLINE hello_world_explicit_yield_pipeline,NEWLINE hello_world_output_pipeline,NEWLINE hello_world_with_output_notebook_pipeline,NEWLINE hello_logging_pipeline,NEWLINE resource_pipeline,NEWLINE resource_with_exception_pipeline,NEWLINE add_pipeline,NEWLINE notebook_dag_pipeline,NEWLINE reimport_pipeline,NEWLINE yield_3_pipeline,NEWLINE yield_obj_pipeline,NEWLINE retries_pipeline,NEWLINE failure_pipeline,NEWLINE fan_in_notebook_pipeline_in_mem,NEWLINE fan_in_notebook_pipeline,NEWLINE hello_world_no_output_notebook_no_file_manager_pipeline,NEWLINE hello_world_with_output_notebook_pipeline_legacy,NEWLINE ]NEWLINE if DAGSTER_PANDAS_PRESENT and SKLEARN_PRESENT and MATPLOTLIB_PRESENT:NEWLINE pipelines += [tutorial_pipeline]NEWLINENEWLINE return pipelinesNEWLINE
# ==============================================================================NEWLINE# zero.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport osNEWLINEimport sysNEWLINENEWLINEERROR = FalseNEWLINENEWLINEdef main(function):NEWLINE    try:NEWLINE        arguments = sys.argv[1:]NEWLINE        assert argumentsNEWLINE        for path in arguments:NEWLINE            assert os.path.isdir(path)NEWLINE        for path in arguments:NEWLINE            engine(path, function)NEWLINE    except AssertionError:NEWLINE        # only the argument checks above should fall through to the usage messageNEWLINE        sys.stdout.write('Usage: %s <directory>\n' % os.path.basename(sys.argv[0]))NEWLINENEWLINEdef engine(path, function):NEWLINE    global ERRORNEWLINE    for root, dirs, files in os.walk(path):NEWLINE        for name in files:NEWLINE            path = os.path.join(root, name)NEWLINE            try:NEWLINE                function(path)NEWLINE            except Exception:NEWLINE                sys.stderr.write('%sError: %s' % (ERROR and '\n' or '', path))NEWLINE                ERROR = TrueNEWLINENEWLINEdef zero(path):NEWLINE    size = os.path.getsize(path)NEWLINE    if size:NEWLINE        data = open(path, 'wb')NEWLINE        todo = sizeNEWLINE        # overwrite in 1 MiB chunks, then write the remainderNEWLINE        if todo >= 2 ** 20:NEWLINE            buff = b'\x00' * 2 ** 20NEWLINE            while todo >= 2 ** 20:NEWLINE                data.write(buff)NEWLINE                todo = size - data.tell()NEWLINE        data.write(b'\x00' * todo)NEWLINE        data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE    main(zero)NEWLINENEWLINE# ==============================================================================NEWLINE# upper.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef upper(path):NEWLINE    root, ext = zero.os.path.splitext(path)NEWLINE    upper = ext.upper()NEWLINE    if ext != upper:NEWLINE        zero.os.rename(path, root + upper)NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(upper)NEWLINENEWLINE# ==============================================================================NEWLINE# untar.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINEimport tarfileNEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(lambda path: tarfile.open(path).extractall(NEWLINE        zero.os.path.dirname(path)))NEWLINENEWLINE# ==============================================================================NEWLINE# remove.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(zero.os.remove)NEWLINENEWLINE# ==============================================================================NEWLINE# one.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef one(path):NEWLINE    size = zero.os.path.getsize(path)NEWLINE    if size:NEWLINE        data = open(path, 'wb')NEWLINE        todo = sizeNEWLINE        # overwrite in 1 MiB chunks, then write the remainderNEWLINE        if todo >= 2 ** 20:NEWLINE            buff = b'\xFF' * 2 ** 20NEWLINE            while todo >= 2 ** 20:NEWLINE                data.write(buff)NEWLINE                todo = size - data.tell()NEWLINE        data.write(b'\xFF' * todo)NEWLINE        data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(one)NEWLINENEWLINE# ==============================================================================NEWLINE# lower.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef lower(path):NEWLINE    root, ext = zero.os.path.splitext(path)NEWLINE    lower = ext.lower()NEWLINE    if ext != lower:NEWLINE        zero.os.rename(path, root + lower)NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(lower)NEWLINENEWLINE# ==============================================================================NEWLINE# random.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef kaos(path):NEWLINE    size = zero.os.path.getsize(path)NEWLINE    if size:NEWLINE        data = open(path, 'wb')NEWLINE        todo = sizeNEWLINE        while todo:NEWLINE            data.write(zero.os.urandom(min(todo, 2 ** 20)))NEWLINE            todo = size - data.tell()NEWLINE        data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(kaos)NEWLINENEWLINE# ==============================================================================NEWLINE# name.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINEimport randomNEWLINENEWLINE# caution: when this file sits next to the random.py utility above, that fileNEWLINE# shadows the standard library random module and random.sample below fails;NEWLINE# keep the two scripts in separate directories before running.NEWLINESTRING = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'NEWLINENEWLINEdef ident(path):NEWLINE    d, b = zero.os.path.split(path)NEWLINE    zero.os.rename(path, zero.os.path.join(d, ''.join(random.sample(NEWLINE        STRING, len(STRING))) + zero.os.path.splitext(b)[1]))NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(ident)NEWLINENEWLINE# ==============================================================================NEWLINE# newlines.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINE# bytes permitted in a plain text file: printable ASCII plus tab/newline/CRNEWLINEDELETECHARS = bytes(range(32, 127)) + b'\t\n\r'NEWLINENEWLINEdef convert(path):NEWLINE    # rewrite through text mode (normalizing line endings) only when a 1 MiBNEWLINE    # sample contains nothing but ordinary text charactersNEWLINE    with open(path, 'rb') as stream:NEWLINE        sample = stream.read(2 ** 20)NEWLINE    if not sample.translate(None, DELETECHARS):NEWLINE        data = open(path, 'r').read()NEWLINE        open(path, 'w').write(data)NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(convert)NEWLINENEWLINE# ==============================================================================NEWLINE# extension.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef bias(path):NEWLINE    root, ext = zero.os.path.splitext(path)NEWLINE    if not ext[1:]:NEWLINE        zero.os.rename(path, root + '.txt')NEWLINENEWLINEif __name__ == '__main__':NEWLINE    zero.main(bias)NEWLINE
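NEWLINE# Usage sketch for the utilities above: each script walks the directoriesNEWLINE# given on the command line and applies its action to every file found, e.g.NEWLINE#NEWLINE#     python zero.py /path/to/dir      # overwrite file contents with 0x00 bytesNEWLINE#     python random.py /path/to/dir    # overwrite file contents with random bytesNEWLINE#     python lower.py /path/to/dir     # lower-case every file extensionNEWLINE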
# coding=utf-8NEWLINE# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***NEWLINE# *** Do not edit by hand unless you're certain you know what you are doing! ***NEWLINENEWLINEimport jsonNEWLINEimport warningsNEWLINEimport pulumiNEWLINEimport pulumi.runtimeNEWLINEfrom typing import UnionNEWLINEfrom .. import utilities, tablesNEWLINENEWLINEclass Registry(pulumi.CustomResource):NEWLINE admin_enabled: pulumi.Output[bool]NEWLINE """NEWLINE Specifies whether the admin user is enabled. Defaults to `false`.NEWLINE """NEWLINE admin_password: pulumi.Output[str]NEWLINE """NEWLINE The Password associated with the Container Registry Admin account - if the admin account is enabled.NEWLINE """NEWLINE admin_username: pulumi.Output[str]NEWLINE """NEWLINE The Username associated with the Container Registry Admin account - if the admin account is enabled.NEWLINE """NEWLINE georeplication_locations: pulumi.Output[list]NEWLINE """NEWLINE A list of Azure locations where the container registry should be geo-replicated.NEWLINE """NEWLINE location: pulumi.Output[str]NEWLINE """NEWLINE Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.NEWLINE """NEWLINE login_server: pulumi.Output[str]NEWLINE """NEWLINE The URL that can be used to log into the container registry.NEWLINE """NEWLINE name: pulumi.Output[str]NEWLINE """NEWLINE Specifies the name of the Container Registry. Changing this forces a new resource to be created.NEWLINE """NEWLINE network_rule_set: pulumi.Output[dict]NEWLINE """NEWLINE A `network_rule_set` block as documented below.NEWLINENEWLINE * `default_action` (`str`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`NEWLINE * `ip_rules` (`list`) - One or more `ip_rule` blocks as defined below.NEWLINE * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`NEWLINE * `ipRange` (`str`) - The CIDR block from which requests will match the rule.NEWLINENEWLINE * `virtualNetworks` (`list`) - One or more `virtual_network` blocks as defined below.NEWLINE * `action` (`str`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`NEWLINE * `subnet_id` (`str`) - The subnet id from which requests will match the rule.NEWLINE """NEWLINE resource_group_name: pulumi.Output[str]NEWLINE """NEWLINE The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.NEWLINE """NEWLINE sku: pulumi.Output[str]NEWLINE """NEWLINE The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. 
`Classic` (which was previously `Basic`) is supported only for existing resources.NEWLINE """NEWLINE storage_account_id: pulumi.Output[str]NEWLINE """NEWLINE The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.NEWLINE """NEWLINE tags: pulumi.Output[dict]NEWLINE """NEWLINE A mapping of tags to assign to the resource.NEWLINE """NEWLINE def __init__(__self__, resource_name, opts=None, admin_enabled=None, georeplication_locations=None, location=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account_id=None, tags=None, __props__=None, __name__=None, __opts__=None):NEWLINE """NEWLINE Manages an Azure Container Registry.NEWLINENEWLINE ## Example UsageNEWLINENEWLINENEWLINENEWLINE ```pythonNEWLINE import pulumiNEWLINE import pulumi_azure as azureNEWLINENEWLINE rg = azure.core.ResourceGroup("rg", location="West US")NEWLINE acr = azure.containerservice.Registry("acr",NEWLINE resource_group_name=rg.name,NEWLINE location=rg.location,NEWLINE sku="Premium",NEWLINE admin_enabled=False,NEWLINE georeplication_locations=[NEWLINE "East US",NEWLINE "West Europe",NEWLINE ])NEWLINE ```NEWLINENEWLINENEWLINE :param str resource_name: The name of the resource.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.NEWLINE :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.NEWLINE :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[str] name: Specifies the name of the Container Registry. Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.NEWLINE :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[str] sku: The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. `Classic` (which was previously `Basic`) is supported only for existing resources.NEWLINE :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.NEWLINE :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.NEWLINENEWLINE The **network_rule_set** object supports the following:NEWLINENEWLINE * `default_action` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`NEWLINE * `ip_rules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.NEWLINE * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`NEWLINE * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.NEWLINENEWLINE * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.NEWLINE * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. 
At this time the only supported value is `Allow`NEWLINE * `subnet_id` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.NEWLINE """NEWLINE if __name__ is not None:NEWLINE warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)NEWLINE resource_name = __name__NEWLINE if __opts__ is not None:NEWLINE warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)NEWLINE opts = __opts__NEWLINE if opts is None:NEWLINE opts = pulumi.ResourceOptions()NEWLINE if not isinstance(opts, pulumi.ResourceOptions):NEWLINE raise TypeError('Expected resource options to be a ResourceOptions instance')NEWLINE if opts.version is None:NEWLINE opts.version = utilities.get_version()NEWLINE if opts.id is None:NEWLINE if __props__ is not None:NEWLINE raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')NEWLINE __props__ = dict()NEWLINENEWLINE __props__['admin_enabled'] = admin_enabledNEWLINE __props__['georeplication_locations'] = georeplication_locationsNEWLINE __props__['location'] = locationNEWLINE __props__['name'] = nameNEWLINE __props__['network_rule_set'] = network_rule_setNEWLINE if resource_group_name is None:NEWLINE raise TypeError("Missing required property 'resource_group_name'")NEWLINE __props__['resource_group_name'] = resource_group_nameNEWLINE __props__['sku'] = skuNEWLINE __props__['storage_account_id'] = storage_account_idNEWLINE __props__['tags'] = tagsNEWLINE __props__['admin_password'] = NoneNEWLINE __props__['admin_username'] = NoneNEWLINE __props__['login_server'] = NoneNEWLINE super(Registry, __self__).__init__(NEWLINE 'azure:containerservice/registry:Registry',NEWLINE resource_name,NEWLINE __props__,NEWLINE opts)NEWLINENEWLINE @staticmethodNEWLINE def get(resource_name, id, opts=None, admin_enabled=None, admin_password=None, admin_username=None, georeplication_locations=None, location=None, login_server=None, name=None, network_rule_set=None, resource_group_name=None, sku=None, storage_account_id=None, tags=None):NEWLINE """NEWLINE Get an existing Registry resource's state with the given name, id, and optional extraNEWLINE properties used to qualify the lookup.NEWLINENEWLINE :param str resource_name: The unique name of the resulting resource.NEWLINE :param str id: The unique provider ID of the resource to lookup.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE :param pulumi.Input[bool] admin_enabled: Specifies whether the admin user is enabled. Defaults to `false`.NEWLINE :param pulumi.Input[str] admin_password: The Password associated with the Container Registry Admin account - if the admin account is enabled.NEWLINE :param pulumi.Input[str] admin_username: The Username associated with the Container Registry Admin account - if the admin account is enabled.NEWLINE :param pulumi.Input[list] georeplication_locations: A list of Azure locations where the container registry should be geo-replicated.NEWLINE :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[str] login_server: The URL that can be used to log into the container registry.NEWLINE :param pulumi.Input[str] name: Specifies the name of the Container Registry. 
Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[dict] network_rule_set: A `network_rule_set` block as documented below.NEWLINE :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the Container Registry. Changing this forces a new resource to be created.NEWLINE :param pulumi.Input[str] sku: The SKU name of the container registry. Possible values are `Basic`, `Standard` and `Premium`. `Classic` (which was previously `Basic`) is supported only for existing resources.NEWLINE :param pulumi.Input[str] storage_account_id: The ID of a Storage Account which must be located in the same Azure Region as the Container Registry.NEWLINE :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.NEWLINENEWLINE The **network_rule_set** object supports the following:NEWLINENEWLINE * `default_action` (`pulumi.Input[str]`) - The behaviour for requests matching no rules. Either `Allow` or `Deny`. Defaults to `Allow`NEWLINE * `ip_rules` (`pulumi.Input[list]`) - One or more `ip_rule` blocks as defined below.NEWLINE * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`NEWLINE * `ipRange` (`pulumi.Input[str]`) - The CIDR block from which requests will match the rule.NEWLINENEWLINE * `virtualNetworks` (`pulumi.Input[list]`) - One or more `virtual_network` blocks as defined below.NEWLINE * `action` (`pulumi.Input[str]`) - The behaviour for requests matching this rule. At this time the only supported value is `Allow`NEWLINE * `subnet_id` (`pulumi.Input[str]`) - The subnet id from which requests will match the rule.NEWLINE """NEWLINE opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))NEWLINENEWLINE __props__ = dict()NEWLINENEWLINE __props__["admin_enabled"] = admin_enabledNEWLINE __props__["admin_password"] = admin_passwordNEWLINE __props__["admin_username"] = admin_usernameNEWLINE __props__["georeplication_locations"] = georeplication_locationsNEWLINE __props__["location"] = locationNEWLINE __props__["login_server"] = login_serverNEWLINE __props__["name"] = nameNEWLINE __props__["network_rule_set"] = network_rule_setNEWLINE __props__["resource_group_name"] = resource_group_nameNEWLINE __props__["sku"] = skuNEWLINE __props__["storage_account_id"] = storage_account_idNEWLINE __props__["tags"] = tagsNEWLINE return Registry(resource_name, opts=opts, __props__=__props__)NEWLINE def translate_output_property(self, prop):NEWLINE return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or propNEWLINENEWLINE def translate_input_property(self, prop):NEWLINE return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or propNEWLINENEWLINE
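# A hypothetical configuration sketch (not from the generated module) showingNEWLINE# the shape of the network_rule_set block documented above; the resource namesNEWLINE# and CIDR range are illustrative only:NEWLINE#NEWLINE#     acr = Registry("acr",NEWLINE#         resource_group_name=rg.name,NEWLINE#         sku="Premium",NEWLINE#         network_rule_set={NEWLINE#             "default_action": "Deny",NEWLINE#             "ip_rules": [{"action": "Allow", "ipRange": "203.0.113.0/24"}],NEWLINE#         })NEWLINENEWLINE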
# Generated by Django 3.1.4 on 2021-02-06 21:18NEWLINENEWLINEfrom django.db import migrationsNEWLINENEWLINENEWLINEdef register_micropub_scopes(apps, schema):NEWLINENEWLINE MMicropubScope = apps.get_model("indieweb", "MMicropubScope")NEWLINENEWLINE MMicropubScope.objects.get_or_create(key="create", name="Create")NEWLINE MMicropubScope.objects.get_or_create(key="update", name="Update")NEWLINE MMicropubScope.objects.get_or_create(key="delete", name="Delete")NEWLINE MMicropubScope.objects.get_or_create(key="draft", name="Draft")NEWLINE MMicropubScope.objects.get_or_create(key="media", name="Media")NEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ("indieweb", "0003_ttoken"),NEWLINE ]NEWLINENEWLINE operations = [migrations.RunPython(register_micropub_scopes, reverse_code=migrations.RunPython.noop)]NEWLINE
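NEWLINE# A hypothetical reverse function, had this migration not chosenNEWLINE# migrations.RunPython.noop as its reverse_code:NEWLINE#NEWLINE#     def remove_micropub_scopes(apps, schema):NEWLINE#         MMicropubScope = apps.get_model("indieweb", "MMicropubScope")NEWLINE#         MMicropubScope.objects.filter(NEWLINE#             key__in=["create", "update", "delete", "draft", "media"]NEWLINE#         ).delete()NEWLINENEWLINE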
"""Set up the Python API for dingz devices."""NEWLINEimport osNEWLINENEWLINEimport sysNEWLINENEWLINEfrom setuptools import setup, find_packagesNEWLINENEWLINEhere = os.path.abspath(os.path.dirname(__file__))NEWLINENEWLINEwith open(os.path.join(here, "README.rst"), encoding="utf-8") as readme:NEWLINE long_description = readme.read()NEWLINENEWLINEif sys.argv[-1] == "publish":NEWLINE os.system("python3 setup.py sdist upload")NEWLINE sys.exit()NEWLINENEWLINEsetup(NEWLINE name="python-dingz",NEWLINE version="0.4.0.dev1",NEWLINE description="Python API for interacting with Dingz devices",NEWLINE long_description=long_description,NEWLINE url="https://github.com/home-assistant-ecosystem/python-dingz",NEWLINE author="Fabian Affolter",NEWLINE author_email="[email protected]",NEWLINE license="Apache License 2.0",NEWLINE install_requires=["aiohttp<4", "async_timeout<4", "click"],NEWLINE packages=find_packages(),NEWLINE zip_safe=True,NEWLINE include_package_data=True,NEWLINE entry_points={"console_scripts": ["dingz = dingz.cli:main"]},NEWLINE classifiers=[NEWLINE "Development Status :: 3 - Alpha",NEWLINE "Environment :: Console",NEWLINE "Intended Audience :: Developers",NEWLINE "License :: OSI Approved :: Apache Software License",NEWLINE "Operating System :: MacOS :: MacOS X",NEWLINE "Operating System :: Microsoft :: Windows",NEWLINE "Operating System :: POSIX",NEWLINE "Programming Language :: Python :: 3.7",NEWLINE "Programming Language :: Python :: 3.8",NEWLINE "Topic :: Utilities",NEWLINE ],NEWLINE)NEWLINE
# -*- coding: utf-8 -*-NEWLINE"""NEWLINECreated on Thu Jan 14 12:58:41 2015NEWLINE@author: Tony SaadNEWLINE"""NEWLINENEWLINEimport numpy as npNEWLINEimport argparseNEWLINEimport osNEWLINEfrom xml.dom import minidomNEWLINEfrom shutil import copyfileNEWLINEimport matplotlib.pyplot as pltNEWLINENEWLINE#------------------------------------------------------------------------------NEWLINE"""NEWLINEGiven a 3D array A of size (Nx, Ny, Nz) (representative of a CFD mesh),NEWLINEthis function computes a new array B of size (Nx/2, Ny/2, Nz/2)NEWLINEsuch that the entries in B are the averaged values of corresponding cells in A.NEWLINESpecifically, for a cell centered scalar quantity that lives on A, every cellNEWLINEin B corresponds to the average of the 8 cells in A.NEWLINE@author: Tony SaadNEWLINE"""NEWLINEdef average(phi):NEWLINE  # get the dimensions of the input arrayNEWLINE  shape = phi.shapeNEWLINE  nx0 = shape[0]NEWLINE  ny0 = shape[1]NEWLINE  nz0 = shape[2]NEWLINE  # we will average two points in each dimensionNEWLINE  nx = nx0/2NEWLINE  ny = ny0/2NEWLINE  nz = nz0/2NEWLINE  phiAv = np.zeros([nx,ny,nz])NEWLINE  for iav in range(0,nx):NEWLINE    for jav in range(0,ny):NEWLINE      for kav in range(0,nz):NEWLINE        i = 2*iavNEWLINE        j = 2*javNEWLINE        k = 2*kavNEWLINE        average = (phi[i,j,k] + phi[i+1,j,k] + phi[i,j+1,k] + phi[i,j,k+1] + phi[i+1,j+1,k] + phi[i+1,j,k+1] + phi[i,j+1,k+1] + phi[i+1,j+1,k+1])/8.0NEWLINE#        average = (phi[i,j,k] + phi[i,j+1,k] + phi[i,j,k+1] + phi[i,j+1,k+1] )/4.0NEWLINE        phiAv[iav,jav,kav] = averageNEWLINE  return phiAvNEWLINENEWLINE#------------------------------------------------------------------------------NEWLINEdef main():NEWLINE  parser = argparse.ArgumentParser(description=NEWLINE    'Computes spatial order of accuracy without the need of an analytical solution. The method '+NEWLINE    'is based on computing numerical solutions at refined timesteps and then computing the '+NEWLINE    'order as p = ln[(f3 - f2)/(f2 - f1)]/ln(0.5).' +NEWLINE    ' The cleanest way to operate this script is to make a copy of it in a new directory. Then '+NEWLINE    'copy the ups file to that directory and execute the script.' )NEWLINE  NEWLINE  parser.add_argument('-ups',NEWLINE                      help='The input file to run.',required=True)  NEWLINE  NEWLINE  parser.add_argument('-levels',NEWLINE                      help='The number of spatial refinement levels.', type=int)  NEWLINE  NEWLINE  parser.add_argument('-nsteps',NEWLINE                      help='The number of timesteps. Defaults to 1.', type=int)  NEWLINE  NEWLINE  parser.add_argument('-suspath',NEWLINE                      help='The path to sus.',required=True)NEWLINE  NEWLINE  parser.add_argument('-vars', required=True,NEWLINE                      help='Comma separated list of variables for which the spatial order is to be computed. example: -vars "var1, my var".')NEWLINE  NEWLINE  args = parser.parse_args()NEWLINE  NEWLINE  # if the number of levels is not provided, set it to 3NEWLINE  if args.levels is None:NEWLINE    args.levels = 3NEWLINE  NEWLINE  # if the number of levels is < 3, then reset it to 3NEWLINE  if (args.levels < 3):NEWLINE    print 'The number of levels has to be >= 3. 
Setting levels to 3'NEWLINE args.levels = 3NEWLINE NEWLINE rootups = args.upsNEWLINE nLevels = args.levelsNEWLINE NEWLINE # cleanup the list of variables for which the order is to be computedNEWLINE myvars = [x.strip() for x in args.vars.split(',')]NEWLINE NEWLINE # first makes copies of the ups filesNEWLINE fnames = []NEWLINE basename = os.path.basename(rootups)NEWLINE basename = os.path.splitext(basename)[0]NEWLINE for i in range(0,nLevels):NEWLINE #fname = os.path.splitext(rootups)[0] + '-t' + str(i) + '.ups' NEWLINE fname = basename + '-t' + str(i) + '.ups'NEWLINE fnames.append(fname)NEWLINE copyfile(rootups, fname) NEWLINE NEWLINE # now loop over the copied files and change the dt and the uda nameNEWLINE refinement = 1NEWLINE maxSteps = 1NEWLINE NEWLINE if args.nsteps is not None:NEWLINE maxSteps = args.nstepsNEWLINE NEWLINE args.suspath = os.path.normpath(args.suspath)NEWLINE args.suspath = os.path.abspath(args.suspath)NEWLINE print args.suspathNEWLINE os.system('ln -fs ' + args.suspath + '/sus sus')NEWLINE os.system('ln -fs ' + args.suspath + '/tools/extractors/lineextract lineextract')NEWLINE NEWLINE # find total number of procs and resolutionNEWLINE xmldoc = minidom.parse(rootups)NEWLINE for node in xmldoc.getElementsByTagName('patches'):NEWLINE P = (str(node.firstChild.data).strip()).split(',')NEWLINE P0=int(P[0].split('[')[1])NEWLINE P1=int(P[1])NEWLINE P2=int(P[2].split(']')[0])NEWLINE total_proc = P0*P1*P2NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('resolution'):NEWLINE P = (str(node.firstChild.data).strip()).split(',')NEWLINE Nx=int(P[0].split('[')[1])NEWLINE Ny=int(P[1])NEWLINE Nz=int(P[2].split(']')[0])NEWLINE NEWLINE for fname in fnames:NEWLINE print 'now updating xml for ', fnameNEWLINE basename = os.path.splitext(fname)[0]NEWLINE xmldoc = minidom.parse(fname)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('filebase'):NEWLINE node.firstChild.replaceWholeText(basename + '.uda')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('resolution'):NEWLINE node.firstChild.replaceWholeText('[' + str(Nx*refinement) + ',' + str(Ny*refinement) + ',' + str(Nz*refinement) + ']')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('max_Timesteps'):NEWLINE node.firstChild.replaceWholeText(maxSteps*refinement)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('delt_min'):NEWLINE dtmin = float(node.firstChild.data)NEWLINE dtmin = dtmin/refinementNEWLINE node.firstChild.replaceWholeText(dtmin)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('delt_max'):NEWLINE node.firstChild.replaceWholeText(dtmin)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('outputTimestepInterval'):NEWLINE node.firstChild.replaceWholeText('1')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('maxTime'):NEWLINE node.firstChild.replaceWholeText('100')NEWLINE NEWLINE refinement *= 2NEWLINE f = open(fname, 'w') NEWLINE xmldoc.writexml(f) NEWLINE f.close()NEWLINE NEWLINE # now run the filesNEWLINE counter = 0NEWLINE refinement = 1NEWLINE for fname in fnames:NEWLINE os.system('mpirun -np '+ str(total_proc) + ' ' + './sus' + ' ' + fname + ' > log.txt')NEWLINE udaName = os.path.splitext(fname)[0] + '.uda'NEWLINE # #EXTRACT THE variablesNEWLINE for var in myvars: NEWLINE outFile = str(var) + '-t' + str(counter) + '.txt'NEWLINE the_command = './lineextract -pr 32 -q -v ' + str(var) + ' -timestep ' + str(maxSteps*refinement) + ' -istart 0 0 0 -iend ' + str(Nx*refinement - 1)+' '+str(Ny*refinement -1)+' '+str(Nz*refinement - 1)+ ' -o ' + outFile +' -uda '+udaNameNEWLINE print 
'Executing command: ', the_commandNEWLINE os.system(the_command)NEWLINE NEWLINE os.system('rm ' + fname) NEWLINE refinement *= 2NEWLINE counter += 1NEWLINE NEWLINE #now load the data and compute the errorsNEWLINE print '---------------- SPATIAL ORDER -------------------'NEWLINE for var in myvars: NEWLINE phiAll = []NEWLINE refinement = 1NEWLINE for i in range(0,nLevels):NEWLINE datname = str(var) + '-t' + str(i) + '.txt'NEWLINE phi = np.loadtxt(datname)NEWLINE phi = np.reshape(phi[:,3],(Nx*refinement,Ny*refinement,Nz*refinement),'F') # take the last column of phi and reshapeNEWLINE phiAll.append(phi)NEWLINE # phit = average(phi) # average phiNEWLINE # plt.matshow(phi[:,:,0])NEWLINE # plt.matshow(phit[:,:,0])NEWLINE # plt.show()NEWLINE refinement *= 2NEWLINE os.system('rm ' + datname)NEWLINE NEWLINE # local errorsNEWLINE errAll = []NEWLINE for i in range(0,nLevels-1):NEWLINE #phiav = average(phiAll[i+1]) NEWLINE diff = average(phiAll[i+1]) - phiAll[i]NEWLINE #plt.matshow(diff[:,:,0]) NEWLINE shape = diff.shapeNEWLINE size = shape[0]*shape[1]*shape[2]NEWLINE diff = diff.reshape(size)NEWLINE err = np.linalg.norm(diff,np.inf)NEWLINE errAll.append(err)NEWLINE NEWLINE #plt.show() NEWLINE # now compute orderNEWLINE print '-----------------------------' NEWLINE print ' VARIABLE: ', varNEWLINE print '-----------------------------'NEWLINE for i in range(0,nLevels-2):NEWLINE print np.log( errAll[i+1]/errAll[i] ) / np.log(0.5)NEWLINE NEWLINE os.system('rm -rf *.uda*')NEWLINE os.system('rm -rf *.dot')NEWLINE os.system('rm log.txt') NEWLINENEWLINE#------------------------------------------------------------------------------NEWLINEif __name__ == "__main__":NEWLINE main()
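NEWLINENEWLINE# Worked example of the order estimate printed above (illustrative numbers):NEWLINE# with consecutive infinity-norm errors errAll = [4.0e-3, 1.0e-3] betweenNEWLINE# refinement levels,NEWLINE#NEWLINE#     p = ln(errAll[1]/errAll[0]) / ln(0.5) = ln(0.25) / ln(0.5) = 2.0NEWLINE#NEWLINE# i.e. halving the grid spacing cuts the error by a factor of four, whichNEWLINE# indicates second-order spatial accuracy.NEWLINENEWLINE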
import sysNEWLINEimport rdkitNEWLINEfrom argparse import ArgumentParserNEWLINEfrom rdkit import Chem, DataStructsNEWLINEfrom rdkit.Chem import AllChemNEWLINEfrom rdkit.Chem.Scaffolds import MurckoScaffoldNEWLINENEWLINEparser = ArgumentParser()NEWLINEparser.add_argument('--ref_path', required=True)NEWLINEargs = parser.parse_args()NEWLINENEWLINElg = rdkit.RDLogger.logger() NEWLINElg.setLevel(rdkit.RDLogger.CRITICAL)NEWLINENEWLINEpred_data = [line.split()[:3] for line in sys.stdin]NEWLINEpred_mols = [mol for mol,x,y in pred_data if float(x) >= 0.5 and float(y) >= 0.5]NEWLINENEWLINEfraction_actives = len(pred_mols) / len(pred_data)NEWLINEprint('fraction actives:', fraction_actives)NEWLINENEWLINEwith open(args.ref_path) as f:NEWLINE next(f)NEWLINE true_mols = [line.split(',')[0] for line in f]NEWLINEprint('number of active reference', len(true_mols))NEWLINENEWLINEtrue_mols = [Chem.MolFromSmiles(s) for s in true_mols]NEWLINEtrue_mols = [x for x in true_mols if x is not None]NEWLINEtrue_fps = [AllChem.GetMorganFingerprintAsBitVect(x, 3, 2048) for x in true_mols]NEWLINENEWLINEpred_mols = [Chem.MolFromSmiles(s) for s in pred_mols]NEWLINEpred_mols = [x for x in pred_mols if x is not None]NEWLINEpred_fps = [AllChem.GetMorganFingerprintAsBitVect(x, 3, 2048) for x in pred_mols]NEWLINENEWLINEfraction_similar = 0NEWLINEfor i in range(len(pred_fps)):NEWLINE sims = DataStructs.BulkTanimotoSimilarity(pred_fps[i], true_fps)NEWLINE if max(sims) >= 0.4:NEWLINE fraction_similar += 1NEWLINENEWLINEprint('novelty:', 1 - fraction_similar / len(pred_mols))NEWLINENEWLINEsimilarity = 0NEWLINEfor i in range(len(pred_fps)):NEWLINE sims = DataStructs.BulkTanimotoSimilarity(pred_fps[i], pred_fps[:i])NEWLINE similarity += sum(sims)NEWLINENEWLINEn = len(pred_fps) NEWLINEn_pairs = n * (n - 1) / 2NEWLINEdiversity = 1 - similarity / n_pairsNEWLINEprint('diversity:', diversity)NEWLINENEWLINE
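# Worked micro-example of the similarity measure used above (uses only theNEWLINE# RDKit APIs already imported; the molecules are illustrative): two closeNEWLINE# analogues give a high Tanimoto score on radius-3, 2048-bit MorganNEWLINE# fingerprints.NEWLINE#NEWLINE#     m1, m2 = Chem.MolFromSmiles('CCO'), Chem.MolFromSmiles('CCN')NEWLINE#     fp1, fp2 = [AllChem.GetMorganFingerprintAsBitVect(m, 3, 2048) for m in (m1, m2)]NEWLINE#     DataStructs.TanimotoSimilarity(fp1, fp2)  # a float in [0, 1]NEWLINENEWLINE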
import unittestNEWLINENEWLINEfrom py_asciimath.utils.utils import UtilsMatNEWLINENEWLINENEWLINEclass TestUtilsMat(unittest.TestCase):NEWLINE    def setUp(self):NEWLINE        passNEWLINENEWLINE    # check_mat should report well-formed matrix strings as valid.NEWLINE    def test_check_mat_ok_1(self):NEWLINE        b, _ = UtilsMat.check_mat("[1,2], [1,2]")NEWLINE        self.assertTrue(b)NEWLINENEWLINE    def test_check_mat_ok_2(self):NEWLINE        b, _ = UtilsMat.check_mat("[], []")NEWLINE        self.assertTrue(b)NEWLINENEWLINE    def test_check_mat_ok_3(self):NEWLINE        b, _ = UtilsMat.check_mat("[[,[,]],[[,],]], [[[[,],],],[,[,[,]]]]")NEWLINE        self.assertTrue(b)NEWLINENEWLINE    def test_check_mat_ok_4(self):NEWLINE        b, _ = UtilsMat.check_mat("[,], [,]")NEWLINE        self.assertTrue(b)NEWLINENEWLINE    def test_check_mat_fail_1(self):NEWLINE        b, _ = UtilsMat.check_mat("[], [,]")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_2(self):NEWLINE        b, _ = UtilsMat.check_mat("[,], []")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_3(self):NEWLINE        b, _ = UtilsMat.check_mat("[,][,]")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_4(self):NEWLINE        b, _ = UtilsMat.check_mat("[,],[")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_5(self):NEWLINE        b, _ = UtilsMat.check_mat("[1,2],[1,2,[1,2],[3,4]")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_6(self):NEWLINE        b, _ = UtilsMat.check_mat("[,],")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_7(self):NEWLINE        b, _ = UtilsMat.check_mat("[,]],")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_8(self):NEWLINE        b, _ = UtilsMat.check_mat("[,],,")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_9(self):NEWLINE        b, _ = UtilsMat.check_mat("[][]")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_check_mat_fail_10(self):NEWLINE        b, _ = UtilsMat.check_mat("[]")NEWLINE        self.assertFalse(b)NEWLINENEWLINE    def test_get_mat_ok_1(self):NEWLINE        s = UtilsMat.get_latex_mat(NEWLINE            "\\left[1 , 2\\right] , \\left[1 , 2\\right]"NEWLINE        )NEWLINE        self.assertEqual(s, "1 & 2 \\\\ 1 & 2")NEWLINENEWLINE    def test_get_mat_ok_2(self):NEWLINE        s = UtilsMat.get_latex_mat(NEWLINE            "\\left[1 , 2\\right] , \\left[1 , \\right]"NEWLINE        )NEWLINE        self.assertEqual(s, "1 & 2 \\\\ 1 & \\null")NEWLINENEWLINE    def test_get_mat_ok_3(self):NEWLINE        s = UtilsMat.get_latex_mat("\\left[\\right] , \\left[\\right]")NEWLINE        self.assertEqual(s, "\\null \\\\ \\null")NEWLINENEWLINE    def test_get_mat_ok_4(self):NEWLINE        s = UtilsMat.get_latex_mat("\\left[,\\right] , \\left[,\\right]")NEWLINE        self.assertEqual(s, "\\null & \\null \\\\ \\null & \\null")NEWLINENEWLINE    def test_check_get_mat_ok_4(self):NEWLINE        s = "\\left[2*[x+n], 3(int x dx)\\right], \\left[sqrt(x), a\\right]"NEWLINE        b, row_par = UtilsMat.check_mat(s)NEWLINE        self.assertTrue(b)NEWLINE        self.assertEqual(row_par, ["[", "]"])NEWLINE        m = UtilsMat.get_latex_mat(s, row_par)NEWLINE        self.assertEqual(m, "2*[x+n] & 3(int x dx) \\\\ sqrt(x) & a")NEWLINENEWLINE    def test_check_get_mat_fail_1(self):NEWLINE        s = "\\left[2*[x+n], 3(int x dx)\\right, \\left[sqrt(x), a\\right]"NEWLINE        b, row_par = UtilsMat.check_mat(s)NEWLINE        self.assertFalse(b)NEWLINE        self.assertEqual(row_par, [])NEWLINE        m = UtilsMat.get_latex_mat(s, row_par)NEWLINE        self.assertNotEqual(m, "2*[x+n] & 3(int x dx) \\\\ sqrt(x) & a")NEWLINENEWLINE    def test_get_row_par_1(self):NEWLINE        s = "{1+2]"NEWLINE        i, row_par = UtilsMat.get_row_par(s)NEWLINE        self.assertEqual(i, -1)NEWLINE        self.assertEqual(row_par, [])NEWLINENEWLINE    def test_get_row_par_2(self):NEWLINE        s = 
"{1+2]"NEWLINE ok, row_par = UtilsMat.check_mat(s)NEWLINE self.assertFalse(ok)NEWLINE self.assertEqual(row_par, [])NEWLINENEWLINE def test_get_mathml_mat_1(self):NEWLINE s = "<mrow><mo>[</mo><mrow><mn>1</mn><mo>,</mo><mn>2</mn></mrow></mrow>"NEWLINE ok, row_par = UtilsMat.check_mat(s)NEWLINE self.assertFalse(ok)NEWLINE self.assertEqual(row_par, [])NEWLINE mat = UtilsMat.get_mathml_mat(s, row_par)NEWLINE self.assertEqual(s, mat)NEWLINENEWLINE def test_get_mathml_mat_2(self):NEWLINE s = "<mrow><mo>[</mo><mrow><mn>1</mn><mo>,</mo><mo>[</mo><mrow><mn>2</mn></mrow><mo>]</mo></mrow><mo>]</mo></mrow>"NEWLINE ok, row_par = UtilsMat.check_mat(s)NEWLINE self.assertFalse(ok)NEWLINE self.assertEqual(row_par, [])NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE unittest.main()NEWLINE
from struct import unpackNEWLINENEWLINENEWLINEclass FloatLookup:NEWLINE def __init__(self, data): # data: Tuple[float]NEWLINE self.data = dataNEWLINE self.range_ = len(data)NEWLINENEWLINE @staticmethodNEWLINE def get_lookup_from_double(file) -> 'FloatLookup':NEWLINE range_, = unpack('>i', file.read(4))NEWLINE values = tuple(unpack('>{}d'.format(range_), file.read(range_ * 8)))NEWLINE return FloatLookup(values)NEWLINENEWLINE def get(self, n: int) -> float:NEWLINE if 0 <= n < self.range_:NEWLINE return self.data[n]NEWLINE else:NEWLINE raise ValueError("Value is out of range")
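NEWLINENEWLINENEWLINE# A minimal usage sketch (hypothetical data, not part of the original module):NEWLINE# build the byte layout get_lookup_from_double expects -- a big-endian int32NEWLINE# count followed by that many big-endian float64 values.NEWLINEif __name__ == "__main__":NEWLINE    import ioNEWLINE    from struct import packNEWLINENEWLINE    buf = io.BytesIO(pack('>i', 3) + pack('>3d', 0.25, 0.5, 0.75))NEWLINE    lookup = FloatLookup.get_lookup_from_double(buf)NEWLINE    assert lookup.get(1) == 0.5NEWLINENEWLINE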
import unittestNEWLINEimport osNEWLINEfrom ...BaseTestCase import BaseTestCaseNEWLINEfrom kombi.Task import TaskNEWLINEfrom kombi.Crawler.Fs import FsCrawlerNEWLINENEWLINEclass ResizeImageTaskTest(BaseTestCase):NEWLINE """Test ResizeImage task."""NEWLINENEWLINE __sourcePath = os.path.join(BaseTestCase.dataTestsDirectory(), "testSeq.0001.exr")NEWLINE __targetPath = os.path.join(BaseTestCase.tempDirectory(), "testToDelete.jpg")NEWLINENEWLINE def testResizeImage(self):NEWLINE """NEWLINE Test that the ResizeImage task works properly.NEWLINE """NEWLINE crawler = FsCrawler.createFromPath(self.__sourcePath)NEWLINE resizeTask = Task.create('resizeImage')NEWLINE resizeTask.add(crawler, self.__targetPath)NEWLINE resizeTask.setOption("width", "480")NEWLINE resizeTask.setOption("height", "270")NEWLINE for convertToRGBA in [False, True]:NEWLINE resizeTask.setOption("convertToRGBA", convertToRGBA)NEWLINE result = resizeTask.output()NEWLINE self.assertEqual(len(result), 1)NEWLINE crawler = result[0]NEWLINE self.assertEqual(crawler.var("width"), 480)NEWLINE self.assertEqual(crawler.var("height"), 270)NEWLINENEWLINE @classmethodNEWLINE def tearDownClass(cls):NEWLINE """NEWLINE Remove the file that was copied.NEWLINE """NEWLINE os.remove(cls.__targetPath)NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE unittest.main()NEWLINE
"""NEWLINE :codeauthor: Megan Wilhite<[email protected]>NEWLINE"""NEWLINENEWLINENEWLINEimport pytestNEWLINEimport salt.modules.mac_service as mac_serviceNEWLINEfrom salt.exceptions import CommandExecutionErrorNEWLINEfrom tests.support.mixins import LoaderModuleMockMixinNEWLINEfrom tests.support.mock import MagicMock, patchNEWLINEfrom tests.support.unit import TestCaseNEWLINENEWLINENEWLINEclass MacServiceTestCase(TestCase, LoaderModuleMockMixin):NEWLINE """NEWLINE TestCase for salt.modules.mac_service moduleNEWLINE """NEWLINENEWLINE def setup_loader_modules(self):NEWLINE return {mac_service: {"__context__": {}}}NEWLINENEWLINE def test_service_disabled_when_enabled(self):NEWLINE """NEWLINE test service.disabled when service is enabledNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE cmd = 'disabled services = {\n\t"com.saltstack.salt.minion" => false\n\t"com.apple.atrun" => false\n{'NEWLINE domain_ret = MagicMock(return_value=("", ""))NEWLINE with patch.object(mac_service, "_get_domain_target", domain_ret):NEWLINE with patch.object(mac_service, "launchctl", MagicMock(return_value=cmd)):NEWLINE assert mac_service.disabled(srv_name) is FalseNEWLINENEWLINE def test_service_disabled_when_disabled(self):NEWLINE """NEWLINE test service.disabled when service is disabledNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE cmd = 'disabled services = {\n\t"com.saltstack.salt.minion" => false\n\t"com.apple.atrun" => true\n{'NEWLINE domain_ret = MagicMock(return_value=("", ""))NEWLINE with patch.object(mac_service, "_get_domain_target", domain_ret):NEWLINE with patch.object(mac_service, "launchctl", MagicMock(return_value=cmd)):NEWLINE assert mac_service.disabled(srv_name) is TrueNEWLINENEWLINE def test_service_disabled_srvname_wrong(self):NEWLINE """NEWLINE test service.disabled when service is just slightly wrongNEWLINE """NEWLINE srv_names = ["com.apple.atru", "com", "apple"]NEWLINE cmd = 'disabled services = {\n\t"com.saltstack.salt.minion" => false\n\t"com.apple.atrun" => true\n}'NEWLINE domain_ret = MagicMock(return_value=("", ""))NEWLINE with patch.object(mac_service, "_get_domain_target", domain_ret):NEWLINE for name in srv_names:NEWLINE with patch.object(NEWLINE mac_service, "launchctl", MagicMock(return_value=cmd)NEWLINE ):NEWLINE assert mac_service.disabled(name) is FalseNEWLINENEWLINE def test_service_disabled_status_upper_case(self):NEWLINE """NEWLINE test service.disabled when disabled status is uppercaseNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE cmd = 'disabled services = {\n\t"com.saltstack.salt.minion" => false\n\t"com.apple.atrun" => True\n{'NEWLINE domain_ret = MagicMock(return_value=("", ""))NEWLINE with patch.object(mac_service, "_get_domain_target", domain_ret):NEWLINE with patch.object(mac_service, "launchctl", MagicMock(return_value=cmd)):NEWLINE assert mac_service.disabled(srv_name) is TrueNEWLINENEWLINE def test_service_enabled_when_enabled(self):NEWLINE """NEWLINE test service.enabled when not disabledNEWLINE """NEWLINE mock_cmd = MagicMock(return_value=False)NEWLINE with patch.dict(mac_service.__salt__, {"service.disabled": mock_cmd}):NEWLINE assert mac_service.enabled("com.apple.atrun") is TrueNEWLINENEWLINE def test_service_enabled_when_disabled(self):NEWLINE """NEWLINE test service.enabled if service is disabledNEWLINE """NEWLINE mock_cmd = MagicMock(return_value=True)NEWLINE with patch.dict(mac_service.__salt__, {"service.disabled": mock_cmd}):NEWLINE assert mac_service.enabled("com.apple.atrun") is FalseNEWLINENEWLINE def 
test_service_loaded_when_true(self):NEWLINE """NEWLINE test service.loaded with a loaded service.NEWLINE """NEWLINE mock_cmd = MagicMock(return_value="some_service_string")NEWLINE with patch.dict(mac_service.__salt__, {"service.list": mock_cmd}):NEWLINE assert mac_service.loaded("com.apple.atrun") is TrueNEWLINENEWLINE def test_service_loaded_when_false(self):NEWLINE """NEWLINE test service.loaded with an unloaded service.NEWLINE """NEWLINE mock_cmd = MagicMock(side_effect=CommandExecutionError)NEWLINE with patch.dict(mac_service.__salt__, {"service.list": mock_cmd}):NEWLINE assert mac_service.loaded("com.apple.atrun") is FalseNEWLINENEWLINE def test_service_keep_alive_pathstate_file_rm(self):NEWLINE """NEWLINE test _always_running_service when keep_aliveNEWLINE has pathstate set in plist file and file doesn't existNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": {"PathState": {"/private/etc/ntp.conf": True}},NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE with patch("os.path.exists", MagicMock(return_value=False)):NEWLINE assert mac_service._always_running_service(srv_name) is FalseNEWLINENEWLINE def test_service_keep_alive_empty(self):NEWLINE """NEWLINE test _always_running_service when keep_aliveNEWLINE is emptyNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": {},NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE with patch("os.path.exists", MagicMock(return_value=False)):NEWLINE assert mac_service._always_running_service(srv_name) is FalseNEWLINENEWLINE def test_service_keep_alive_pathstate_false(self):NEWLINE """NEWLINE test _always_running_service when keep_aliveNEWLINE has pathstate set in plist file and file is falseNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": {"PathState": {"/private/etc/ntp.conf": False}},NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE with patch("os.path.exists", MagicMock(return_value=False)):NEWLINE assert mac_service._always_running_service(srv_name) is TrueNEWLINENEWLINE def test_service_keep_alive_pathstate(self):NEWLINE """NEWLINE test _always_running_service when keep_aliveNEWLINE has pathstate set in plist fileNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": {"PathState": {"/private/etc/ntp.conf": True}},NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE with patch("os.path.exists", MagicMock(return_value=True)):NEWLINE assert mac_service._always_running_service(srv_name) is TrueNEWLINENEWLINE def test_service_keep_alive(self):NEWLINE """NEWLINE test _always_running_service when keep_alive setNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE 
"ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": True,NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE assert mac_service._always_running_service(srv_name) is TrueNEWLINENEWLINE def test_service_keep_alive_false(self):NEWLINE """NEWLINE test _always_running_service when keep_alive FalseNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": False,NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE assert mac_service._always_running_service(srv_name) is FalseNEWLINENEWLINE def test_service_keep_alive_missing(self):NEWLINE """NEWLINE test _always_running_service when keep_alive not in dictNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE assert mac_service._always_running_service(srv_name) is FalseNEWLINENEWLINE def test_service_keep_alive_wrong_setting(self):NEWLINE """NEWLINE test _always_running_service when keep_aliveNEWLINE has pathstate set in plist fileNEWLINE """NEWLINE srv_name = "com.apple.atrun"NEWLINE info = {NEWLINE "plist": {NEWLINE "EnableTransactions": True,NEWLINE "ProgramArguments": ["/usr/libexec/ntpd-wrapper"],NEWLINE "Label": "org.ntp.ntpd",NEWLINE "KeepAlive": {"Doesnotexist": {"doesnt_exist": True}},NEWLINE }NEWLINE }NEWLINENEWLINE with patch.object(mac_service, "show", MagicMock(return_value=info)):NEWLINE assert mac_service._always_running_service(srv_name) is FalseNEWLINENEWLINE def test_service_name_change_salt_minion(self):NEWLINE srv_name = "salt-minion"NEWLINE info = {NEWLINE "com.saltstack.salt.minion": {NEWLINE "file_name": "com.saltstack.salt.minion.plist",NEWLINE "file_path": "/Library/LaunchDaemons/com.saltstack.salt.minion.plist",NEWLINE "plist": {NEWLINE "HardResourceLimits": {"NumberOfFiles": 100000},NEWLINE "KeepAlive": True,NEWLINE "Label": "com.saltstack.salt.minion",NEWLINE "ProgramArguments": ["/opt/salt/bin/start-salt-minion.sh"],NEWLINE "RunAtLoad": True,NEWLINE "SoftResourceLimits": {"NumberOfFiles": 100000},NEWLINE },NEWLINE }NEWLINE }NEWLINE with patch.dict(NEWLINE mac_service.__utils__,NEWLINE {"mac_utils.available_services": MagicMock(return_value=info)},NEWLINE ):NEWLINE assert (NEWLINE mac_service._get_service(srv_name) == info["com.saltstack.salt.minion"]NEWLINE )NEWLINENEWLINE def test_service_name_change_salt_master(self):NEWLINE srv_name = "salt-master"NEWLINE info = {NEWLINE "com.saltstack.salt.master": {NEWLINE "file_name": "com.saltstack.salt.master.plist",NEWLINE "file_path": "/Library/LaunchDaemons/com.saltstack.salt.master.plist",NEWLINE "plist": {NEWLINE "HardResourceLimits": {"NumberOfFiles": 100000},NEWLINE "KeepAlive": True,NEWLINE "Label": "com.saltstack.salt.master",NEWLINE "ProgramArguments": ["/opt/salt/bin/start-salt-master.sh"],NEWLINE "RunAtLoad": True,NEWLINE "SoftResourceLimits": {"NumberOfFiles": 100000},NEWLINE },NEWLINE }NEWLINE }NEWLINE with patch.dict(NEWLINE mac_service.__utils__,NEWLINE {"mac_utils.available_services": MagicMock(return_value=info)},NEWLINE ):NEWLINE assert (NEWLINE 
mac_service._get_service(srv_name) == info["com.saltstack.salt.master"]NEWLINE )NEWLINENEWLINE def test_service_name_change_salt_api(self):NEWLINE srv_name = "salt-api"NEWLINE info = {NEWLINE "com.saltstack.salt.api": {NEWLINE "file_name": "com.saltstack.salt.api.plist",NEWLINE "file_path": "/Library/LaunchDaemons/com.saltstack.salt.api.plist",NEWLINE "plist": {NEWLINE "HardResourceLimits": {"NumberOfFiles": 100000},NEWLINE "KeepAlive": True,NEWLINE "Label": "com.saltstack.salt.api",NEWLINE "ProgramArguments": ["/opt/salt/bin/start-salt-api.sh"],NEWLINE "RunAtLoad": True,NEWLINE "SoftResourceLimits": {"NumberOfFiles": 100000},NEWLINE },NEWLINE }NEWLINE }NEWLINE with patch.dict(NEWLINE mac_service.__utils__,NEWLINE {"mac_utils.available_services": MagicMock(return_value=info)},NEWLINE ):NEWLINE assert mac_service._get_service(srv_name) == info["com.saltstack.salt.api"]NEWLINENEWLINE def test_service_name_change_salt_syndic(self):NEWLINE srv_name = "salt-syndic"NEWLINE info = {NEWLINE "com.saltstack.salt.syndic": {NEWLINE "file_name": "com.saltstack.salt.syndic.plist",NEWLINE "file_path": "/Library/LaunchDaemons/com.saltstack.salt.syndic.plist",NEWLINE "plist": {NEWLINE "HardResourceLimits": {"NumberOfFiles": 100000},NEWLINE "KeepAlive": True,NEWLINE "Label": "com.saltstack.salt.syndic",NEWLINE "ProgramArguments": ["/opt/salt/bin/start-salt-syndic.sh"],NEWLINE "RunAtLoad": True,NEWLINE "SoftResourceLimits": {"NumberOfFiles": 100000},NEWLINE },NEWLINE }NEWLINE }NEWLINE with patch.dict(NEWLINE mac_service.__utils__,NEWLINE {"mac_utils.available_services": MagicMock(return_value=info)},NEWLINE ):NEWLINE assert (NEWLINE mac_service._get_service(srv_name) == info["com.saltstack.salt.syndic"]NEWLINE )NEWLINENEWLINE def test_service_restart_already_loaded(self):NEWLINE mock_cmd = MagicMock(return_value=True)NEWLINE salt_dict = {NEWLINE "service.loaded": mock_cmd,NEWLINE "service.stop": mock_cmd,NEWLINE "service.start": mock_cmd,NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.restart("com.salt") is TrueNEWLINENEWLINE def test_service_restart_not_loaded(self):NEWLINE salt_dict = {NEWLINE "service.loaded": MagicMock(return_value=False),NEWLINE "service.start": MagicMock(return_value=True),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.restart("com.salt") is TrueNEWLINENEWLINE def test_service_restart_failed_stop(self):NEWLINE salt_dict = {NEWLINE "service.loaded": MagicMock(return_value=True),NEWLINE "service.stop": MagicMock(side_effect=CommandExecutionError),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE with pytest.raises(CommandExecutionError):NEWLINE assert mac_service.restart("com.salt")NEWLINENEWLINE def test_service_restart_failed_start(self):NEWLINE salt_dict = {NEWLINE "service.loaded": MagicMock(return_value=False),NEWLINE "service.start": MagicMock(side_effect=CommandExecutionError),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE with pytest.raises(CommandExecutionError):NEWLINE assert mac_service.restart("com.salt")NEWLINENEWLINE def test_service_status_no_service(self):NEWLINE """NEWLINE Test service status with no service foundNEWLINE """NEWLINE with patch.object(NEWLINE mac_service, "_get_service", MagicMock(side_effect=CommandExecutionError)NEWLINE ):NEWLINE assert mac_service.status("com.salt") is FalseNEWLINENEWLINE @patch.object(mac_service, "_launch_agent", lambda _: False)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": 
""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: True)NEWLINE def test_service_status_on_daemon_with_pid(self):NEWLINE """NEWLINE Test service status on dameon with PID.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "System";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 0;\n\t"PID" = 218;\n\t"Program" = "/opt/salt";\n\t\t"--disable-keepalive";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(return_value=mock_service_list),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.status("com.salt") is TrueNEWLINENEWLINE @patch.object(mac_service, "_launch_agent", lambda _: True)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": ""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: True)NEWLINE def test_service_status_on_agent_with_pid(self):NEWLINE """NEWLINE Test service status on LaunchAgent with PID.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "Aqua";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 19968;\n\t"PID" = 218;\n\t"Program" = "/opt/salt";\n\t"ProgramArguments" = (\n\t\t"/opt/salt";\n\t\t"--syslog";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(return_value=mock_service_list),NEWLINE }NEWLINE utils_dict = {NEWLINE "mac_utils.console_user": MagicMock(return_value="spongebob"),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE with patch.dict(mac_service.__utils__, utils_dict):NEWLINE assert mac_service.status("com.salt") is TrueNEWLINENEWLINE @patch.object(mac_service, "_launch_agent", lambda _: True)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": ""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: True)NEWLINE def test_service_status_on_agent_with_no_pid_and_should_be_running(self):NEWLINE """NEWLINE Test service status on LaunchAgent with No PID and should be running.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "Aqua";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 19968;\n\t"Program" = "/opt/salt";\n\t"ProgramArguments" = (\n\t\t"/opt/salt";\n\t\t"--syslog";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(return_value=mock_service_list),NEWLINE }NEWLINE utils_dict = {NEWLINE "mac_utils.console_user": MagicMock(return_value="spongebob"),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE with patch.dict(mac_service.__utils__, utils_dict):NEWLINE assert mac_service.status("com.salt") is FalseNEWLINENEWLINE @patch.object(mac_service, "_launch_agent", lambda _: False)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": ""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: True)NEWLINE def test_service_status_on_daemon_with_no_pid_and_should_be_running(self):NEWLINE """NEWLINE Test service status on LaunchDaemon with no PID and anNEWLINE always running service that is loaded.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "System";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 19968;\n\t"Program" = "/opt/salt.sh";\n\t"ProgramArguments" = (\n\t\t"/opt/salt.sh";\n\t\t"--disable-keepalive";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(return_value=mock_service_list),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.status("com.salt") is FalseNEWLINENEWLINE 
@patch.object(mac_service, "_launch_agent", lambda _: False)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": ""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: False)NEWLINE def test_service_status_on_daemon_with_no_pid_and_not_always_running(self):NEWLINE """NEWLINE Test service status on LaunchDaemon with no PID and not an alwaysNEWLINE running service.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "System";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 19968;\n\t"Program" = "/opt/salt.sh";\n\t"ProgramArguments" = (\n\t\t"/opt/salt.sh";\n\t\t"--disable-keepalive";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(return_value=mock_service_list),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.status("com.salt") is TrueNEWLINENEWLINE @patch.object(mac_service, "_launch_agent", lambda _: False)NEWLINE @patch.object(mac_service, "_get_service", lambda _: {"": ""})NEWLINE @patch.object(mac_service, "_always_running_service", lambda _: False)NEWLINE def test_service_status_on_daemon_with_failing_list_check(self):NEWLINE """NEWLINE Test service status on LaunchDaemon when the service.listNEWLINE check raises a CommandExecutionError.NEWLINE """NEWLINE mock_service_list = '{\n\t"LimitLoadToSessionType" = "System";\n\t"Label" = "com.salt";\n\t"OnDemand" = false;\n\t"LastExitStatus" = 19968;\n\t"Program" = "/opt/salt.sh";\n\t"ProgramArguments" = (\n\t\t"/opt/salt.sh";\n\t\t"--disable-keepalive";\n\t);\n};'NEWLINE salt_dict = {NEWLINE "service.list": MagicMock(side_effect=CommandExecutionError),NEWLINE }NEWLINE with patch.dict(mac_service.__salt__, salt_dict):NEWLINE assert mac_service.status("com.salt") is FalseNEWLINENEWLINE def test_get_service_on_service_dead(self):NEWLINE """NEWLINE Test service.dead changes.NEWLINE https://github.com/saltstack/salt/issues/57907NEWLINE """NEWLINE utils_dict = {NEWLINE "mac_utils.available_services": MagicMock(return_value={}),NEWLINE }NEWLINE context_dict = {NEWLINE "using_cached_services": True,NEWLINE "service.state": "dead",NEWLINE }NEWLINE name_in_service = MagicMock(side_effect=[{}, {"com.salt": True}])NEWLINE with patch.dict(mac_service.__utils__, utils_dict):NEWLINE with patch.object(mac_service, "_name_in_services", name_in_service):NEWLINE with patch.dict(mac_service.__context__, context_dict):NEWLINE with pytest.raises(CommandExecutionError):NEWLINE assert mac_service._get_service("com.salt")NEWLINE # find the service on a second go with no service.deadNEWLINE with patch.dict(mac_service.__context__, {}):NEWLINE assert mac_service._get_service("com.salt") == {"com.salt": True}NEWLINE
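# Context note (illustrative, not from the test module): the mock_service_list strings above mimic the plist-style output of `launchctl list <label>`, e.g.NEWLINE#NEWLINE# {NEWLINE#     "LimitLoadToSessionType" = "System";NEWLINE#     "Label" = "com.salt";NEWLINE#     "PID" = 218;NEWLINE# };NEWLINE#NEWLINE# The tests rely on status() reporting True when a "PID" = <n>; entry isNEWLINE# present, and False when an always-running service has no PID entry.NEWLINE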
# coding=utf-8NEWLINE# --------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root forNEWLINE# license information.NEWLINE#NEWLINE# Code generated by Microsoft (R) AutoRest Code Generator.NEWLINE# Changes may cause incorrect behavior and will be lost if the code isNEWLINE# regenerated.NEWLINE# --------------------------------------------------------------------------NEWLINENEWLINEfrom msrest.service_client import SDKClientNEWLINEfrom msrest import Serializer, DeserializerNEWLINENEWLINEfrom ._configuration import ContainerRegistryManagementClientConfigurationNEWLINEfrom .operations import RegistriesOperationsNEWLINEfrom .operations import OperationsNEWLINEfrom .operations import ReplicationsOperationsNEWLINEfrom .operations import WebhooksOperationsNEWLINEfrom .operations import RunsOperationsNEWLINEfrom .operations import TasksOperationsNEWLINEfrom . import modelsNEWLINENEWLINENEWLINEclass ContainerRegistryManagementClient(SDKClient):NEWLINE """ContainerRegistryManagementClientNEWLINENEWLINE :ivar config: Configuration for client.NEWLINE :vartype config: ContainerRegistryManagementClientConfigurationNEWLINENEWLINE :ivar registries: Registries operationsNEWLINE :vartype registries: azure.mgmt.containerregistry.v2019_05_01.operations.RegistriesOperationsNEWLINE :ivar operations: Operations operationsNEWLINE :vartype operations: azure.mgmt.containerregistry.v2019_05_01.operations.OperationsNEWLINE :ivar replications: Replications operationsNEWLINE :vartype replications: azure.mgmt.containerregistry.v2019_05_01.operations.ReplicationsOperationsNEWLINE :ivar webhooks: Webhooks operationsNEWLINE :vartype webhooks: azure.mgmt.containerregistry.v2019_05_01.operations.WebhooksOperationsNEWLINE :ivar runs: Runs operationsNEWLINE :vartype runs: azure.mgmt.containerregistry.v2019_05_01.operations.RunsOperationsNEWLINE :ivar tasks: Tasks operationsNEWLINE :vartype tasks: azure.mgmt.containerregistry.v2019_05_01.operations.TasksOperationsNEWLINENEWLINE :param credentials: Credentials needed for the client to connect to Azure.NEWLINE :type credentials: :mod:`A msrestazure CredentialsNEWLINE object<msrestazure.azure_active_directory>`NEWLINE :param subscription_id: The Microsoft Azure subscription ID.NEWLINE :type subscription_id: strNEWLINE :param str base_url: Service URLNEWLINE """NEWLINENEWLINE def __init__(NEWLINE self, credentials, subscription_id, base_url=None):NEWLINENEWLINE self.config = ContainerRegistryManagementClientConfiguration(credentials, subscription_id, base_url)NEWLINE super(ContainerRegistryManagementClient, self).__init__(self.config.credentials, self.config)NEWLINENEWLINE client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}NEWLINE self._serialize = Serializer(client_models)NEWLINE self._deserialize = Deserializer(client_models)NEWLINENEWLINE self.registries = RegistriesOperations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE self.operations = Operations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE self.replications = ReplicationsOperations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE self.webhooks = WebhooksOperations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE self.runs = RunsOperations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE 
self.tasks = TasksOperations(NEWLINE self._client, self.config, self._serialize, self._deserialize)NEWLINE
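# A minimal usage sketch (not part of the generated client code). The credential values below are placeholders, and ServicePrincipalCredentials is the msrestazure credential type this generation targets.NEWLINE#NEWLINE# from msrestazure.azure_active_directory import ServicePrincipalCredentialsNEWLINE#NEWLINE# credentials = ServicePrincipalCredentials(NEWLINE#     client_id='<client-id>', secret='<client-secret>', tenant='<tenant-id>')NEWLINE# client = ContainerRegistryManagementClient(credentials, '<subscription-id>')NEWLINE# for registry in client.registries.list():  # every registry in the subscriptionNEWLINE#     print(registry.name)NEWLINE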
class RenderContentBase:NEWLINE '''NEWLINE This defines the data common for all content type renderersNEWLINE '''NEWLINENEWLINE def __init__(self, question_bundle):NEWLINE self.question_list = question_bundle.splitlines()NEWLINENEWLINE self.close_tokens = ')」}>』'NEWLINE self.tokens = '(「{<『'NEWLINENEWLINE self.ts_reading = '('NEWLINE self.te_reading = ')'NEWLINE self.ts_writing = '「'NEWLINE self.te_writing = '」'NEWLINE self.ts_furi = '『'NEWLINE self.te_furi = '』'NEWLINE self.ts_combo = '{'NEWLINE self.te_combo = '}'NEWLINE self.ts_bonus = '<'NEWLINE self.te_bonus = '>'NEWLINE self.split = '|'NEWLINE
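# A hypothetical subclass sketch (not from the original code) showing how the delimiter tokens above could be used: pull the reading span, i.e. the text between ts_reading '(' and te_reading ')', out of one question line.NEWLINEclass RenderReadingExample(RenderContentBase):NEWLINENEWLINE def extract_reading(self, line):NEWLINE '''Return the reading portion of a line, or '' when none is marked.'''NEWLINE start = line.find(self.ts_reading)NEWLINE end = line.find(self.te_reading, start + 1)NEWLINE if start == -1 or end == -1:NEWLINE return ''NEWLINE return line[start + 1:end]NEWLINENEWLINE# Usage: RenderReadingExample('漢字(かんじ)「kanji」').extract_reading('漢字(かんじ)「kanji」') returns 'かんじ'NEWLINE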
# Licensed under a 3-clause BSD style license - see LICENSE.rstNEWLINENEWLINE"""NEWLINEThis module provides utility functions for the models packageNEWLINE"""NEWLINENEWLINENEWLINEfrom collections import dequeNEWLINEfrom collections.abc import MutableMappingNEWLINEfrom inspect import signatureNEWLINENEWLINEimport numpy as npNEWLINENEWLINENEWLINEfrom ..utils import isiterable, check_broadcastNEWLINEfrom ..utils.compat import NUMPY_LT_1_14NEWLINENEWLINEfrom .. import units as uNEWLINENEWLINE__all__ = ['ExpressionTree', 'AliasDict', 'check_broadcast',NEWLINE 'poly_map_domain', 'comb', 'ellipse_extent']NEWLINENEWLINENEWLINEclass ExpressionTree:NEWLINE __slots__ = ['left', 'right', 'value', 'inputs', 'outputs']NEWLINENEWLINE def __init__(self, value, left=None, right=None, inputs=None, outputs=None):NEWLINE self.value = valueNEWLINE self.inputs = inputsNEWLINE self.outputs = outputsNEWLINE self.left = leftNEWLINENEWLINE # Two subtrees can't be the same *object* or else traverse_postorderNEWLINE # breaks, so we just always copy the right subtree to subvert that.NEWLINE if right is not None and left is right:NEWLINE right = right.copy()NEWLINENEWLINE self.right = rightNEWLINENEWLINE def __getstate__(self):NEWLINE # For some reason the default pickle protocol on Python 2 does not justNEWLINE # do this. On Python 3 it's not a problem.NEWLINE return dict((slot, getattr(self, slot)) for slot in self.__slots__)NEWLINENEWLINE def __setstate__(self, state):NEWLINE for slot, value in state.items():NEWLINE setattr(self, slot, value)NEWLINENEWLINE @staticmethodNEWLINE def _recursive_lookup(branch, adict, key):NEWLINE if isinstance(branch, ExpressionTree):NEWLINE return adict[key]NEWLINE else:NEWLINE return branch, keyNEWLINENEWLINE @propertyNEWLINE def inputs_map(self):NEWLINE """NEWLINE Map the names of the inputs to this ExpressionTree to the inputs to the leaf models.NEWLINE """NEWLINE inputs_map = {}NEWLINE if not isinstance(self.value, str): # If we don't have an operator the mapping is trivialNEWLINE return {inp: (self.value, inp) for inp in self.inputs}NEWLINENEWLINE elif self.value == '|':NEWLINE for inp in self.inputs:NEWLINE m, inp2 = self._recursive_lookup(self.left, self.left.inputs_map, inp)NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE elif self.value == '&':NEWLINE for i, inp in enumerate(self.inputs):NEWLINE if i < len(self.left.inputs): # Get from leftNEWLINE m, inp2 = self._recursive_lookup(self.left,NEWLINE self.left.inputs_map,NEWLINE self.left.inputs[i])NEWLINE inputs_map[inp] = m, inp2NEWLINE else: # Get from rightNEWLINE m, inp2 = self._recursive_lookup(self.right,NEWLINE self.right.inputs_map,NEWLINE self.right.inputs[i - len(self.left.inputs)])NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE else:NEWLINE for inp in self.left.inputs:NEWLINE m, inp2 = self._recursive_lookup(self.left, self.left.inputs_map, inp)NEWLINE inputs_map[inp] = m, inp2NEWLINENEWLINE return inputs_mapNEWLINENEWLINE @propertyNEWLINE def outputs_map(self):NEWLINE """NEWLINE Map the names of the outputs to this ExpressionTree to the outputs to the leaf models.NEWLINE """NEWLINE outputs_map = {}NEWLINE if not isinstance(self.value, str): # If we don't have an operator the mapping is trivialNEWLINE return {out: (self.value, out) for out in self.outputs}NEWLINENEWLINE elif self.value == '|':NEWLINE for out in self.outputs:NEWLINE m, out2 = self._recursive_lookup(self.right, self.right.outputs_map, out)NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE elif self.value == '&':NEWLINE for i, out in 
enumerate(self.outputs):NEWLINE if i < len(self.left.outputs): # Get from leftNEWLINE m, out2 = self._recursive_lookup(self.left,NEWLINE self.left.outputs_map,NEWLINE self.left.outputs[i])NEWLINE outputs_map[out] = m, out2NEWLINE else: # Get from rightNEWLINE m, out2 = self._recursive_lookup(self.right,NEWLINE self.right.outputs_map,NEWLINE self.right.outputs[i - len(self.left.outputs)])NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE else:NEWLINE for out in self.left.outputs:NEWLINE m, out2 = self._recursive_lookup(self.left, self.left.outputs_map, out)NEWLINE outputs_map[out] = m, out2NEWLINENEWLINE return outputs_mapNEWLINENEWLINE @propertyNEWLINE def isleaf(self):NEWLINE return self.left is None and self.right is NoneNEWLINENEWLINE def traverse_preorder(self):NEWLINE stack = deque([self])NEWLINE while stack:NEWLINE node = stack.pop()NEWLINE yield nodeNEWLINENEWLINE if node.right is not None:NEWLINE stack.append(node.right)NEWLINE if node.left is not None:NEWLINE stack.append(node.left)NEWLINENEWLINE def traverse_inorder(self):NEWLINE stack = deque()NEWLINE node = selfNEWLINE while stack or node is not None:NEWLINE if node is not None:NEWLINE stack.append(node)NEWLINE node = node.leftNEWLINE else:NEWLINE node = stack.pop()NEWLINE yield nodeNEWLINE node = node.rightNEWLINENEWLINE def traverse_postorder(self):NEWLINE stack = deque([self])NEWLINE last = NoneNEWLINE while stack:NEWLINE node = stack[-1]NEWLINE if last is None or node is last.left or node is last.right:NEWLINE if node.left is not None:NEWLINE stack.append(node.left)NEWLINE elif node.right is not None:NEWLINE stack.append(node.right)NEWLINE elif node.left is last and node.right is not None:NEWLINE stack.append(node.right)NEWLINE else:NEWLINE yield stack.pop()NEWLINE last = nodeNEWLINENEWLINE def evaluate(self, operators, getter=None, start=0, stop=None):NEWLINE """Evaluate the expression represented by this tree.NEWLINENEWLINE ``Operators`` should be a dictionary mapping operator names ('tensor',NEWLINE 'product', etc.) to a function that implements that operator for theNEWLINE correct number of operands.NEWLINENEWLINE If given, ``getter`` is a function evaluated on each *leaf* node'sNEWLINE value before applying the operator between them. This could be used,NEWLINE for example, to operate on an attribute of the node values rather thanNEWLINE directly on the node values. 
The ``getter`` is passed both the indexNEWLINE of the leaf (a count starting at 0 that is incremented after each leafNEWLINE is found) and the leaf node itself.NEWLINENEWLINE The ``start`` and ``stop`` arguments allow evaluating a sub-expressionNEWLINE within the expression tree.NEWLINENEWLINE TODO: Document this better.NEWLINE """NEWLINENEWLINE stack = deque()NEWLINENEWLINE if getter is None:NEWLINE getter = lambda idx, value: valueNEWLINENEWLINE if start is None:NEWLINE start = 0NEWLINENEWLINE leaf_idx = 0NEWLINE for node in self.traverse_postorder():NEWLINE if node.isleaf:NEWLINE # For a "tree" containing just a single operator at the rootNEWLINE # Also push the index of this leaf onto the stack, which willNEWLINE # prove useful for evaluating subexpressionsNEWLINE stack.append((getter(leaf_idx, node.value), leaf_idx))NEWLINE leaf_idx += 1NEWLINE else:NEWLINE operator = operators[node.value]NEWLINENEWLINE if len(stack) < 2:NEWLINE # Skip this operator if there are not enough operands onNEWLINE # the stack; this can happen if some operands were skippedNEWLINE # when evaluating a sub-expressionNEWLINE continueNEWLINENEWLINE right = stack.pop()NEWLINE left = stack.pop()NEWLINE operands = []NEWLINENEWLINE for operand in (left, right):NEWLINE # idx is the leaf index; -1 if not a leaf nodeNEWLINE if operand[-1] == -1:NEWLINE operands.append(operand)NEWLINE else:NEWLINE operand, idx = operandNEWLINE if start <= idx and (stop is None or idx < stop):NEWLINE operands.append((operand, idx))NEWLINENEWLINE if len(operands) == 2:NEWLINE # evaluate the operator with the given operands and placeNEWLINE # the result on the stack (with -1 for the "leaf index"NEWLINE # since this result is not a leaf nodeNEWLINE left, right = operandsNEWLINE stack.append((operator(left[0], right[0]), -1))NEWLINE elif len(operands) == 0:NEWLINE # Just push the left one back on the stackNEWLINE # TODO: Explain and/or refactor this betterNEWLINE # This is here because even if both operands were "skipped"NEWLINE # due to being outside the (start, stop) range, we've onlyNEWLINE # skipped one operator. But there should be at least 2NEWLINE # operators involving these operands, so we push the oneNEWLINE # from the left back onto the stack so that the nextNEWLINE # operator will be skipped as well. 
Should probably comeNEWLINE # up with an easier to follow way to write this algorithmNEWLINE stack.append(left)NEWLINE else:NEWLINE # one or more of the operands was not included in theNEWLINE # sub-expression slice, so don't evaluate the operator;NEWLINE # instead place left over operands (if any) back on theNEWLINE # stack for later useNEWLINE stack.extend(operands)NEWLINENEWLINE return stack.pop()[0]NEWLINENEWLINE def copy(self):NEWLINE # Hopefully this won't blow the stack for any practical case; if such aNEWLINE # case arises that this won't work then I suppose we can find anNEWLINE # iterative approach.NEWLINENEWLINE children = []NEWLINE for child in (self.left, self.right):NEWLINE if isinstance(child, ExpressionTree):NEWLINE children.append(child.copy())NEWLINE else:NEWLINE children.append(child)NEWLINENEWLINE return self.__class__(self.value, left=children[0], right=children[1])NEWLINENEWLINE def format_expression(self, operator_precedence, format_leaf=None):NEWLINE leaf_idx = 0NEWLINE operands = deque()NEWLINENEWLINE if format_leaf is None:NEWLINE format_leaf = lambda i, l: '[{0}]'.format(i)NEWLINENEWLINE for node in self.traverse_postorder():NEWLINE if node.isleaf:NEWLINE operands.append(format_leaf(leaf_idx, node))NEWLINE leaf_idx += 1NEWLINE continueNEWLINENEWLINE oper_order = operator_precedence[node.value]NEWLINE right = operands.pop()NEWLINE left = operands.pop()NEWLINENEWLINE if (node.left is not None and not node.left.isleaf andNEWLINE operator_precedence[node.left.value] < oper_order):NEWLINE left = '({0})'.format(left)NEWLINE if (node.right is not None and not node.right.isleaf andNEWLINE operator_precedence[node.right.value] < oper_order):NEWLINE right = '({0})'.format(right)NEWLINENEWLINE operands.append(' '.join((left, node.value, right)))NEWLINENEWLINE return ''.join(operands)NEWLINENEWLINENEWLINEclass AliasDict(MutableMapping):NEWLINE """NEWLINE Creates a `dict` like object that wraps an existing `dict` or otherNEWLINE `MutableMapping`, along with a `dict` of *key aliases* that translateNEWLINE between specific keys in this dict to different keys in the underlyingNEWLINE dict.NEWLINENEWLINE In other words, keys that do not have an associated alias are accessed andNEWLINE stored like a normal `dict`. 
However, a key that has an alias is accessedNEWLINE and stored to the "parent" dict via the alias.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE parent : dict-likeNEWLINE The parent `dict` that aliased keys and accessed from and stored to.NEWLINENEWLINE aliases : dict-likeNEWLINE Maps keys in this dict to their associated keys in the parent dict.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINENEWLINE >>> parent = {'a': 1, 'b': 2, 'c': 3}NEWLINE >>> aliases = {'foo': 'a', 'bar': 'c'}NEWLINE >>> alias_dict = AliasDict(parent, aliases)NEWLINE >>> alias_dict['foo']NEWLINE 1NEWLINE >>> alias_dict['bar']NEWLINE 3NEWLINENEWLINE Keys in the original parent dict are not visible if they were notNEWLINE aliased::NEWLINENEWLINE >>> alias_dict['b']NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE KeyError: 'b'NEWLINENEWLINE Likewise, updates to aliased keys are reflected back in the parent dict::NEWLINENEWLINE >>> alias_dict['foo'] = 42NEWLINE >>> alias_dict['foo']NEWLINE 42NEWLINE >>> parent['a']NEWLINE 42NEWLINENEWLINE However, updates/insertions to keys that are *not* aliased are notNEWLINE reflected in the parent dict::NEWLINENEWLINE >>> alias_dict['qux'] = 99NEWLINE >>> alias_dict['qux']NEWLINE 99NEWLINE >>> 'qux' in parentNEWLINE FalseNEWLINENEWLINE In particular, updates on the `AliasDict` to a key that is equal toNEWLINE one of the aliased keys in the parent dict does *not* update the parentNEWLINE dict. For example, ``alias_dict`` aliases ``'foo'`` to ``'a'``. ButNEWLINE assigning to a key ``'a'`` on the `AliasDict` does not impact theNEWLINE parent::NEWLINENEWLINE >>> alias_dict['a'] = 'nope'NEWLINE >>> alias_dict['a']NEWLINE 'nope'NEWLINE >>> parent['a']NEWLINE 42NEWLINE """NEWLINENEWLINE _store_type = dictNEWLINE """NEWLINE Subclasses may override this to use other mapping types as the underlyingNEWLINE storage, for example an `OrderedDict`. 
However, even in this caseNEWLINE additional work may be needed to get things like the ordering right.NEWLINE """NEWLINENEWLINE def __init__(self, parent, aliases):NEWLINE self._parent = parentNEWLINE self._store = self._store_type()NEWLINE self._aliases = dict(aliases)NEWLINENEWLINE def __getitem__(self, key):NEWLINE if key in self._aliases:NEWLINE try:NEWLINE return self._parent[self._aliases[key]]NEWLINE except KeyError:NEWLINE raise KeyError(key)NEWLINENEWLINE return self._store[key]NEWLINENEWLINE def __setitem__(self, key, value):NEWLINE if key in self._aliases:NEWLINE self._parent[self._aliases[key]] = valueNEWLINE else:NEWLINE self._store[key] = valueNEWLINENEWLINE def __delitem__(self, key):NEWLINE if key in self._aliases:NEWLINE try:NEWLINE del self._parent[self._aliases[key]]NEWLINE except KeyError:NEWLINE raise KeyError(key)NEWLINE else:NEWLINE del self._store[key]NEWLINENEWLINE def __iter__(self):NEWLINE """NEWLINE First iterates over keys from the parent dict (if the aliased keys areNEWLINE present in the parent), followed by any keys in the local store.NEWLINE """NEWLINENEWLINE for key, alias in self._aliases.items():NEWLINE if alias in self._parent:NEWLINE yield keyNEWLINENEWLINE for key in self._store:NEWLINE yield keyNEWLINENEWLINE def __len__(self):NEWLINE # TODO:NEWLINE # This could be done more efficiently, but at present the use case forNEWLINE # it is narrow if non-existent.NEWLINE return len(list(iter(self)))NEWLINENEWLINE def __repr__(self):NEWLINE # repr() just like any other dict--this should look transparentNEWLINE store_copy = self._store_type()NEWLINE for key, alias in self._aliases.items():NEWLINE if alias in self._parent:NEWLINE store_copy[key] = self._parent[alias]NEWLINENEWLINE store_copy.update(self._store)NEWLINENEWLINE return repr(store_copy)NEWLINENEWLINENEWLINEclass _BoundingBox(tuple):NEWLINE """NEWLINE Base class for models with custom bounding box templates (methods thatNEWLINE return an actual bounding box tuple given some adjustable parameters--seeNEWLINE for example `~astropy.modeling.models.Gaussian1D.bounding_box`).NEWLINENEWLINE On these classes the ``bounding_box`` property still returns a `tuple`NEWLINE giving the default bounding box for that instance of the model. 
But thatNEWLINE tuple may also be a subclass of this class that is callable, and allowsNEWLINE a new tuple to be returned using a user-supplied value for any adjustableNEWLINE parameters to the bounding box.NEWLINE """NEWLINENEWLINE _model = NoneNEWLINENEWLINE def __new__(cls, input_, _model=None):NEWLINE self = super().__new__(cls, input_)NEWLINE if _model is not None:NEWLINE # Bind this _BoundingBox (most likely a subclass) to a ModelNEWLINE # instance so that its __call__ can access the modelNEWLINE self._model = _modelNEWLINENEWLINE return selfNEWLINENEWLINE def __call__(self, *args, **kwargs):NEWLINE raise NotImplementedError(NEWLINE "This bounding box is fixed by the model and does not have "NEWLINE "adjustable parameters.")NEWLINENEWLINE @classmethodNEWLINE def validate(cls, model, bounding_box):NEWLINE """NEWLINE Validate a given bounding box sequence against the given model (whichNEWLINE may be either a subclass of `~astropy.modeling.Model` or an instanceNEWLINE thereof, so long as the ``.inputs`` attribute is defined.NEWLINENEWLINE Currently this just checks that the bounding_box is either a 2-tupleNEWLINE of lower and upper bounds for 1-D models, or an N-tuple of 2-tuplesNEWLINE for N-D models.NEWLINENEWLINE This also returns a normalized version of the bounding_box input toNEWLINE ensure it is always an N-tuple (even for the 1-D case).NEWLINE """NEWLINENEWLINE nd = model.n_inputsNEWLINENEWLINE if nd == 1:NEWLINE if (not isiterable(bounding_box)NEWLINE or np.shape(bounding_box) not in ((2,), (1, 2))):NEWLINE raise ValueError(NEWLINE "Bounding box for {0} model must be a sequence of length "NEWLINE "2 consisting of a lower and upper bound, or a 1-tuple "NEWLINE "containing such a sequence as its sole element.".format(NEWLINE model.name))NEWLINENEWLINE if len(bounding_box) == 1:NEWLINE return cls((tuple(bounding_box[0]),))NEWLINE else:NEWLINE return cls(tuple(bounding_box))NEWLINE else:NEWLINE if (not isiterable(bounding_box)NEWLINE or np.shape(bounding_box) != (nd, 2)):NEWLINE raise ValueError(NEWLINE "Bounding box for {0} model must be a sequence of length "NEWLINE "{1} (the number of model inputs) consisting of pairs of "NEWLINE "lower and upper bounds for those inputs on which to "NEWLINE "evaluate the model.".format(model.name, nd))NEWLINENEWLINE return cls(tuple(bounds) for bounds in bounding_box)NEWLINENEWLINENEWLINEdef make_binary_operator_eval(oper, f, g):NEWLINE """NEWLINE Given a binary operator (as a callable of two arguments) ``oper`` andNEWLINE two callables ``f`` and ``g`` which accept the same arguments,NEWLINE returns a *new* function that takes the same arguments as ``f`` and ``g``,NEWLINE but passes the outputs of ``f`` and ``g`` in the given ``oper``.NEWLINENEWLINE ``f`` and ``g`` are assumed to return tuples (which may be 1-tuples). TheNEWLINE given operator is applied element-wise to tuple outputs).NEWLINENEWLINE ExampleNEWLINE -------NEWLINENEWLINE >>> from operator import addNEWLINE >>> def prod(x, y):NEWLINE ... 
return (x * y,)NEWLINE ...NEWLINE >>> sum_of_prod = make_binary_operator_eval(add, prod, prod)NEWLINE >>> sum_of_prod(3, 5)NEWLINE (30,)NEWLINE """NEWLINENEWLINE return lambda inputs, params: \NEWLINE tuple(oper(x, y) for x, y in zip(f(inputs, params),NEWLINE g(inputs, params)))NEWLINENEWLINENEWLINEdef poly_map_domain(oldx, domain, window):NEWLINE """NEWLINE Map domain into window by shifting and scaling.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE oldx : arrayNEWLINE original coordinatesNEWLINE domain : list or tuple of length 2NEWLINE function domainNEWLINE window : list or tuple of length 2NEWLINE range into which to map the domainNEWLINE """NEWLINE domain = np.array(domain, dtype=np.float64)NEWLINE window = np.array(window, dtype=np.float64)NEWLINE scl = (window[1] - window[0]) / (domain[1] - domain[0])NEWLINE off = (window[0] * domain[1] - window[1] * domain[0]) / (domain[1] - domain[0])NEWLINE return off + scl * oldxNEWLINENEWLINENEWLINEdef comb(N, k):NEWLINE """NEWLINE The number of combinations of N things taken k at a time.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE N : int, arrayNEWLINE Number of things.NEWLINE k : int, arrayNEWLINE Number of elements taken.NEWLINENEWLINE """NEWLINE if (k > N) or (N < 0) or (k < 0):NEWLINE return 0NEWLINE val = 1NEWLINE for j in range(min(k, N - k)):NEWLINE val = (val * (N - j)) / (j + 1)NEWLINE return valNEWLINENEWLINENEWLINEdef array_repr_oneline(array):NEWLINE """NEWLINE Represents a multi-dimensional Numpy array flattened onto a single line.NEWLINE """NEWLINE sep = ',' if NUMPY_LT_1_14 else ', 'NEWLINE r = np.array2string(array, separator=sep, suppress_small=True)NEWLINE return ' '.join(l.strip() for l in r.splitlines())NEWLINENEWLINENEWLINEdef combine_labels(left, right):NEWLINE """NEWLINE For use with the join operator &: Combine left input/output labels withNEWLINE right input/output labels.NEWLINENEWLINE If none of the labels conflict then this just returns a sum of tuples.NEWLINE However if *any* of the labels conflict, this appends '0' to the left-handNEWLINE labels and '1' to the right-hand labels so there is no ambiguity).NEWLINE """NEWLINENEWLINE if set(left).intersection(right):NEWLINE left = tuple(l + '0' for l in left)NEWLINE right = tuple(r + '1' for r in right)NEWLINENEWLINE return left + rightNEWLINENEWLINENEWLINEdef ellipse_extent(a, b, theta):NEWLINE """NEWLINE Calculates the extent of a box encapsulating a rotated 2D ellipse.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE a : float or `~astropy.units.Quantity`NEWLINE Major axis.NEWLINE b : float or `~astropy.units.Quantity`NEWLINE Minor axis.NEWLINE theta : float or `~astropy.units.Quantity`NEWLINE Rotation angle. If given as a floating-point value, it is assumed to beNEWLINE in radians.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE offsets : tupleNEWLINE The absolute value of the offset distances from the ellipse center thatNEWLINE define its bounding box region, ``(dx, dy)``.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE .. 
plot::NEWLINE :include-source:NEWLINENEWLINE import numpy as npNEWLINE import matplotlib.pyplot as pltNEWLINE from astropy.modeling.models import Ellipse2DNEWLINE from astropy.modeling.utils import ellipse_extent, render_modelNEWLINENEWLINE amplitude = 1NEWLINE x0 = 50NEWLINE y0 = 50NEWLINE a = 30NEWLINE b = 10NEWLINE theta = np.pi/4NEWLINENEWLINE model = Ellipse2D(amplitude, x0, y0, a, b, theta)NEWLINENEWLINE dx, dy = ellipse_extent(a, b, theta)NEWLINENEWLINE limits = [x0 - dx, x0 + dx, y0 - dy, y0 + dy]NEWLINENEWLINE model.bounding_box = limitsNEWLINENEWLINE image = render_model(model)NEWLINENEWLINE plt.imshow(image, cmap='binary', interpolation='nearest', alpha=.5,NEWLINE extent = limits)NEWLINE plt.show()NEWLINE """NEWLINENEWLINE t = np.arctan2(-b * np.tan(theta), a)NEWLINE dx = a * np.cos(t) * np.cos(theta) - b * np.sin(t) * np.sin(theta)NEWLINENEWLINE t = np.arctan2(b, a * np.tan(theta))NEWLINE dy = b * np.sin(t) * np.cos(theta) + a * np.cos(t) * np.sin(theta)NEWLINENEWLINE if isinstance(dx, u.Quantity) or isinstance(dy, u.Quantity):NEWLINE return np.abs(u.Quantity([dx, dy]))NEWLINE else:NEWLINE return np.abs([dx, dy])NEWLINENEWLINENEWLINEdef get_inputs_and_params(func):NEWLINE """NEWLINE Given a callable, determine the input variables and theNEWLINE parameters.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE func : callableNEWLINENEWLINE ReturnsNEWLINE -------NEWLINE inputs, params : tupleNEWLINE Each entry is a list of inspect.Parameter objectsNEWLINE """NEWLINE sig = signature(func)NEWLINENEWLINE inputs = []NEWLINE params = []NEWLINE for param in sig.parameters.values():NEWLINE if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):NEWLINE raise ValueError("Signature must not have *args or **kwargs")NEWLINE if param.default == param.empty:NEWLINE inputs.append(param)NEWLINE else:NEWLINE params.append(param)NEWLINENEWLINE return inputs, paramsNEWLINENEWLINENEWLINEdef _parameter_with_unit(parameter, unit):NEWLINE if parameter.unit is None:NEWLINE return parameter.value * unitNEWLINE else:NEWLINE return parameter.quantity.to(unit)NEWLINENEWLINENEWLINEdef _parameter_without_unit(value, old_unit, new_unit):NEWLINE if old_unit is None:NEWLINE return valueNEWLINE else:NEWLINE return value * old_unit.to(new_unit)NEWLINENEWLINENEWLINEdef _combine_equivalency_dict(keys, eq1=None, eq2=None):NEWLINE # Given two dictionaries that give equivalencies for a set of keys, forNEWLINE # example input value names, return a dictionary that includes all theNEWLINE # equivalenciesNEWLINE eq = {}NEWLINE for key in keys:NEWLINE eq[key] = []NEWLINE if eq1 is not None and key in eq1:NEWLINE eq[key].extend(eq1[key])NEWLINE if eq2 is not None and key in eq2:NEWLINE eq[key].extend(eq2[key])NEWLINE return eqNEWLINENEWLINENEWLINEdef _to_radian(value):NEWLINE """ Convert ``value`` to radian. """NEWLINE if isinstance(value, u.Quantity):NEWLINE return value.to(u.rad)NEWLINE else:NEWLINE return np.deg2rad(value)NEWLINENEWLINENEWLINEdef _to_orig_unit(value, raw_unit=None, orig_unit=None):NEWLINE """ Convert value with ``raw_unit`` to ``orig_unit``. """NEWLINE if raw_unit is not None:NEWLINE return (value * raw_unit).to(orig_unit)NEWLINE else:NEWLINE return np.rad2deg(value)NEWLINE
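# Illustrative sanity checks for two of the helpers above (not part of the module):NEWLINEif __name__ == '__main__':NEWLINE # poly_map_domain maps [0, 10] onto [-1, 1]: scl = 2/10, off = -1,NEWLINE # so x = 0, 5, 10 land on -1, 0, 1.NEWLINE print(poly_map_domain(np.array([0., 5., 10.]), [0, 10], [-1, 1]))NEWLINE # comb(5, 2) counts the 10 ways of choosing 2 items out of 5.NEWLINE print(comb(5, 2))NEWLINE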
# -*- coding: utf-8 -*-NEWLINE"""NEWLINECreated on Thu Jan 14 12:58:41 2015NEWLINE@author: Tony SaadNEWLINE"""NEWLINENEWLINEimport numpy as npNEWLINEimport argparseNEWLINEimport osNEWLINEfrom xml.dom import minidomNEWLINEfrom shutil import copyfileNEWLINEimport matplotlib.pyplot as pltNEWLINENEWLINE#------------------------------------------------------------------------------NEWLINE"""NEWLINEGiven a 3D array A of size (Nx, Ny, Nz) (representative of a CFD mesh),NEWLINEthis function computes a new array B of size (Nx/2, Ny/2, Nz/2)NEWLINEsuch that the entries in B are the averaged values of corresponding cells in A.NEWLINESpecifically, for a cell centered scalar quantity that lives on A, every cellNEWLINEin B corresponds to the average of the 8 cells in A.NEWLINE@author: Tony SaadNEWLINE"""NEWLINEdef average(phi):NEWLINE # get the dimensions of the input arrayNEWLINE shape = phi.shapeNEWLINE nx0 = shape[0]NEWLINE ny0 = shape[1]NEWLINE nz0 = shape[2]NEWLINE # we will average two points in each dimensionNEWLINE nx = nx0/2NEWLINE ny = ny0/2NEWLINE nz = nz0/2NEWLINE phiAv = np.zeros([nx,ny,nz])NEWLINE for iav in range(0,nx):NEWLINE for jav in range(0,ny):NEWLINE for kav in range(0,nz):NEWLINE i = 2*iavNEWLINE j = 2*javNEWLINE k = 2*kavNEWLINE average = (phi[i,j,k] + phi[i+1,j,k] + phi[i,j+1,k] + phi[i,j,k+1] + phi[i+1,j+1,k] + phi[i+1,j,k+1] + phi[i,j+1,k+1] + phi[i+1,j+1,k+1])/8.0NEWLINE# average = (phi[i,j,k] + phi[i,j+1,k] + phi[i,j,k+1] + phi[i,j+1,k+1] )/4.0NEWLINE phiAv[iav,jav,kav] = averageNEWLINE return phiAvNEWLINENEWLINE#------------------------------------------------------------------------------NEWLINEdef main():NEWLINE parser = argparse.ArgumentParser(description=NEWLINE 'Computes spatial order of accuracy without the need of an analytical solution. The method '+NEWLINE 'is based on computing numerical solutions on refined grids and then computing the '+NEWLINE 'order as p = ln[(f3 - f2)/(f2 - f1)]/ln(0.5).' +NEWLINE ' The cleanest way to operate this script is to make a copy of it in a new directory. Then '+NEWLINE 'copy the ups file to that directory and execute the script.' )NEWLINENEWLINE parser.add_argument('-ups',NEWLINE help='The input file to run.',required=True)NEWLINENEWLINE parser.add_argument('-levels',NEWLINE help='The number of spatial refinement levels.', type=int)NEWLINENEWLINE parser.add_argument('-nsteps',NEWLINE help='The number of timesteps. Defaults to 1.', type=int)NEWLINENEWLINE parser.add_argument('-suspath',NEWLINE help='The path to sus.',required=True)NEWLINENEWLINE parser.add_argument('-vars', required=True,NEWLINE help='Comma separated list of variables for which the spatial order is to be computed. example: -vars "var1, my var".')NEWLINENEWLINE args = parser.parse_args()NEWLINENEWLINE # if the number of levels is not provided, set it to 3NEWLINE if args.levels is None:NEWLINE args.levels = 3NEWLINENEWLINE # if the number of levels is < 3, then reset it to 3NEWLINE if (args.levels < 3):NEWLINE print 'The number of levels has to be >= 3. 
Setting levels to 3'NEWLINE args.levels = 3NEWLINE NEWLINE rootups = args.upsNEWLINE nLevels = args.levelsNEWLINE NEWLINE # cleanup the list of variables for which the order is to be computedNEWLINE myvars = [x.strip() for x in args.vars.split(',')]NEWLINE NEWLINE # first makes copies of the ups filesNEWLINE fnames = []NEWLINE basename = os.path.basename(rootups)NEWLINE basename = os.path.splitext(basename)[0]NEWLINE for i in range(0,nLevels):NEWLINE #fname = os.path.splitext(rootups)[0] + '-t' + str(i) + '.ups' NEWLINE fname = basename + '-t' + str(i) + '.ups'NEWLINE fnames.append(fname)NEWLINE copyfile(rootups, fname) NEWLINE NEWLINE # now loop over the copied files and change the dt and the uda nameNEWLINE refinement = 1NEWLINE maxSteps = 1NEWLINE NEWLINE if args.nsteps is not None:NEWLINE maxSteps = args.nstepsNEWLINE NEWLINE args.suspath = os.path.normpath(args.suspath)NEWLINE args.suspath = os.path.abspath(args.suspath)NEWLINE print args.suspathNEWLINE os.system('ln -fs ' + args.suspath + '/sus sus')NEWLINE os.system('ln -fs ' + args.suspath + '/tools/extractors/lineextract lineextract')NEWLINE NEWLINE # find total number of procs and resolutionNEWLINE xmldoc = minidom.parse(rootups)NEWLINE for node in xmldoc.getElementsByTagName('patches'):NEWLINE P = (str(node.firstChild.data).strip()).split(',')NEWLINE P0=int(P[0].split('[')[1])NEWLINE P1=int(P[1])NEWLINE P2=int(P[2].split(']')[0])NEWLINE total_proc = P0*P1*P2NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('resolution'):NEWLINE P = (str(node.firstChild.data).strip()).split(',')NEWLINE Nx=int(P[0].split('[')[1])NEWLINE Ny=int(P[1])NEWLINE Nz=int(P[2].split(']')[0])NEWLINE NEWLINE for fname in fnames:NEWLINE print 'now updating xml for ', fnameNEWLINE basename = os.path.splitext(fname)[0]NEWLINE xmldoc = minidom.parse(fname)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('filebase'):NEWLINE node.firstChild.replaceWholeText(basename + '.uda')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('resolution'):NEWLINE node.firstChild.replaceWholeText('[' + str(Nx*refinement) + ',' + str(Ny*refinement) + ',' + str(Nz*refinement) + ']')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('max_Timesteps'):NEWLINE node.firstChild.replaceWholeText(maxSteps*refinement)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('delt_min'):NEWLINE dtmin = float(node.firstChild.data)NEWLINE dtmin = dtmin/refinementNEWLINE node.firstChild.replaceWholeText(dtmin)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('delt_max'):NEWLINE node.firstChild.replaceWholeText(dtmin)NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('outputTimestepInterval'):NEWLINE node.firstChild.replaceWholeText('1')NEWLINE NEWLINE for node in xmldoc.getElementsByTagName('maxTime'):NEWLINE node.firstChild.replaceWholeText('100')NEWLINE NEWLINE refinement *= 2NEWLINE f = open(fname, 'w') NEWLINE xmldoc.writexml(f) NEWLINE f.close()NEWLINE NEWLINE # now run the filesNEWLINE counter = 0NEWLINE refinement = 1NEWLINE for fname in fnames:NEWLINE os.system('mpirun -np '+ str(total_proc) + ' ' + './sus' + ' ' + fname + ' > log.txt')NEWLINE udaName = os.path.splitext(fname)[0] + '.uda'NEWLINE # #EXTRACT THE variablesNEWLINE for var in myvars: NEWLINE outFile = str(var) + '-t' + str(counter) + '.txt'NEWLINE the_command = './lineextract -pr 32 -q -v ' + str(var) + ' -timestep ' + str(maxSteps*refinement) + ' -istart 0 0 0 -iend ' + str(Nx*refinement - 1)+' '+str(Ny*refinement -1)+' '+str(Nz*refinement - 1)+ ' -o ' + outFile +' -uda '+udaNameNEWLINE print 
'Executing command: ', the_commandNEWLINE os.system(the_command)NEWLINE NEWLINE os.system('rm ' + fname) NEWLINE refinement *= 2NEWLINE counter += 1NEWLINE NEWLINE #now load the data and compute the errorsNEWLINE print '---------------- SPATIAL ORDER -------------------'NEWLINE for var in myvars: NEWLINE phiAll = []NEWLINE refinement = 1NEWLINE for i in range(0,nLevels):NEWLINE datname = str(var) + '-t' + str(i) + '.txt'NEWLINE phi = np.loadtxt(datname)NEWLINE phi = np.reshape(phi[:,3],(Nx*refinement,Ny*refinement,Nz*refinement),'F') # take the last column of phi and reshapeNEWLINE phiAll.append(phi)NEWLINE # phit = average(phi) # average phiNEWLINE # plt.matshow(phi[:,:,0])NEWLINE # plt.matshow(phit[:,:,0])NEWLINE # plt.show()NEWLINE refinement *= 2NEWLINE os.system('rm ' + datname)NEWLINE NEWLINE # local errorsNEWLINE errAll = []NEWLINE for i in range(0,nLevels-1):NEWLINE #phiav = average(phiAll[i+1]) NEWLINE diff = average(phiAll[i+1]) - phiAll[i]NEWLINE #plt.matshow(diff[:,:,0]) NEWLINE shape = diff.shapeNEWLINE size = shape[0]*shape[1]*shape[2]NEWLINE diff = diff.reshape(size)NEWLINE err = np.linalg.norm(diff,np.inf)NEWLINE errAll.append(err)NEWLINE NEWLINE #plt.show() NEWLINE # now compute orderNEWLINE print '-----------------------------' NEWLINE print ' VARIABLE: ', varNEWLINE print '-----------------------------'NEWLINE for i in range(0,nLevels-2):NEWLINE print np.log( errAll[i+1]/errAll[i] ) / np.log(0.5)NEWLINE NEWLINE os.system('rm -rf *.uda*')NEWLINE os.system('rm -rf *.dot')NEWLINE os.system('rm log.txt') NEWLINENEWLINE#------------------------------------------------------------------------------NEWLINEif __name__ == "__main__":NEWLINE main()
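# Illustrative arithmetic (not part of the script): with error norms that shrink by a factor of 4 at each mesh refinement, the formula p = ln(errAll[i+1]/errAll[i]) / ln(0.5) recovers second-order accuracy:NEWLINE#NEWLINE#   errAll = [1.6e-3, 4.0e-4, 1.0e-4]   # hypothetical norms at 3 levelsNEWLINE#   ln(4.0e-4 / 1.6e-3) / ln(0.5) = ln(0.25) / ln(0.5) = 2.0NEWLINE#   ln(1.0e-4 / 4.0e-4) / ln(0.5) = ln(0.25) / ln(0.5) = 2.0NEWLINE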
from django.apps import AppConfigNEWLINENEWLINENEWLINEclass GiftConfig(AppConfig):NEWLINE name = 'gift'NEWLINE
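# Usage note (standard Django wiring, not shown in this file): the app isNEWLINE# activated by listing this config in the project settings, e.g.NEWLINE#NEWLINE#   INSTALLED_APPS = [NEWLINE#       ...NEWLINE#       'gift.apps.GiftConfig',NEWLINE#   ]NEWLINE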
# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE#      http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""UI tests for /bundle page"""NEWLINENEWLINEimport osNEWLINEfrom typing import ListNEWLINENEWLINEimport allureNEWLINEimport pytestNEWLINEfrom adcm_client.objects import ADCMClient, BundleNEWLINEfrom adcm_pytest_plugin import utilsNEWLINEfrom adcm_pytest_plugin.utils import catch_failedNEWLINEfrom selenium.common.exceptions import ElementClickInterceptedExceptionNEWLINENEWLINEfrom tests.conftest import DUMMY_CLUSTER_BUNDLENEWLINEfrom tests.ui_tests.app.app import ADCMTestNEWLINEfrom tests.ui_tests.app.page.admin.page import AdminIntroPageNEWLINEfrom tests.ui_tests.app.page.bundle.page import BundlePageNEWLINEfrom tests.ui_tests.app.page.bundle_list.page import BundleListPage, BundleInfoNEWLINEfrom tests.ui_tests.app.page.cluster_list.page import ClusterListPageNEWLINEfrom tests.ui_tests.app.page.host_list.page import HostListPageNEWLINENEWLINELICENSE_FP = os.path.join(utils.get_data_dir(__file__), 'license.txt')NEWLINENEWLINECLUSTER_CE_CONFIG = DUMMY_CLUSTER_BUNDLENEWLINENEWLINECLUSTER_EE_CONFIG = [NEWLINE {NEWLINE **CLUSTER_CE_CONFIG[0],NEWLINE 'description': 'enterprise description',NEWLINE 'license': 'license.txt',NEWLINE 'edition': 'enterprise',NEWLINE }NEWLINE]NEWLINENEWLINEPROVIDER_CONFIG = [NEWLINE {NEWLINE 'type': 'provider',NEWLINE 'name': 'test_provider',NEWLINE 'version': '2.15-dev',NEWLINE },NEWLINE {NEWLINE 'type': 'host',NEWLINE 'name': 'Test Host',NEWLINE 'description': 'Test Host Description',NEWLINE 'version': '2.15-dev',NEWLINE },NEWLINE]NEWLINENEWLINENEWLINEdef _assert_bundle_info_value(attribute: str, actual_info: BundleInfo, expected_info: BundleInfo):NEWLINE actual_value = getattr(actual_info, attribute)NEWLINE expected_value = getattr(expected_info, attribute)NEWLINE assert actual_value == expected_value, f"Bundle's {attribute} should be {expected_value}, not {actual_value}"NEWLINENEWLINENEWLINE# pylint: disable=redefined-outer-nameNEWLINE@allure.step('Check bundle list is empty')NEWLINEdef _check_bundle_list_is_empty(page: BundleListPage):NEWLINE assert (row_count := page.table.row_count) == 0, f'Bundle list should be empty, but {row_count} records were found'NEWLINENEWLINENEWLINE@allure.step('Check bundle is listed in table')NEWLINEdef _open_bundle_list_and_check_info(page: BundleListPage, expected_info: BundleInfo):NEWLINE """NEWLINE Open bundle list page, check that exactly 1 row is presented and check its infoNEWLINE """NEWLINE page.open()NEWLINE assert (NEWLINE row_count := page.table.row_countNEWLINE ) == 1, f'Bundle list should have exactly 1 record, but {row_count} was found'NEWLINE bundle_info = page.get_bundle_info()NEWLINE check_bundle_info_is_equal(bundle_info, expected_info)NEWLINENEWLINENEWLINE@allure.step('Check bundle info')NEWLINEdef check_bundle_info_is_equal(actual_info: BundleInfo, expected_info: BundleInfo):NEWLINE """Assert bundle attrs values"""NEWLINE for attr in ('name', 'description', 'version', 'edition'):NEWLINE _assert_bundle_info_value(attr, actual_info, expected_info)NEWLINENEWLINENEWLINE@pytest.fixture()NEWLINE# 
pylint: disable-next=unused-argumentNEWLINEdef page(app_fs: ADCMTest, login_to_adcm_over_api) -> BundleListPage:NEWLINE """Get BundleListPage after authorization"""NEWLINE return BundleListPage(app_fs.driver, app_fs.adcm.url).open()NEWLINENEWLINENEWLINE@allure.title("Upload bundles")NEWLINE@pytest.fixture()NEWLINEdef upload_bundles(create_bundle_archives: List[str], sdk_client_fs: ADCMClient) -> List[Bundle]:NEWLINE """Upload bundles to ADCM"""NEWLINE return [sdk_client_fs.upload_from_fs(path) for path in create_bundle_archives]NEWLINENEWLINENEWLINE@pytest.fixture()NEWLINEdef _create_cluster(upload_bundles: List[Bundle]):NEWLINE """Upload bundles and create cluster from first bundle"""NEWLINE upload_bundles[0].cluster_create('Best Cluster Ever')NEWLINENEWLINENEWLINE@pytest.mark.smoke()NEWLINEdef test_ce_bundle_upload(create_bundle_archives: List[str], page: BundleListPage):NEWLINE """Upload community bundle"""NEWLINE bundle_params = BundleInfo(NEWLINE name="test_cluster", version="1.5", edition="community", description="community description"NEWLINE )NEWLINE page.upload_bundle(create_bundle_archives[0])NEWLINE bundle_info = page.get_bundle_info()NEWLINE check_bundle_info_is_equal(bundle_info, bundle_params)NEWLINENEWLINENEWLINE@pytest.mark.smoke()NEWLINE@pytest.mark.parametrize("create_bundle_archives", [([CLUSTER_EE_CONFIG], LICENSE_FP)], indirect=True)NEWLINEdef test_ee_bundle_upload(create_bundle_archives: List[str], page: BundleListPage):NEWLINE """Upload enterprise bundle and accept licence"""NEWLINE bundle_params = BundleInfo(NEWLINE name='test_cluster',NEWLINE version='1.5',NEWLINE edition='enterprise',NEWLINE description='enterprise description',NEWLINE )NEWLINE page.upload_bundle(create_bundle_archives[0])NEWLINE page.accept_licence()NEWLINE bundle_info = page.get_bundle_info()NEWLINE check_bundle_info_is_equal(bundle_info, bundle_params)NEWLINENEWLINENEWLINE@pytest.mark.smoke()NEWLINEdef test_delete_bundle(create_bundle_archives: List[str], page: BundleListPage):NEWLINE """Upload bundle and delete it"""NEWLINE with allure.step('Upload bundle'):NEWLINE page.upload_bundle(create_bundle_archives[0])NEWLINE assert page.table.row_count == 1, 'One bundle should be uploaded'NEWLINE with allure.step('Delete bundle'):NEWLINE page.delete_bundle()NEWLINE assert page.table.row_count == 0, 'No bundle should be listed in the table'NEWLINENEWLINENEWLINE@pytest.mark.parametrize(NEWLINE "create_bundle_archives", [([CLUSTER_CE_CONFIG, CLUSTER_EE_CONFIG], LICENSE_FP)], indirect=TrueNEWLINE)NEWLINEdef test_two_bundles(create_bundle_archives: List[str], page: BundleListPage):NEWLINE """Upload two bundles"""NEWLINE with allure.step('Upload 1st bundle'), page.table.wait_rows_change():NEWLINE page.upload_bundle(create_bundle_archives[0])NEWLINE with allure.step('Upload 2nd bundle'), page.table.wait_rows_change():NEWLINE page.upload_bundle(create_bundle_archives[1])NEWLINE with allure.step('Check there are exactly 2 rows'):NEWLINE rows = page.table.row_countNEWLINE assert rows == 2, f'Row amount should be 2, but {rows} rows are presented'NEWLINENEWLINENEWLINE@allure.issue("https://arenadata.atlassian.net/browse/ADCM-2010")NEWLINE@pytest.mark.skip(reason="Does not work using selenoid https://github.com/aerokube/selenoid/issues/844")NEWLINE@pytest.mark.parametrize(NEWLINE "create_bundle_archives", [([CLUSTER_CE_CONFIG, CLUSTER_EE_CONFIG], LICENSE_FP)], indirect=TrueNEWLINE)NEWLINEdef test_accept_license_with_two_bundles_upload_at_once(create_bundle_archives: List[str], page: BundleListPage):NEWLINE """Upload two bundles and accept license"""NEWLINE with page.table.wait_rows_change():NEWLINE page.upload_bundles(create_bundle_archives)NEWLINE with catch_failed(ElementClickInterceptedException, 
"License was not accepted by single button click"):NEWLINE page.accept_licence(row_num=1)[email protected]()NEWLINEdef test_open_bundle_from_table(page: BundleListPage, upload_bundles: List[Bundle]):NEWLINE """Test open bundle object page from list of bundles"""NEWLINE with allure.step('Open bundle object page from bundle list'):NEWLINE page.click_bundle_in_row(page.table.get_row())NEWLINE with allure.step('Check object page is opened'):NEWLINE object_page = BundlePage(page.driver, page.base_url, upload_bundles[0].id)NEWLINE object_page.wait_page_is_opened()[email protected]()NEWLINEdef test_open_main_menu_on_bundle_page(page: BundleListPage, upload_bundles: List[Bundle]):NEWLINE """Open main menu on bundle detailed page"""NEWLINE with allure.step('Open bundle object page'):NEWLINE object_page = BundlePage(page.driver, page.base_url, upload_bundles[0].id)NEWLINE object_page.open()NEWLINE object_page.open_main_menu()NEWLINE object_page.check_all_main_menu_fields_are_presented()[email protected]('upload_bundles')NEWLINEdef test_open_adcm_main_menu(page: BundleListPage):NEWLINE """Open main menu by clicking on the menu icon in toolbar"""NEWLINE page.click_on_home_button_on_tooltip()NEWLINE AdminIntroPage(page.driver, page.base_url).wait_page_is_opened()[email protected]("_create_cluster")NEWLINEdef test_delete_bundle_with_created_cluster(page: BundleListPage):NEWLINE """NEWLINE Bundle should not be deleted if an object defined in it is createdNEWLINE """NEWLINE page.delete_bundle()NEWLINE page.check_at_least_one_bundle_is_presented()[email protected]()[email protected](NEWLINE "create_bundle_archives",NEWLINE [[PROVIDER_CONFIG]],NEWLINE indirect=True,NEWLINE ids=['provider_bundle'],NEWLINE)NEWLINEdef test_upload_provider_bundle_from_another_page(NEWLINE page: BundleListPage, app_fs: ADCMTest, create_bundle_archives: List[str]NEWLINE):NEWLINE """NEWLINE Upload bundle from host list and check it is presented in tableNEWLINE """NEWLINE expected_info = BundleInfo(name='test_provider', version='2.15-dev', edition='community', description='')NEWLINE _check_bundle_list_is_empty(page)NEWLINE with allure.step('Create bundle from host creation popup'):NEWLINE host_list_page = HostListPage(app_fs.driver, app_fs.adcm.url).open()NEWLINE host_list_page.upload_bundle_from_host_create_popup(create_bundle_archives[0])NEWLINE _open_bundle_list_and_check_info(page, expected_info)[email protected]()[email protected](NEWLINE "create_bundle_archives",NEWLINE [[CLUSTER_CE_CONFIG]],NEWLINE indirect=True,NEWLINE ids=['cluster_bundle'],NEWLINE)NEWLINEdef test_upload_cluster_bundle_from_another_page(NEWLINE page: BundleListPage, app_fs: ADCMTest, create_bundle_archives: List[str]NEWLINE):NEWLINE """Upload bundle from cluster list and check it is presented in table"""NEWLINE expected_info = BundleInfo(NEWLINE name='test_cluster', version='1.5', edition='community', description='community description'NEWLINE )NEWLINE _check_bundle_list_is_empty(page)NEWLINE with allure.step('Create bundle from cluster creation popup'):NEWLINE cluster_page = ClusterListPage(app_fs.driver, app_fs.adcm.url).open()NEWLINE cluster_page.upload_bundle_from_cluster_create_popup(create_bundle_archives[0])NEWLINE _open_bundle_list_and_check_info(page, expected_info)[email protected](NEWLINE "create_bundle_archives",NEWLINE [[[{'type': 'cluster', 'name': f'ihavetodance-{i}', 'version': f'{i}-ver'}] for i in range(12)]],NEWLINE indirect=True,NEWLINE)[email protected]("upload_bundles")NEWLINEdef test_bundle_list_pagination(page: 
BundleListPage):NEWLINE """Upload 12 bundles and check pagination"""NEWLINE params = {'on_first_page': 10, 'on_second_page': 2}NEWLINE page.close_info_popup()NEWLINE page.table.check_pagination(params['on_second_page'])NEWLINE
# Copyright (C) 2020-2021 Intel CorporationNEWLINE# SPDX-License-Identifier: Apache-2.0NEWLINENEWLINE"""You may copy this file as the starting point of your own model."""NEWLINENEWLINEimport numpy as npNEWLINEfrom logging import getLoggerNEWLINEfrom torchvision.datasets import ImageFolderNEWLINEfrom torchvision.transforms import ToTensorNEWLINEfrom torch.utils.data import random_splitNEWLINEfrom urllib.request import urlretrieveNEWLINEfrom hashlib import sha384NEWLINEfrom os import path, makedirsNEWLINEfrom zipfile import ZipFileNEWLINEfrom tqdm import tqdmNEWLINEimport torchNEWLINEfrom collections.abc import IterableNEWLINENEWLINElogger = getLogger(__name__)NEWLINENEWLINENEWLINEclass HistologyDataset(ImageFolder):NEWLINE """Colorectal Histology Dataset."""NEWLINENEWLINE URL = "https://zenodo.org/record/53169/files/Kather_" \NEWLINE "texture_2016_image_tiles_5000.zip?download=1"NEWLINE FILENAME = "Kather_texture_2016_image_tiles_5000.zip"NEWLINE FOLDER_NAME = "Kather_texture_2016_image_tiles_5000"NEWLINE ZIP_SHA384 = '7d86abe1d04e68b77c055820c2a4c582a1d25d2983e38ab724e'\NEWLINE 'ac75affce8b7cb2cbf5ba68848dcfd9d84005d87d6790'NEWLINE DEFAULT_PATH = path.join(path.expanduser('~'), '.openfl', 'data')NEWLINENEWLINE def __init__(self, root: str = DEFAULT_PATH, **kwargs) -> None:NEWLINE """Initialize."""NEWLINE makedirs(root, exist_ok=True)NEWLINE filepath = path.join(root, HistologyDataset.FILENAME)NEWLINE if not path.exists(filepath):NEWLINE self.pbar = tqdm(total=None)NEWLINE urlretrieve(HistologyDataset.URL, filepath, self.report_hook) # nosecNEWLINE assert sha384(open(filepath, 'rb').read( # nosecNEWLINE path.getsize(filepath))).hexdigest() == HistologyDataset.ZIP_SHA384NEWLINE with ZipFile(filepath, 'r') as f:NEWLINE f.extractall(root)NEWLINENEWLINE super(HistologyDataset, self).__init__(NEWLINE path.join(root, HistologyDataset.FOLDER_NAME), **kwargs)NEWLINENEWLINE def report_hook(self, count, block_size, total_size):NEWLINE """Update progressbar."""NEWLINE if self.pbar.total is None and total_size:NEWLINE self.pbar.total = total_sizeNEWLINE progress_bytes = count * block_sizeNEWLINE self.pbar.update(progress_bytes - self.pbar.n)NEWLINENEWLINE def __getitem__(self, index):NEWLINE """Allow getting items by slice index."""NEWLINE if isinstance(index, Iterable):NEWLINE return [super(HistologyDataset, self).__getitem__(i) for i in index]NEWLINE else:NEWLINE return super(HistologyDataset, self).__getitem__(index)NEWLINENEWLINENEWLINEdef one_hot(labels, classes):NEWLINE """NEWLINE One Hot encode a vector.NEWLINENEWLINE Args:NEWLINE labels (list): List of labels to onehot encodeNEWLINE classes (int): Total number of categorical classesNEWLINENEWLINE Returns:NEWLINE np.array: Matrix of one-hot encoded labelsNEWLINE """NEWLINE return np.eye(classes)[labels]NEWLINENEWLINENEWLINEdef _load_raw_datashards(shard_num, collaborator_count, train_split_ratio=0.8):NEWLINE """NEWLINE Load the raw data by shard.NEWLINENEWLINE Returns tuples of the dataset shard divided into training and validation.NEWLINENEWLINE Args:NEWLINE shard_num (int): The shard number to useNEWLINE collaborator_count (int): The number of collaborators in the federationNEWLINENEWLINE Returns:NEWLINE 2 tuples: (image, label) of the training, validation datasetNEWLINE """NEWLINE dataset = HistologyDataset(transform=ToTensor())NEWLINE n_train = int(train_split_ratio * len(dataset))NEWLINE n_valid = len(dataset) - n_trainNEWLINE ds_train, ds_val = random_split(NEWLINE dataset, lengths=[n_train, n_valid], 
generator=torch.manual_seed(0))NEWLINENEWLINE # create the shardsNEWLINE X_train, y_train = list(zip(*ds_train[shard_num::collaborator_count]))NEWLINE X_train, y_train = np.stack(X_train), np.array(y_train)NEWLINENEWLINE X_valid, y_valid = list(zip(*ds_val[shard_num::collaborator_count]))NEWLINE X_valid, y_valid = np.stack(X_valid), np.array(y_valid)NEWLINENEWLINE return (X_train, y_train), (X_valid, y_valid)NEWLINENEWLINENEWLINEdef load_histology_shard(shard_num, collaborator_count,NEWLINE categorical=False, channels_last=False, **kwargs):NEWLINE """NEWLINE Load the Histology dataset.NEWLINENEWLINE Args:NEWLINE shard_num (int): The shard to use from the datasetNEWLINE collaborator_count (int): The number of collaborators in the federationNEWLINE categorical (bool): True = convert the labels to one-hot encodedNEWLINE vectors (Default = True)NEWLINE channels_last (bool): True = The input images have the channelsNEWLINE last (Default = True)NEWLINE **kwargs: Additional parameters to pass to the functionNEWLINENEWLINE Returns:NEWLINE list: The input shapeNEWLINE int: The number of classesNEWLINE numpy.ndarray: The training dataNEWLINE numpy.ndarray: The training labelsNEWLINE numpy.ndarray: The validation dataNEWLINE numpy.ndarray: The validation labelsNEWLINE """NEWLINE img_rows, img_cols = 150, 150NEWLINE num_classes = 8NEWLINENEWLINE (X_train, y_train), (X_valid, y_valid) = _load_raw_datashards(NEWLINE shard_num, collaborator_count)NEWLINENEWLINE if channels_last:NEWLINE X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 3)NEWLINE X_valid = X_valid.reshape(X_valid.shape[0], img_rows, img_cols, 3)NEWLINE input_shape = (img_rows, img_cols, 3)NEWLINE else:NEWLINE X_train = X_train.reshape(X_train.shape[0], 3, img_rows, img_cols)NEWLINE X_valid = X_valid.reshape(X_valid.shape[0], 3, img_rows, img_cols)NEWLINE input_shape = (3, img_rows, img_cols)NEWLINENEWLINE logger.info(f'Histology > X_train Shape : {X_train.shape}')NEWLINE logger.info(f'Histology > y_train Shape : {y_train.shape}')NEWLINE logger.info(f'Histology > Train Samples : {X_train.shape[0]}')NEWLINE logger.info(f'Histology > Valid Samples : {X_valid.shape[0]}')NEWLINENEWLINE if categorical:NEWLINE # convert class vectors to binary class matricesNEWLINE y_train = one_hot(y_train, num_classes)NEWLINE y_valid = one_hot(y_valid, num_classes)NEWLINENEWLINE return input_shape, num_classes, X_train, y_train, X_valid, y_validNEWLINE
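# A minimal usage sketch (an assumed collaborator-side call, not part of the module): load shard 0 of a 2-collaborator federation with one-hot labels.NEWLINEif __name__ == '__main__':NEWLINE input_shape, n_classes, X_train, y_train, X_valid, y_valid = \NEWLINE load_histology_shard(shard_num=0, collaborator_count=2, categorical=True)NEWLINE print(input_shape, n_classes, X_train.shape, y_train.shape)NEWLINE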
#!/usr/bin/env pythonNEWLINENEWLINEimport jsonNEWLINEimport sysNEWLINENEWLINEimport requestsNEWLINEfrom bs4 import BeautifulSoupNEWLINENEWLINEresponse = requests.get('http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstances.html')NEWLINEsoup = BeautifulSoup(response.text, 'html.parser')NEWLINENEWLINEsection_h3 = soup.find(id='query-DescribeInstances-filters')NEWLINEsection_div = section_h3.find_parent('div', attrs={'class': 'section'})NEWLINENEWLINEfilter_names = []NEWLINEfor term in section_div.select('div.variablelist dt span.term'):NEWLINE filter_name = term.get_text()NEWLINE if not filter_name.startswith('tag:'):NEWLINE filter_names.append(filter_name)NEWLINEfilter_names.sort()NEWLINENEWLINEjson.dump(filter_names, sys.stdout, indent=4)NEWLINE
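# Usage sketch (the file name below is an assumption): the script dumps theNEWLINE# sorted filter names as a JSON array on stdout, so redirect it to a file:NEWLINE#NEWLINE#   python describe_instances_filters.py > filters.jsonNEWLINE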
"""NEWLINE Models to manage users and profilesNEWLINE"""NEWLINEfrom django.db import modelsNEWLINEfrom django.contrib.auth.models import UserNEWLINEfrom django.conf import settingsNEWLINEfrom django.utils.translation import gettext_lazy as _NEWLINENEWLINEfrom geotrek.common.utils import reifyNEWLINENEWLINENEWLINEclass Structure(models.Model):NEWLINE """NEWLINE Represents an organisational structure, to which users are related.NEWLINE """NEWLINE name = models.CharField(max_length=256, verbose_name=_("Nom"), db_index=True)NEWLINENEWLINE def __str__(self):NEWLINE return self.nameNEWLINENEWLINE class Meta:NEWLINE verbose_name = _("Structure")NEWLINE verbose_name_plural = _("Structures")NEWLINE ordering = ['name']NEWLINE permissions = (("can_bypass_structure", _("Can bypass structure")),)NEWLINENEWLINENEWLINEdef default_structure():NEWLINE """ Create default structure if necessary """NEWLINE return Structure.objects.get_or_create(name=settings.DEFAULT_STRUCTURE_NAME)[0]NEWLINENEWLINENEWLINEdef default_structure_pk():NEWLINE return default_structure().pkNEWLINENEWLINENEWLINEclass StructureRelated(models.Model):NEWLINE """NEWLINE A mixin used for any entities that belong to a structureNEWLINE """NEWLINE structure = models.ForeignKey(Structure, default=default_structure_pk, on_delete=models.CASCADE,NEWLINE verbose_name=_("Related structure"))NEWLINENEWLINE check_structure_in_forms = TrueNEWLINENEWLINE def same_structure(self, user):NEWLINE """ Returns True if the user is in the same structure or hasNEWLINE bypass_structure permission, False otherwise. """NEWLINE return (user.profile.structure == self.structureNEWLINE or user.is_superuserNEWLINE or user.has_perm('authent.can_bypass_structure'))NEWLINENEWLINE class Meta:NEWLINE abstract = TrueNEWLINE verbose_name = _("Related structures")NEWLINE verbose_name_plural = _("Related structure")NEWLINENEWLINENEWLINEclass StructureOrNoneRelated(models.Model):NEWLINE """NEWLINE A mixin used for any entities that belong to a structure or None entityNEWLINE """NEWLINE structure = models.ForeignKey(Structure, on_delete=models.CASCADE,NEWLINE verbose_name=_("Related structure"), blank=True, null=True)NEWLINENEWLINE objects = models.Manager()NEWLINE check_structure_in_forms = TrueNEWLINENEWLINE class Meta:NEWLINE abstract = TrueNEWLINE verbose_name = _("Related structures")NEWLINE verbose_name_plural = _("Related structure")NEWLINENEWLINENEWLINEclass UserProfile(StructureRelated):NEWLINE """NEWLINE A custom user profileNEWLINE """NEWLINE user = models.OneToOneField(User, unique=True, on_delete=models.CASCADE)NEWLINE extended_username = models.CharField(blank=True, max_length=200, default="", verbose_name=_('Extended username'))NEWLINENEWLINE class Meta:NEWLINE verbose_name = _("User's profile")NEWLINE verbose_name_plural = _("User's profiles")NEWLINENEWLINE def __str__(self):NEWLINE return _("Profile for %s") % self.userNEWLINENEWLINENEWLINEUser.profile = reify(lambda u: UserProfile.objects.get_or_create(user=u)[0])NEWLINE
import argparseNEWLINEimport osNEWLINEfrom math import log10NEWLINENEWLINEimport pandas as pdNEWLINEimport torch.optim as optimNEWLINEimport torch.utils.dataNEWLINEimport torchvision.utils as utilsNEWLINEfrom torch.autograd import VariableNEWLINEfrom torch.utils.data import DataLoaderNEWLINEfrom tqdm import tqdmNEWLINENEWLINEimport pytorch_ssimNEWLINEfrom data_utils import TrainDatasetFromFolder, ValDatasetFromFolder, display_transformNEWLINEfrom loss import GeneratorLossNEWLINEfrom model import GeneratorNEWLINE# from PGD import *NEWLINE# from model import DiscriminatorNEWLINEimport timeNEWLINEfrom torch.utils.tensorboard import SummaryWriterNEWLINEimport torch.nn.functional as FNEWLINEimport torch.nn as nnNEWLINEfrom distillmodel import DiscriminatorNEWLINENEWLINEparser = argparse.ArgumentParser('PGDSRGAN') # progressive growing discriminator SRGANNEWLINENEWLINEparser.add_argument('--fsize', default=128, type=int)NEWLINEparser.add_argument('--crop_size', default=96, type=int, help='training images crop size')NEWLINEparser.add_argument('--upscale_factor', default=4, type=int, choices=[2, 4, 8],NEWLINE                    help='super resolution upscale factor')NEWLINEparser.add_argument('--num_epochs', default=80, type=int, help='train epoch number')NEWLINEparser.add_argument('--batch_size', default=32, type=int)NEWLINEparser.add_argument('--TICK', type=int, default=1000)NEWLINEparser.add_argument('--trans_tick', type=int, default=200)NEWLINEparser.add_argument('--stabile_tick', type=int, default=100)NEWLINEparser.add_argument('--is_fade', type=bool, default=False)NEWLINEparser.add_argument('--grow', type=int, default=0)NEWLINEparser.add_argument('--max_grow', type=int, default=3)NEWLINEparser.add_argument('--when_to_grow', type=int, default=256) # when to grow the discriminatorNEWLINEparser.add_argument('--version', type=int, default=0) # capacity 1/4, 1/2, 1 -> encoded as 1, 2, 3NEWLINEparser.add_argument('--kd_range', type=int, default=5)NEWLINEparser.add_argument('--kd1', type=int, default=12)NEWLINEparser.add_argument('--kd2', type=int, default=42)NEWLINENEWLINENEWLINEdef distill_loss(y, label, score, T, alpha):NEWLINE    return nn.KLDivLoss()(F.log_softmax(y / T),NEWLINE                          F.softmax(score / T)) * (T * T * 2.0 + alpha) + \NEWLINE           F.binary_cross_entropy(y, label) * (1 - alpha)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    opt = parser.parse_args()NEWLINENEWLINE    CROP_SIZE = opt.crop_sizeNEWLINE    UPSCALE_FACTOR = opt.upscale_factorNEWLINE    NUM_EPOCHS = opt.num_epochsNEWLINE    batch_size = opt.batch_sizeNEWLINE    count_image_number = 0NEWLINE    trns_tick = opt.trans_tickNEWLINE    stab_tick = opt.stabile_tickNEWLINE    is_fade = opt.is_fadeNEWLINE    change_iter = opt.when_to_growNEWLINE    version = opt.versionNEWLINE    kd_range = opt.kd_rangeNEWLINE    kd1 = opt.kd1NEWLINE    kd2 = opt.kd2NEWLINE    cur_grow = 0NEWLINENEWLINE    delta = 1.0 / (2 * trns_tick + 2 * stab_tick)NEWLINE    d_alpha = 1.0 * batch_size / trns_tick / opt.TICKNEWLINENEWLINE    fadein = {'dis': is_fade}NEWLINENEWLINE    writer = SummaryWriter('runs/distill')NEWLINENEWLINE    train_set = TrainDatasetFromFolder('../data/train/gt', crop_size=CROP_SIZE,NEWLINE                                       upscale_factor=UPSCALE_FACTOR,NEWLINE                                       batch_size=batch_size)NEWLINE    val_set = ValDatasetFromFolder('../data/val/gt',NEWLINE                                   upscale_factor=UPSCALE_FACTOR)NEWLINE    train_loader = DataLoader(dataset=train_set, num_workers=4, batch_size=batch_size, shuffle=True)NEWLINE    val_loader = DataLoader(dataset=val_set, num_workers=1, batch_size=1, shuffle=False)NEWLINENEWLINE    netG = Generator(UPSCALE_FACTOR)NEWLINE    print('# generator parameters:', sum(param.numel() 
for param in netG.parameters()))NEWLINE    # netD = Discriminator(opt)NEWLINE    netD = Discriminator(opt)NEWLINENEWLINE    print('# discriminator parameters:', sum(param.numel() for param in netD.parameters()))NEWLINE    generator_criterion = GeneratorLoss()NEWLINENEWLINE    print(torch.cuda.is_available())NEWLINENEWLINE    if torch.cuda.is_available():NEWLINE        netG.cuda()NEWLINE        netD.cuda()NEWLINE        print(netD)NEWLINE        generator_criterion.cuda()NEWLINENEWLINE    optimizerG = optim.Adam(netG.parameters())NEWLINE    optimizerD = optim.Adam(netD.parameters())NEWLINENEWLINE    results = {'d_loss': [], 'g_loss': [], 'd_score': [], 'g_score': [], 'psnr': [], 'ssim': []}NEWLINENEWLINE    start = time.time()NEWLINE    cnt = 0NEWLINE    ncnt = 0NEWLINE    distill_batch = 0NEWLINENEWLINE    for epoch in range(1, NUM_EPOCHS + 1):NEWLINE        epoch_flag = 0NEWLINENEWLINE        train_bar = tqdm(train_loader, leave=True)NEWLINE        running_results = {'batch_sizes': 0, 'd_loss': 0, 'g_loss': 0, 'd_score': 0, 'g_score': 0, 'distill_loss': 0}NEWLINENEWLINE        netG.train()NEWLINE        netD.train()NEWLINENEWLINE        if kd1 < epoch <= kd1 + kd_range + 1 or kd2 < epoch <= kd2 + kd_range + 1:NEWLINE            writer.add_scalar("loss/KD_loss", running_results['distill_loss'] / distill_batch,NEWLINE                              epoch - 1)NEWLINENEWLINE        i = 0NEWLINE        for data, target in train_bar: # train epoch (lr, hr)NEWLINE            count_image_number += batch_sizeNEWLINE            i += 1NEWLINENEWLINE            g_update_first = TrueNEWLINE            running_results['batch_sizes'] += batch_sizeNEWLINENEWLINE            if kd1 <= epoch <= (kd1 + kd_range - 1) or kd2 <= epoch <= (kd2 + kd_range - 1): # discriminator KDNEWLINE                epoch_flag = 1NEWLINE                distill_batch += batch_sizeNEWLINE                if (epoch == kd1 and cnt == 0) or (epoch == kd2 and cnt == 0):NEWLINE                    print("KD Phase start!")NEWLINE                    cnt = cnt + 1NEWLINE                    ncnt = 0NEWLINE                    opt.version = opt.version + 1NEWLINE                    student = Discriminator(opt)NEWLINE                    optimizersD = optim.Adam(student.parameters())NEWLINE                    print(student)NEWLINE                    student.cuda()NEWLINENEWLINE                netG.eval()NEWLINE                netD.eval()NEWLINE                student.train()NEWLINE                real_img = Variable(target)NEWLINE                real_img = real_img.cuda()NEWLINENEWLINE                z = Variable(data)NEWLINE                z = z.cuda()NEWLINENEWLINE                fake_img = netG(z) # lr->hrNEWLINENEWLINE                netD.zero_grad()NEWLINENEWLINE                teacher_fake_out = netD(fake_img).mean().reshape(1) # needs to be trainedNEWLINE                student_fake_out = student(fake_img).mean().reshape(1)NEWLINENEWLINE                student_real_out = student(real_img).mean().reshape(1)NEWLINE                teacher_real_out = netD(real_img).mean().reshape(1)NEWLINENEWLINE                one = torch.Tensor([1]).reshape(1)NEWLINE                one = one.cuda()NEWLINENEWLINE                zero = torch.Tensor([0]).reshape(1)NEWLINE                zero = zero.cuda()NEWLINENEWLINE                distill_real_loss = distill_loss(student_real_out, one, teacher_real_out, 10, 0.5)NEWLINE                distill_fake_loss = distill_loss(student_fake_out, zero, teacher_fake_out, 10, 0.5)NEWLINENEWLINE                total_distill_loss = 0.3*distill_real_loss + 0.7*distill_fake_lossNEWLINE                optimizerD.zero_grad()NEWLINE                optimizersD.zero_grad()NEWLINE                # writer.add_scalar("loss/distill_loss", total_distill_loss, epoch)NEWLINENEWLINE                running_results['distill_loss'] += total_distill_lossNEWLINENEWLINE                total_distill_loss.backward()NEWLINE                optimizerD.step()NEWLINE                optimizersD.step()NEWLINENEWLINE                if (epoch == kd1 + kd_range - 1 and ncnt == 0 and i == len(train_loader)) or (NEWLINE                        epoch == kd2 + kd_range - 1 and ncnt == 0 and i == len(train_loader)): # +1NEWLINE                    print('netD is replaced with the student\n')NEWLINE                    netD = studentNEWLINE                    optimizerD = optimizersDNEWLINE                    epoch_flag = 0NEWLINE                    cnt = 0NEWLINE                    ncnt = 1NEWLINENEWLINE            ############################NEWLINE            
# (1) Update D network: maximize D(x)-1-D(G(z))NEWLINE            ###########################NEWLINE            if epoch < kd1 or (kd1 + kd_range - 1 < epoch < kd2) or epoch > kd2 + kd_range - 1:NEWLINENEWLINE                real_img = Variable(target)NEWLINE                if torch.cuda.is_available():NEWLINE                    real_img = real_img.cuda()NEWLINE                z = Variable(data)NEWLINE                if torch.cuda.is_available():NEWLINE                    z = z.cuda()NEWLINE                fake_img = netG(z)NEWLINENEWLINE                netD.zero_grad()NEWLINE                real_out = netD(real_img).mean()NEWLINE                fake_out = netD(fake_img).mean()NEWLINE                d_loss = 1 - real_out + fake_outNEWLINENEWLINE                d_loss.backward(retain_graph=True)NEWLINE                optimizerD.step()NEWLINENEWLINE                ############################NEWLINE                # (2) Update G network: minimize 1-D(G(z)) + Perception Loss + Image Loss + TV LossNEWLINE                ###########################NEWLINE                netG.zero_grad()NEWLINE                ## The two lines below are added to prevent runtime error in Google Colab ##NEWLINE                fake_img = netG(z)NEWLINE                fake_out = netD(fake_img).mean()NEWLINE                ##NEWLINE                g_loss = generator_criterion(fake_out, fake_img, real_img)NEWLINENEWLINE                g_loss.backward()NEWLINENEWLINE                optimizerG.step()NEWLINENEWLINE                # loss for current batch before optimizationNEWLINE                running_results['g_loss'] += g_loss.item() * batch_sizeNEWLINE                running_results['d_loss'] += d_loss.item() * batch_sizeNEWLINE                running_results['d_score'] += real_out.item() * batch_sizeNEWLINE                running_results['g_score'] += fake_out.item() * batch_sizeNEWLINENEWLINE                # train_bar.set_description(desc='[%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f' % (NEWLINE                #     epoch, NUM_EPOCHS, running_results['d_loss'] / running_results['batch_sizes'],NEWLINE                #     running_results['g_loss'] / running_results['batch_sizes'],NEWLINE                #     running_results['d_score'] / running_results['batch_sizes'],NEWLINE                #     running_results['g_score'] / running_results['batch_sizes']))NEWLINENEWLINE        if epoch < kd1 or (kd1 + kd_range - 1 < epoch < kd2) or epoch > kd2 + kd_range - 1:NEWLINE            print('[%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f' % (NEWLINE                epoch, NUM_EPOCHS, running_results['d_loss'] / running_results['batch_sizes'],NEWLINE                running_results['g_loss'] / running_results['batch_sizes'],NEWLINE                running_results['d_score'] / running_results['batch_sizes'],NEWLINE                running_results['g_score'] / running_results['batch_sizes']))NEWLINENEWLINE        netG.eval()NEWLINE        out_path = 'training_results/SRF_' + str(UPSCALE_FACTOR) + '/'NEWLINE        if not os.path.exists(out_path):NEWLINE            os.makedirs(out_path)NEWLINENEWLINE        with torch.no_grad():NEWLINE            val_bar = tqdm(val_loader)NEWLINE            valing_results = {'mse': 0, 'ssims': 0, 'psnr': 0, 'ssim': 0, 'batch_sizes': 0}NEWLINE            val_images = []NEWLINE            for val_lr, val_hr_restore, val_hr in val_bar:NEWLINE                batch_size = val_lr.size(0)NEWLINE                valing_results['batch_sizes'] += batch_sizeNEWLINE                lr = val_lrNEWLINE                hr = val_hrNEWLINE                if torch.cuda.is_available():NEWLINE                    lr = lr.cuda()NEWLINE                    hr = hr.cuda()NEWLINE                sr = netG(lr)NEWLINENEWLINE                batch_mse = ((sr - hr) ** 2).data.mean()NEWLINE                valing_results['mse'] += batch_mse * batch_sizeNEWLINE                batch_ssim = pytorch_ssim.ssim(sr, hr).item()NEWLINE                valing_results['ssims'] += batch_ssim * batch_sizeNEWLINE                valing_results['psnr'] = 10 * log10(NEWLINE                    (hr.max() ** 2) / (valing_results['mse'] / valing_results['batch_sizes']))NEWLINE                valing_results['ssim'] = valing_results['ssims'] / valing_results['batch_sizes']NEWLINE                # val_bar.set_description(NEWLINE                #     desc='[converting LR images to SR images] PSNR: %.4f dB SSIM: %.4f' % (NEWLINE                #         valing_results['psnr'], valing_results['ssim']))NEWLINENEWLINE                val_images.extend(NEWLINE                    
[display_transform()(val_hr_restore.squeeze(0)), display_transform()(hr.data.cpu().squeeze(0)),NEWLINE display_transform()(sr.data.cpu().squeeze(0))]) # bicubic, gt, srNEWLINE print('PSNR: %.4f dB SSIM: %.4f' % (valing_results['psnr'], valing_results['ssim']))NEWLINE val_images = torch.stack(val_images)NEWLINE val_images = torch.chunk(val_images, val_images.size(0) // 15)NEWLINE val_save_bar = tqdm(val_images)NEWLINE index = 1NEWLINE print('[saving training results]')NEWLINE for image in val_save_bar:NEWLINE image = utils.make_grid(image, nrow=3, padding=5)NEWLINE utils.save_image(image, out_path + 'epoch_%d_index_%d.png' % (epoch, index), padding=5)NEWLINE index += 1NEWLINENEWLINE # save model parametersNEWLINE torch.save(netG.state_dict(), 'epochs/netG_epoch_%d_%d.pth' % (UPSCALE_FACTOR, epoch))NEWLINE torch.save(netD.state_dict(), 'epochs/netD_epoch_%d_%d.pth' % (UPSCALE_FACTOR, epoch))NEWLINE # save loss\scores\psnr\ssimNEWLINE results['d_loss'].append(running_results['d_loss'] / running_results['batch_sizes'])NEWLINE results['g_loss'].append(running_results['g_loss'] / running_results['batch_sizes'])NEWLINE results['d_score'].append(running_results['d_score'] / running_results['batch_sizes'])NEWLINE results['g_score'].append(running_results['g_score'] / running_results['batch_sizes'])NEWLINE results['psnr'].append(valing_results['psnr'])NEWLINE results['ssim'].append(valing_results['ssim'])NEWLINENEWLINE writer.add_scalar('VAL/psnr', valing_results['psnr'], epoch)NEWLINE writer.add_scalar('VAL/ssim', valing_results['ssim'], epoch)NEWLINE writer.add_scalar("loss/G_loss", running_results['g_loss'] / running_results['batch_sizes'], epoch)NEWLINE writer.add_scalar("loss/D_loss", running_results['d_loss'] / running_results['batch_sizes'], epoch)NEWLINENEWLINE writer.flush()NEWLINE writer.close()NEWLINE end = time.time()NEWLINE print('time elapsed: ', end - start)NEWLINE
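# Note on distill_loss() in the script above: the discriminator outputs are reducedNEWLINE# to one-element tensors (mean().reshape(1)), and softmax/log_softmax over a singleNEWLINE# element are constant (1 and 0), so the KL term there is identically zero and onlyNEWLINE# the binary cross-entropy term actually trains the student. The weighting factorNEWLINE# (T * T * 2.0 + alpha) also differs from the usual Hinton-style alpha * T * T.NEWLINE# Below is a minimal sketch of the conventional formulation over logit vectors;NEWLINE# it is an illustration under those assumptions, not the author's code.NEWLINEimport torchNEWLINEimport torch.nn as nnNEWLINEimport torch.nn.functional as FNEWLINENEWLINENEWLINEdef hinton_distill_loss(student_logits, teacher_logits, labels, T=10.0, alpha=0.5):NEWLINE    # Soft targets: KL divergence between temperature-scaled distributions.NEWLINE    kd = nn.KLDivLoss(reduction='batchmean')(NEWLINE        F.log_softmax(student_logits / T, dim=1),NEWLINE        F.softmax(teacher_logits / T, dim=1)) * (alpha * T * T)NEWLINE    # Hard targets: ordinary cross-entropy against the true labels.NEWLINE    ce = F.cross_entropy(student_logits, labels) * (1.0 - alpha)NEWLINE    return kd + ceNEWLINENEWLINENEWLINEs = torch.randn(4, 2) # dummy student logits: batch of 4, 2 classesNEWLINEt = torch.randn(4, 2) # dummy teacher logitsNEWLINEy = torch.tensor([0, 1, 1, 0])NEWLINEprint(hinton_distill_loss(s, t, y).item())NEWLINE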
# coding=utf-8NEWLINE# Copyright 2020 The HuggingFace NLP Authors.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE#     http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE""" BLEU metric. """NEWLINENEWLINEimport nlpNEWLINENEWLINEfrom .nmt_bleu import compute_bleu # From: https://github.com/tensorflow/nmt/blob/master/nmt/scripts/bleu.pyNEWLINENEWLINENEWLINE_CITATION = """\NEWLINE@INPROCEEDINGS{Papineni02bleu:a,NEWLINE    author = {Kishore Papineni and Salim Roukos and Todd Ward and Wei-jing Zhu},NEWLINE    title = {BLEU: a Method for Automatic Evaluation of Machine Translation},NEWLINE    booktitle = {},NEWLINE    year = {2002},NEWLINE    pages = {311--318}NEWLINE}NEWLINE@inproceedings{lin-och-2004-orange,NEWLINE    title = "{ORANGE}: a Method for Evaluating Automatic Evaluation Metrics for Machine Translation",NEWLINE    author = "Lin, Chin-Yew andNEWLINE      Och, Franz Josef",NEWLINE    booktitle = "{COLING} 2004: Proceedings of the 20th International Conference on Computational Linguistics",NEWLINE    month = "aug 23{--}aug 27",NEWLINE    year = "2004",NEWLINE    address = "Geneva, Switzerland",NEWLINE    publisher = "COLING",NEWLINE    url = "https://www.aclweb.org/anthology/C04-1072",NEWLINE    pages = "501--507",NEWLINE}NEWLINE"""NEWLINENEWLINE_DESCRIPTION = """\NEWLINEBLEU (bilingual evaluation understudy) is an algorithm for evaluating the quality of text which has been machine-translated from one natural language to another.NEWLINEQuality is considered to be the correspondence between a machine's output and that of a human: "the closer a machine translation is to a professional human translation,NEWLINEthe better it is" – this is the central idea behind BLEU. BLEU was one of the first metrics to claim a high correlation with human judgements of quality, andNEWLINEremains one of the most popular automated and inexpensive metrics.NEWLINENEWLINEScores are calculated for individual translated segments—generally sentences—by comparing them with a set of good quality reference translations.NEWLINEThose scores are then averaged over the whole corpus to reach an estimate of the translation's overall quality. Intelligibility or grammatical correctnessNEWLINEare not taken into account.NEWLINENEWLINEBLEU's output is always a number between 0 and 1. This value indicates how similar the candidate text is to the reference texts, with values closer to 1NEWLINErepresenting more similar texts. Few human translations will attain a score of 1, since this would indicate that the candidate is identical to one of theNEWLINEreference translations. For this reason, it is not necessary to attain a score of 1. 
Because there are more opportunities to match, adding additionalNEWLINEreference translations will increase the BLEU score.NEWLINE"""NEWLINENEWLINE_KWARGS_DESCRIPTION = """NEWLINEComputes BLEU score of translated segments against one or more references.NEWLINEArgs:NEWLINE predictions: list of translations to score.NEWLINE Each translation should be tokenized into a list of tokens.NEWLINE references: list of lists of references for each translation.NEWLINE Each reference should be tokenized into a list of tokens.NEWLINE max_order: Maximum n-gram order to use when computing BLEU score.NEWLINE smooth: Whether or not to apply Lin et al. 2004 smoothing.NEWLINEReturns:NEWLINE 'bleu': bleu score,NEWLINE 'precisions': geometric mean of n-gram precisions,NEWLINE 'brevity_penalty': brevity penalty,NEWLINE 'length_ratio': ratio of lengths,NEWLINE 'translation_length': translation_length,NEWLINE 'reference_length': reference_lengthNEWLINE"""NEWLINENEWLINEclass Bleu(nlp.Metric):NEWLINE def _info(self):NEWLINE return nlp.MetricInfo(NEWLINE description=_DESCRIPTION,NEWLINE citation=_CITATION,NEWLINE inputs_description=_KWARGS_DESCRIPTION,NEWLINE features=nlp.Features({NEWLINE 'predictions': nlp.Sequence(nlp.Value('string', id='token'), id='sequence'),NEWLINE 'references': nlp.Sequence(nlp.Sequence(nlp.Value('string', id='token'), id='sequence'), id='references'),NEWLINE }),NEWLINE codebase_urls=["https://github.com/tensorflow/nmt/blob/master/nmt/scripts/bleu.py"],NEWLINE reference_urls=["https://en.wikipedia.org/wiki/BLEU",NEWLINE "https://towardsdatascience.com/evaluating-text-output-in-nlp-bleu-at-your-own-risk-e8609665a213"]NEWLINE )NEWLINENEWLINE def _compute(self, predictions, references, max_order=4, smooth=False):NEWLINE score = compute_bleu(reference_corpus=references, translation_corpus=predictions, max_order=max_order, smooth=smooth)NEWLINE (bleu, precisions, bp, ratio, translation_length, reference_length) = scoreNEWLINE return {'bleu': bleu,NEWLINE 'precisions': precisions,NEWLINE 'brevity_penalty': bp,NEWLINE 'length_ratio': ratio,NEWLINE 'translation_length': translation_length,NEWLINE 'reference_length': reference_length}NEWLINE
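# Usage sketch for the metric above, calling the wrapped compute_bleu() directly.NEWLINE# The plain import below stands in for the package-relative one used by the metricNEWLINE# (an assumption about how nmt_bleu sits on your path); inputs are pre-tokenized.NEWLINEfrom nmt_bleu import compute_bleuNEWLINENEWLINEpredictions = [['the', 'cat', 'sat', 'on', 'the', 'mat']]NEWLINEreferences = [[['the', 'cat', 'sat', 'on', 'the', 'mat']]] # one list of references per predictionNEWLINENEWLINEbleu, precisions, bp, ratio, translation_length, reference_length = compute_bleu(NEWLINE    reference_corpus=references, translation_corpus=predictions, max_order=4, smooth=False)NEWLINEprint(bleu) # 1.0 for an exact matchNEWLINE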
# Copyright 2020 Google LLCNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# https://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE""" Helper functions to create test resources.NEWLINENEWLINE"""NEWLINEfrom vm_network_migration_end_to_end_tests.utils import *NEWLINENEWLINENEWLINEclass TestResourceCreator:NEWLINE def __init__(self, google_api_interface):NEWLINE self.google_api_interface = google_api_interfaceNEWLINE self.legacy_network_name = 'end-to-end-test-legacy-network'NEWLINE self.network_name = 'end-to-end-test-vpc-network'NEWLINE self.subnetwork_name = 'end-to-end-test-subnet-network'NEWLINE try:NEWLINE self.legacy_network_selfLink = \NEWLINE self.google_api_interface.get_network(self.legacy_network_name)[NEWLINE 'selfLink']NEWLINE except:NEWLINE self.legacy_network_selfLink = \NEWLINE self.google_api_interface.create_legacy_network(NEWLINE 'end-to-end-test-legacy-network')['targetLink']NEWLINE try:NEWLINE self.network_selfLink = \NEWLINE self.google_api_interface.get_network(self.network_name)[NEWLINE 'selfLink']NEWLINE except:NEWLINE self.network_selfLink = self.google_api_interface.create_non_auto_network(NEWLINE self.network_name)NEWLINE try:NEWLINE self.subnetwork_selfLink = \NEWLINE self.google_api_interface.get_subnetwork(self.subnetwork_name)[NEWLINE 'selfLink']NEWLINE except:NEWLINE self.subnetwork_selfLink = self.google_api_interface.create_subnetwork_using_random_ip_range(NEWLINE self.subnetwork_name, self.network_selfLink)['targetLink']NEWLINE print('Created subnetwork: ', self.subnetwork_selfLink)NEWLINENEWLINE self.legacy_template_name = 'end-to-end-test-legacy-template'NEWLINE try:NEWLINE self.legacy_instance_template_selfLink = \NEWLINE self.google_api_interface.get_instance_template_body(NEWLINE self.legacy_template_name)['selfLink']NEWLINE except:NEWLINE self.legacy_instance_template_selfLink = \NEWLINE self.create_instance_template(NEWLINE 'sample_instance_template.json',NEWLINE 'end-to-end-test-legacy-template')[NEWLINE 'targetLink']NEWLINENEWLINE self.healthcheck_name = 'end-to-end-test-tcp-80-health-check'NEWLINE try:NEWLINE self.tcp_80_health_check_selfLink = \NEWLINE self.google_api_interface.get_healthcheck(NEWLINE self.healthcheck_name)[NEWLINE 'selfLink']NEWLINENEWLINE except:NEWLINE self.tcp_80_health_check_selfLink = \NEWLINE self.create_tcp_80_health_check(self.healthcheck_name)[NEWLINE 'targetLink']NEWLINENEWLINE def create_tcp_80_health_check(self, healthcheck_name):NEWLINENEWLINE config = {NEWLINE "name": healthcheck_name,NEWLINE "description": "",NEWLINE "checkIntervalSec": 5,NEWLINE "timeoutSec": 5,NEWLINE "unhealthyThreshold": 2,NEWLINE "healthyThreshold": 2,NEWLINE "type": "TCP",NEWLINE "tcpHealthCheck": {NEWLINE "port": 80,NEWLINE "proxyHeader": "NONE"NEWLINE },NEWLINE "kind": "compute#healthCheck"NEWLINE }NEWLINE return self.google_api_interface.add_healthcheck(config)NEWLINENEWLINE def create_instance_template(self, instance_template_file,NEWLINE instance_template_name):NEWLINE config = read_json_file(NEWLINE 
instance_template_file)NEWLINE config['name'] = instance_template_nameNEWLINE config['properties']['networkInterfaces'][0][NEWLINE 'network'] = self.legacy_network_selfLinkNEWLINENEWLINE return self.google_api_interface.create_instance_template(config)NEWLINENEWLINE def add_additional_disk_to_instance(self, instance_name, disk_name,NEWLINE disk_file):NEWLINE disk_config = read_json_file(NEWLINE disk_file)NEWLINE disk_config['name'] = disk_nameNEWLINE disk_selfLink = self.google_api_interface.create_disk(disk_config)[NEWLINE 'targetLink']NEWLINE self.google_api_interface.attach_disk(instance_name, disk_selfLink)NEWLINENEWLINE def create_instance_using_template(self, instance_name, template_selfLink):NEWLINE instance_configs = {NEWLINE "name": instance_nameNEWLINE }NEWLINE return self.google_api_interface.create_instance(instance_configs,NEWLINE template_selfLink)NEWLINENEWLINE def create_unmanaged_instance_group(self,NEWLINE unmanaged_instance_group_name,NEWLINE list_of_instance_names):NEWLINE unmanaged_instance_group_configs = {NEWLINE "name": unmanaged_instance_group_name,NEWLINE "description": ""NEWLINE }NEWLINENEWLINE return self.google_api_interface.create_unmanaged_instance_group_with_instances(NEWLINE unmanaged_instance_group_configs, list_of_instance_names)NEWLINENEWLINE def create_regional_managed_instance_group(self, instance_template_selfLink,NEWLINE group_name,NEWLINE managed_instance_group_file_name,NEWLINE autoscaler_file_name=None):NEWLINE managed_instance_group_configs = read_json_file(NEWLINE managed_instance_group_file_name)NEWLINENEWLINE managed_instance_group_configs[NEWLINE 'instanceTemplate'] = instance_template_selfLinkNEWLINE managed_instance_group_configs['name'] = group_nameNEWLINE operation = self.google_api_interface.create_multi_zone_managed_instance_group(NEWLINE managed_instance_group_configs)NEWLINE instance_group_selfLink = operation['targetLink']NEWLINE if autoscaler_file_name != None:NEWLINE autoscaler_configs = read_json_file(autoscaler_file_name)NEWLINE autoscaler_configs['target'] = instance_group_selfLinkNEWLINE autoscaler_configs['name'] = group_nameNEWLINE self.google_api_interface.create_region_autoscaler(NEWLINE autoscaler_configs)NEWLINE return operationNEWLINENEWLINE def create_target_pool_with_health_check(self, target_pool_file_name,NEWLINE target_pool_name,NEWLINE instance_group_name_list,NEWLINE instance_selfLinks,NEWLINE health_check_selfLink=None):NEWLINE target_pool_configs = read_json_file(target_pool_file_name)NEWLINE target_pool_configs['name'] = target_pool_nameNEWLINE if health_check_selfLink != None:NEWLINE target_pool_configs['healthChecks'] = [health_check_selfLink]NEWLINE operation = \NEWLINE self.google_api_interface.create_target_pool(target_pool_configs)NEWLINE target_pool_selfLink = operation['targetLink']NEWLINE for regional_instance_group in instance_group_name_list:NEWLINE self.google_api_interface.regional_instance_group_set_target_pool(NEWLINE regional_instance_group,NEWLINE target_pool_selfLink)NEWLINE for instance_selfLink in instance_selfLinks:NEWLINE self.google_api_interface.add_instances_to_target_pool(NEWLINE target_pool_configs[NEWLINE 'name'],NEWLINE instance_selfLink)NEWLINE return operationNEWLINENEWLINE def create_global_backend_service(self, backend_service_file_name,NEWLINE backend_service_name,NEWLINE instance_group_selfLinks):NEWLINE backend_service_configs = read_json_file(backend_service_file_name)NEWLINE backend_service_configs['name'] = backend_service_nameNEWLINE 
backend_service_configs['healthChecks'] = [NEWLINE            self.tcp_80_health_check_selfLink]NEWLINE        for instance_group_selfLink in instance_group_selfLinks:NEWLINE            backend_service_configs['backends'].append({NEWLINE                "description": "",NEWLINE                "group": instance_group_selfLink,NEWLINE                "balancingMode": "UTILIZATION",NEWLINE                "maxUtilization": 0.8,NEWLINE                "capacityScaler": 1NEWLINE            })NEWLINE        return self.google_api_interface.create_global_backend_service(NEWLINE            backend_service_configs)NEWLINENEWLINE    def create_regional_backend_service(self, backend_service_file_name,NEWLINE                                        backend_service_name,NEWLINE                                        instance_group_selfLinks):NEWLINE        backend_service_configs = read_json_file(backend_service_file_name)NEWLINE        backend_service_configs['name'] = backend_service_nameNEWLINE        backend_service_configs['healthChecks'] = [NEWLINE            self.tcp_80_health_check_selfLink]NEWLINE        for instance_group_selfLink in instance_group_selfLinks:NEWLINE            backend_service_configs['backends'].append({NEWLINE                "description": "",NEWLINE                "group": instance_group_selfLink,NEWLINE                "balancingMode": "CONNECTION"NEWLINE            })NEWLINE        backend_service_configs['network'] = self.legacy_network_selfLinkNEWLINE        return self.google_api_interface.create_regional_backend_service(NEWLINE            backend_service_configs)NEWLINENEWLINE    def create_urlmapping(self, url_mapping_name, backend_service_selfLink):NEWLINE        url_configs = {NEWLINE            "name": url_mapping_name,NEWLINE            "defaultService": backend_service_selfLink,NEWLINE            "kind": "compute#urlMap"NEWLINE        }NEWLINE        return self.google_api_interface.create_urlmapping(url_configs)NEWLINENEWLINE    def create_urlmapping_using_two_backend_service(self, url_mapping_name,NEWLINE                                                    backend_service_selfLinks):NEWLINE        url_configs = {NEWLINE            "name": url_mapping_name,NEWLINE            "hostRules": [NEWLINE                {NEWLINE                    "hosts": [NEWLINE                        "www.example.com"NEWLINE                    ],NEWLINE                    "pathMatcher": "path-matcher-1"NEWLINE                }NEWLINE            ],NEWLINE            "pathMatchers": [NEWLINE                {NEWLINE                    "name": "path-matcher-1",NEWLINE                    "defaultService": backend_service_selfLinks[1],NEWLINE                    "pathRules": [NEWLINE                        {NEWLINE                            "service": backend_service_selfLinks[1],NEWLINE                            "paths": [NEWLINE                                "/test/*"NEWLINE                            ]NEWLINE                        }NEWLINE                    ]NEWLINE                }NEWLINE            ],NEWLINE            "defaultService": backend_service_selfLinks[0],NEWLINE        }NEWLINE        return self.google_api_interface.create_urlmapping(url_configs)NEWLINENEWLINE    def create_http_target_proxy(self, target_proxy_name, urlmapping_selfLink):NEWLINE        return self.google_api_interface.create_http_proxy(target_proxy_name,NEWLINE                                                           urlmapping_selfLink)NEWLINENEWLINE    def create_global_forwarding_rule_with_target(self,NEWLINE                                                  forwarding_rule_file_name,NEWLINE                                                  forwarding_rule_name,NEWLINE                                                  target_selfLink,NEWLINE                                                  network_selfLink=None):NEWLINE        forwarding_rule_configs = read_json_file(forwarding_rule_file_name)NEWLINE        forwarding_rule_configs['name'] = forwarding_rule_nameNEWLINE        forwarding_rule_configs['target'] = target_selfLinkNEWLINE        if network_selfLink is not None:NEWLINE            forwarding_rule_configs['network'] = network_selfLinkNEWLINE        return self.google_api_interface.create_global_forwarding_rule(NEWLINE            forwarding_rule_configs)NEWLINENEWLINE    def create_global_forwarding_rule_with_backend_service(self,NEWLINE                                                            forwarding_rule_file_name,NEWLINE                                                            forwarding_rule_name,NEWLINE                                                            backend_service_selfLink):NEWLINE        forwarding_rule_configs = read_json_file(forwarding_rule_file_name)NEWLINE        forwarding_rule_configs['name'] = forwarding_rule_nameNEWLINE        forwarding_rule_configs['backendService'] = backend_service_selfLinkNEWLINE        return self.google_api_interface.create_global_forwarding_rule(NEWLINE            
forwarding_rule_configs)NEWLINENEWLINE def create_regional_forwarding_rule_with_target(self,NEWLINE forwarding_rule_file_name,NEWLINE forwarding_rule_name,NEWLINE target_selfLink):NEWLINE forwarding_rule_configs = read_json_file(forwarding_rule_file_name)NEWLINE forwarding_rule_configs['name'] = forwarding_rule_nameNEWLINE forwarding_rule_configs['target'] = target_selfLinkNEWLINE if 'backendService' in forwarding_rule_configs:NEWLINE del forwarding_rule_configs['backendService']NEWLINE forwarding_rule_configs['network'] = self.legacy_network_selfLinkNEWLINE return self.google_api_interface.create_regional_forwarding_rule(NEWLINE forwarding_rule_configs)NEWLINENEWLINE def create_regional_forwarding_rule_with_backend_service(self,NEWLINE forwarding_rule_file_name,NEWLINE forwarding_rule_name,NEWLINE backend_service_selfLink):NEWLINE forwarding_rule_configs = read_json_file(forwarding_rule_file_name)NEWLINE forwarding_rule_configs['name'] = forwarding_rule_nameNEWLINE forwarding_rule_configs['backendService'] = backend_service_selfLinkNEWLINE forwarding_rule_configs['network'] = self.legacy_network_selfLinkNEWLINE return self.google_api_interface.create_regional_forwarding_rule(NEWLINE forwarding_rule_configs)NEWLINENEWLINE def create_a_target_instance(self, target_instance_name, instance_selfLink):NEWLINE target_instance_configs = {NEWLINE "name": target_instance_name,NEWLINE "description": "",NEWLINE "natPolicy": "NO_NAT",NEWLINE "instance": instance_selfLinkNEWLINE }NEWLINE return self.google_api_interface.create_target_instance(NEWLINE target_instance_configs)NEWLINE
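# Usage sketch for TestResourceCreator above, wrapped in a function so the snippetNEWLINE# stays importable. 'google_api_interface' is assumed to be an already-authenticatedNEWLINE# helper from vm_network_migration_end_to_end_tests.utils; resource names areNEWLINE# illustrative and follow the class's end-to-end-test-* convention.NEWLINEdef create_basic_fixtures(google_api_interface):NEWLINE    creator = TestResourceCreator(google_api_interface)NEWLINE    # Build an instance from the shared legacy template, then group it so it canNEWLINE    # back a target pool or backend service created by the helpers above.NEWLINE    creator.create_instance_using_template('end-to-end-test-instance-1',NEWLINE                                           creator.legacy_instance_template_selfLink)NEWLINE    creator.create_unmanaged_instance_group('end-to-end-test-unmanaged-group',NEWLINE                                            ['end-to-end-test-instance-1'])NEWLINE    return creatorNEWLINE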
# GENERATED BY KOMAND SDK - DO NOT EDITNEWLINEfrom .action import SketchifyNEWLINE
from django_asservio_core.models import (NEWLINE CodeDictionary, NameDictionary,NEWLINE Dictionary, DescriptionDictionaryNEWLINE)NEWLINENEWLINENEWLINEclass Code(CodeDictionary):NEWLINE """Code dictionary."""NEWLINE passNEWLINENEWLINENEWLINEclass Name(NameDictionary):NEWLINE """Name dictionary."""NEWLINE passNEWLINENEWLINENEWLINEclass Description(DescriptionDictionary):NEWLINE """Description dictionary."""NEWLINE passNEWLINENEWLINENEWLINEclass Info(Dictionary):NEWLINE """Regular dictionary."""NEWLINE passNEWLINE
# This file is part of the Astrometry.net suite.NEWLINE# Licensed under a 3-clause BSD style license - see LICENSENEWLINENEWLINE# Generates FITS tables from CSV lists of OpenNGC entries and names.NEWLINENEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport csvNEWLINENEWLINEfrom astrometry.util.fits import fits_tableNEWLINEimport numpy as npNEWLINENEWLINENEWLINEdef convert_openngc_entries():NEWLINE    entries = []NEWLINENEWLINE    with open('openngc-entries.csv') as f:NEWLINE        for is_ngc, num, ra, dec, size in csv.reader(f, delimiter=';'):NEWLINE            is_ngc = (is_ngc == '1')NEWLINE            num = int(num)NEWLINE            ra = float(ra) if ra else 0.0NEWLINE            dec = float(dec) if dec else 0.0NEWLINENEWLINE            # Convert from diameter in arcmins to radius in degrees.NEWLINE            radius = float(size) / (2.0 * 60.0) if size else 0.0NEWLINENEWLINE            entries.append({NEWLINE                'is_ngc': is_ngc,NEWLINE                'ra': ra,NEWLINE                'dec': dec,NEWLINE                'radius': radius,NEWLINE                'num': num,NEWLINE            })NEWLINENEWLINE    T = fits_table()NEWLINE    for key in ['is_ngc', 'ra', 'dec', 'radius', 'num']:NEWLINE        T.set(key, [x[key] for x in entries])NEWLINENEWLINE    T.to_np_arrays()NEWLINENEWLINE    T.name = np.array(['NGC %i' % n if isngc else 'IC %i' % nNEWLINE                       for n, isngc in zip(T.num, T.is_ngc)])NEWLINENEWLINE    for key in ['ra', 'dec', 'radius']:NEWLINE        T.set(key, T.get(key).astype(np.float32))NEWLINE    T.num = T.num.astype(np.int16)NEWLINENEWLINE    units_dict = {NEWLINE        'ra': 'deg',NEWLINE        'dec': 'deg',NEWLINE        'radius': 'deg',NEWLINE    }NEWLINENEWLINE    NGC = T[T.is_ngc]NEWLINE    NGC.rename('num', 'ngcnum')NEWLINE    NGC.delete_column('is_ngc')NEWLINE    units = [units_dict.get(c, '') for c in NGC.get_columns()]NEWLINE    NGC.writeto('openngc-ngc.fits', units=units)NEWLINENEWLINE    IC = T[np.logical_not(T.is_ngc)]NEWLINE    IC.rename('num', 'icnum')NEWLINE    IC.delete_column('is_ngc')NEWLINE    units = [units_dict.get(c, '') for c in IC.get_columns()]NEWLINE    IC.writeto('openngc-ic.fits', units=units)NEWLINENEWLINENEWLINEdef convert_openngc_names():NEWLINE    names = []NEWLINENEWLINE    with open('openngc-names.csv') as f:NEWLINE        for is_ngc, num, name in csv.reader(f, delimiter=';'):NEWLINENEWLINE            # Parse the flag like the entries file does: bool('0') would be True.NEWLINE            is_ngc = (is_ngc == '1')NEWLINENEWLINE            num = int(num)NEWLINENEWLINE            identifier = '%s%d' % ('' if is_ngc else 'I', num)NEWLINENEWLINE            names.append({NEWLINE                'Object': name,NEWLINE                'Name': identifier,NEWLINE            })NEWLINENEWLINE    T = fits_table()NEWLINE    for key in ['Object', 'Name']:NEWLINE        T.set(key, [x[key] for x in names])NEWLINE    T.writeto('openngc-names.fits')NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE    convert_openngc_entries()NEWLINE    convert_openngc_names()NEWLINE
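# Read-back sketch: fits_table() in astrometry.util.fits also opens an existingNEWLINE# table when given a filename, so the files written above can be spot-checked likeNEWLINE# this (column names follow the writer: ngcnum, ra, dec, radius, name).NEWLINEfrom astrometry.util.fits import fits_tableNEWLINENEWLINEngc = fits_table('openngc-ngc.fits')NEWLINEprint(len(ngc), 'NGC rows')NEWLINEprint(ngc.name[:3], ngc.ra[:3], ngc.radius[:3])NEWLINE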
import reNEWLINENEWLINE_TESTS = [NEWLINE    "//REL/RELIDO",NEWLINE    "TS//SI-G/TK//RS/OC/NF",NEWLINE    "TS//SI-ABC-DEF//OC/NF",NEWLINE    "TS//SI-G ABCD EFGH-XYZ//OC/NF",NEWLINE    "TS//ANB/SI/TK/XNB//NF",NEWLINE    "TS//SAR-BP-123/CA-XYZ YYY//NF",NEWLINE    "TS//RD-CNWDI//NF",NEWLINE    "S//FRD-SIGMA 14 18//REL",NEWLINE    "//CTS//BOHEMIA",NEWLINE    "//DEU S//NF",NEWLINE    "//NS//ATOMAL//OC",NEWLINE    "//JOINT S//REL",NEWLINE    "TS//FGI DEU GBR//REL TO USA, DEU, GBR",NEWLINE    "//FGI S//NF",NEWLINE    "S//NF",NEWLINE    "S//NF/PR",NEWLINE    "U//SSI",NEWLINE]NEWLINENEWLINE# A raw string keeps the backslashes literal; without the r prefix, escapes likeNEWLINE# \/ and \- raise DeprecationWarning on recent Pythons.NEWLINE_PATTERN = r"^(U?|C|(S|TS)?(\/\/(((\w|\-)+)(\s(\w|\-)+)*)((\/(\w|\-)+)(\s(\w|\-)+)*)*)?)\/\/((((\w|\-)+)|(REL( TO ((\w|\-)+)(,\s?((\w|\-)+))*)?))((\/((\w|\-)+)|(REL( TO ((\w|\-)+)(,(\w|\-)+)*)?))*))$"NEWLINENEWLINEdef main():NEWLINE    prog = re.compile(_PATTERN)NEWLINE    for s in _TESTS:NEWLINE        result = prog.match(s)NEWLINE        print(s + " " + str(result))NEWLINENEWLINEif __name__ == '__main__':NEWLINE    main()NEWLINENEWLINENEWLINE
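# Helper sketch built on the pattern above (assumes _PATTERN is importable; theNEWLINE# module name 'banner_pattern' is hypothetical). Since the pattern is anchoredNEWLINE# with ^...$, match() already validates the whole line.NEWLINEimport reNEWLINENEWLINEfrom banner_pattern import _PATTERN # hypothetical module nameNEWLINENEWLINE_BANNER_RE = re.compile(_PATTERN)NEWLINENEWLINENEWLINEdef is_valid_banner(line):NEWLINE    """Return True if the line matches the banner-marking grammar."""NEWLINE    return _BANNER_RE.match(line) is not NoneNEWLINE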
#!/usr/bin/env pythonNEWLINE#-*- coding:utf-8 -*-NEWLINE# Author: Donny You([email protected])NEWLINE# Evaluation of cityscape.NEWLINENEWLINENEWLINEfrom __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport fnmatchNEWLINEimport argparseNEWLINEimport platformNEWLINEimport sysNEWLINENEWLINEtry:NEWLINE from itertools import izipNEWLINEexcept ImportError:NEWLINE izip = zipNEWLINENEWLINE# Cityscapes importsNEWLINEfrom val.scripts.seg.cityscape.evaluation.csHelpers import *NEWLINENEWLINE# C SupportNEWLINE# Enable the cython support for faster evaluation, this is necessary for speeding up your model resultsNEWLINE# Only tested for Ubuntu 64bit OSNEWLINECSUPPORT = TrueNEWLINE# Check if C-Support is available for better performanceNEWLINEif CSUPPORT:NEWLINE try:NEWLINE import val.scripts.seg.cityscape.evaluation.addToConfusionMatrix as addToConfusionMatrixNEWLINE except:NEWLINE CSUPPORT = FalseNEWLINENEWLINENEWLINE# A class to collect all bunch of dataNEWLINEclass CArgs(object):NEWLINE def __init__(self, data_path=None, out_path=None, predict_path=None):NEWLINE # Where to look for Cityscapes, note that data path is equal to gt pathNEWLINE if 'CITYSCAPES_DATASET' in os.environ:NEWLINE self.cityscapesPath = os.environ['CITYSCAPES_DATASET']NEWLINE else:NEWLINE self.cityscapesPath = os.path.join(data_path)NEWLINENEWLINE if 'CITYSCAPES_EXPORT_DIR' in os.environ:NEWLINE export_dir = os.environ['CITYSCAPES_EXPORT_DIR']NEWLINE if not os.path.isdir(export_dir):NEWLINE raise ValueError("CITYSCAPES_EXPORT_DIR {} is not a directory".format(export_dir))NEWLINE self.exportFile = "{}/resultPixelLevelSemanticLabeling.json".format(export_dir)NEWLINE else:NEWLINE self.exportFile = os.path.join(out_path, "evaluationResults", "resultPixelLevelSemanticLabeling.json")NEWLINE # Parameters that should be modified by userNEWLINE self.groundTruthSearch = os.path.join( self.cityscapesPath, "*", "*_gtFine_labelIds.png" )NEWLINENEWLINE # Remaining paramsNEWLINE self.evalInstLevelScore = TrueNEWLINE self.evalPixelAccuracy = FalseNEWLINE self.evalLabels = []NEWLINE self.printRow = 5NEWLINE self.normalized = TrueNEWLINE self.colorized = hasattr(sys.stderr, "isatty") and sys.stderr.isatty() and platform.system()=='Linux'NEWLINE self.bold = colors.BOLD if self.colorized else ""NEWLINE self.nocol = colors.ENDC if self.colorized else ""NEWLINE self.JSONOutput = TrueNEWLINE self.quiet = FalseNEWLINENEWLINE self.avgClassSize = {NEWLINE "bicycle" : 4672.3249222261 ,NEWLINE "caravan" : 36771.8241758242 ,NEWLINE "motorcycle" : 6298.7200839748 ,NEWLINE "rider" : 3930.4788056518 ,NEWLINE "bus" : 35732.1511111111 ,NEWLINE "train" : 67583.7075812274 ,NEWLINE "car" : 12794.0202738185 ,NEWLINE "person" : 3462.4756337644 ,NEWLINE "truck" : 27855.1264367816 ,NEWLINE "trailer" : 16926.9763313609 ,NEWLINE }NEWLINENEWLINE # store some parameters for finding predictions in the args variableNEWLINE # the values are filled when the method getPrediction is first calledNEWLINE self.predictionPath = predict_pathNEWLINE self.predictionWalk = NoneNEWLINENEWLINENEWLINE## method partNEWLINEdef getPrediction( args, groundTruthFile ):NEWLINE # determine the prediction path, if the method is first calledNEWLINE if not args.predictionPath:NEWLINE rootPath = NoneNEWLINE if 'CITYSCAPES_RESULTS' in os.environ:NEWLINE rootPath = os.environ['CITYSCAPES_RESULTS']NEWLINE elif 'CITYSCAPES_DATASET' in os.environ:NEWLINE rootPath = os.path.join( os.environ['CITYSCAPES_DATASET'] , "results" 
)NEWLINE else:NEWLINE rootPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),'..','..','results')NEWLINENEWLINE if not os.path.isdir(rootPath):NEWLINE printError("Could not find a result root folder. Please read the instructions of this method.")NEWLINENEWLINE args.predictionPath = rootPathNEWLINENEWLINE # walk the prediction path, if not happened yetNEWLINE if not args.predictionWalk:NEWLINE walk = []NEWLINE for root, dirnames, filenames in os.walk(args.predictionPath):NEWLINE walk.append( (root,filenames) )NEWLINE args.predictionWalk = walkNEWLINENEWLINE csFile = getCsFileInfo(groundTruthFile)NEWLINE filePattern = "{}_{}_{}*.png".format( csFile.city , csFile.sequenceNb , csFile.frameNb )NEWLINENEWLINE predictionFile = NoneNEWLINE for root, filenames in args.predictionWalk:NEWLINE for filename in fnmatch.filter(filenames, filePattern):NEWLINE if not predictionFile:NEWLINE predictionFile = os.path.join(root, filename)NEWLINE else:NEWLINE printError("Found multiple predictions for ground truth {}".format(groundTruthFile))NEWLINENEWLINE if not predictionFile:NEWLINE printError("Found no prediction for ground truth {}".format(groundTruthFile))NEWLINENEWLINE return predictionFileNEWLINENEWLINE# Generate empty confusion matrix and create list of relevant labelsNEWLINEdef generateMatrix(args):NEWLINE args.evalLabels = []NEWLINE for label in labels:NEWLINE if (label.id < 0):NEWLINE continueNEWLINE # we append all found labels, regardless of being ignoredNEWLINE args.evalLabels.append(label.id)NEWLINE maxId = max(args.evalLabels)NEWLINE # We use longlong type to be sure that there are no overflowsNEWLINE return np.zeros(shape=(maxId + 1, maxId + 1), dtype=np.ulonglong)NEWLINENEWLINENEWLINEdef generateInstanceStats(args):NEWLINE instanceStats = {}NEWLINE instanceStats["classes"] = {}NEWLINE instanceStats["categories"] = {}NEWLINE for label in labels:NEWLINE if label.hasInstances and not label.ignoreInEval:NEWLINE instanceStats["classes"][label.name] = {}NEWLINE instanceStats["classes"][label.name]["tp"] = 0.0NEWLINE instanceStats["classes"][label.name]["tpWeighted"] = 0.0NEWLINE instanceStats["classes"][label.name]["fn"] = 0.0NEWLINE instanceStats["classes"][label.name]["fnWeighted"] = 0.0NEWLINE for category in category2labels:NEWLINE labelIds = []NEWLINE allInstances = TrueNEWLINE for label in category2labels[category]:NEWLINE if label.id < 0:NEWLINE continueNEWLINE if not label.hasInstances:NEWLINE allInstances = FalseNEWLINE breakNEWLINE labelIds.append(label.id)NEWLINE if not allInstances:NEWLINE continueNEWLINENEWLINE instanceStats["categories"][category] = {}NEWLINE instanceStats["categories"][category]["tp"] = 0.0NEWLINE instanceStats["categories"][category]["tpWeighted"] = 0.0NEWLINE instanceStats["categories"][category]["fn"] = 0.0NEWLINE instanceStats["categories"][category]["fnWeighted"] = 0.0NEWLINE instanceStats["categories"][category]["labelIds"] = labelIdsNEWLINENEWLINE return instanceStatsNEWLINENEWLINENEWLINE# Get absolute or normalized value from field in confusion matrix.NEWLINEdef getMatrixFieldValue(confMatrix, i, j, args):NEWLINE if args.normalized:NEWLINE rowSum = confMatrix[i].sum()NEWLINE if (rowSum == 0):NEWLINE return float('nan')NEWLINE return float(confMatrix[i][j]) / rowSumNEWLINE else:NEWLINE return confMatrix[i][j]NEWLINENEWLINENEWLINE# Calculate and return IOU score for a particular labelNEWLINEdef getIouScoreForLabel(label, confMatrix, args):NEWLINE if id2label[label].ignoreInEval:NEWLINE return float('nan')NEWLINENEWLINE # the number of true positive 
pixels for this labelNEWLINE # the entry on the diagonal of the confusion matrixNEWLINE tp = np.longlong(confMatrix[label, label])NEWLINENEWLINE # the number of false negative pixels for this labelNEWLINE # the row sum of the matching row in the confusion matrixNEWLINE # minus the diagonal entryNEWLINE fn = np.longlong(confMatrix[label, :].sum()) - tpNEWLINENEWLINE # the number of false positive pixels for this labelsNEWLINE # Only pixels that are not on a pixel with ground truth label that is ignoredNEWLINE # The column sum of the corresponding column in the confusion matrixNEWLINE # without the ignored rows and without the actual label of interestNEWLINE notIgnored = [l for l in args.evalLabels if not id2label[l].ignoreInEval and not l == label]NEWLINE fp = np.longlong(confMatrix[notIgnored, label].sum())NEWLINENEWLINE # the denominator of the IOU scoreNEWLINE denom = (tp + fp + fn)NEWLINE if denom == 0:NEWLINE return float('nan')NEWLINENEWLINE # return IOUNEWLINE return float(tp) / denomNEWLINENEWLINENEWLINE# Calculate and return IOU score for a particular labelNEWLINEdef getInstanceIouScoreForLabel(label, confMatrix, instStats, args):NEWLINE if id2label[label].ignoreInEval:NEWLINE return float('nan')NEWLINENEWLINE labelName = id2label[label].nameNEWLINE if not labelName in instStats["classes"]:NEWLINE return float('nan')NEWLINENEWLINE tp = instStats["classes"][labelName]["tpWeighted"]NEWLINE fn = instStats["classes"][labelName]["fnWeighted"]NEWLINE # false postives computed as aboveNEWLINE notIgnored = [l for l in args.evalLabels if not id2label[l].ignoreInEval and not l == label]NEWLINE fp = np.longlong(confMatrix[notIgnored, label].sum())NEWLINENEWLINE # the denominator of the IOU scoreNEWLINE denom = (tp + fp + fn)NEWLINE if denom == 0:NEWLINE return float('nan')NEWLINENEWLINE # return IOUNEWLINE return float(tp) / denomNEWLINENEWLINENEWLINE# Calculate prior for a particular class id.NEWLINEdef getPrior(label, confMatrix):NEWLINE return float(confMatrix[label, :].sum()) / confMatrix.sum()NEWLINENEWLINENEWLINE# Get average of scores.NEWLINE# Only computes the average over valid entries.NEWLINEdef getScoreAverage(scoreList, args):NEWLINE validScores = 0NEWLINE scoreSum = 0.0NEWLINE for score in scoreList:NEWLINE if not math.isnan(scoreList[score]):NEWLINE validScores += 1NEWLINE scoreSum += scoreList[score]NEWLINE if validScores == 0:NEWLINE return float('nan')NEWLINE return scoreSum / validScoresNEWLINENEWLINENEWLINE# Calculate and return IOU score for a particular categoryNEWLINEdef getIouScoreForCategory(category, confMatrix, args):NEWLINE # All labels in this categoryNEWLINE labels = category2labels[category]NEWLINE # The IDs of all valid labels in this categoryNEWLINE labelIds = [label.id for label in labels if not label.ignoreInEval and label.id in args.evalLabels]NEWLINE # If there are no valid labels, then return NaNNEWLINE if not labelIds:NEWLINE return float('nan')NEWLINENEWLINE # the number of true positive pixels for this categoryNEWLINE # this is the sum of all entries in the confusion matrixNEWLINE # where row and column belong to a label ID of this categoryNEWLINE tp = np.longlong(confMatrix[labelIds, :][:, labelIds].sum())NEWLINENEWLINE # the number of false negative pixels for this categoryNEWLINE # that is the sum of all rows of labels within this categoryNEWLINE # minus the number of true positive pixelsNEWLINE fn = np.longlong(confMatrix[labelIds, :].sum()) - tpNEWLINENEWLINE # the number of false positive pixels for this categoryNEWLINE # we count the column sum 
of all labels within this categoryNEWLINE # while skipping the rows of ignored labels and of labels within this categoryNEWLINE notIgnoredAndNotInCategory = [l for l in args.evalLabels ifNEWLINE not id2label[l].ignoreInEval and id2label[l].category != category]NEWLINE fp = np.longlong(confMatrix[notIgnoredAndNotInCategory, :][:, labelIds].sum())NEWLINENEWLINE # the denominator of the IOU scoreNEWLINE denom = (tp + fp + fn)NEWLINE if denom == 0:NEWLINE return float('nan')NEWLINENEWLINE # return IOUNEWLINE return float(tp) / denomNEWLINENEWLINENEWLINE# Calculate and return IOU score for a particular categoryNEWLINEdef getInstanceIouScoreForCategory(category, confMatrix, instStats, args):NEWLINE if not category in instStats["categories"]:NEWLINE return float('nan')NEWLINE labelIds = instStats["categories"][category]["labelIds"]NEWLINENEWLINE tp = instStats["categories"][category]["tpWeighted"]NEWLINE fn = instStats["categories"][category]["fnWeighted"]NEWLINENEWLINE # the number of false positive pixels for this categoryNEWLINE # same as aboveNEWLINE notIgnoredAndNotInCategory = [l for l in args.evalLabels ifNEWLINE not id2label[l].ignoreInEval and id2label[l].category != category]NEWLINE fp = np.longlong(confMatrix[notIgnoredAndNotInCategory, :][:, labelIds].sum())NEWLINENEWLINE # the denominator of the IOU scoreNEWLINE denom = (tp + fp + fn)NEWLINE if denom == 0:NEWLINE return float('nan')NEWLINENEWLINE # return IOUNEWLINE return float(tp) / denomNEWLINENEWLINENEWLINE# create a dictionary containing all relevant resultsNEWLINEdef createResultDict(confMatrix, classScores, classInstScores, categoryScores, categoryInstScores,NEWLINE perImageStats, args):NEWLINE # write JSON result fileNEWLINE wholeData = {}NEWLINE wholeData["confMatrix"] = confMatrix.tolist()NEWLINE wholeData["priors"] = {}NEWLINE wholeData["labels"] = {}NEWLINE for label in args.evalLabels:NEWLINE wholeData["priors"][id2label[label].name] = getPrior(label, confMatrix)NEWLINE wholeData["labels"][id2label[label].name] = labelNEWLINE wholeData["classScores"] = classScoresNEWLINE wholeData["classInstScores"] = classInstScoresNEWLINE wholeData["categoryScores"] = categoryScoresNEWLINE wholeData["categoryInstScores"] = categoryInstScoresNEWLINE wholeData["averageScoreClasses"] = getScoreAverage(classScores, args)NEWLINE wholeData["averageScoreInstClasses"] = getScoreAverage(classInstScores, args)NEWLINE wholeData["averageScoreCategories"] = getScoreAverage(categoryScores, args)NEWLINE wholeData["averageScoreInstCategories"] = getScoreAverage(categoryInstScores, args)NEWLINENEWLINE if perImageStats:NEWLINE wholeData["perImageScores"] = perImageStatsNEWLINENEWLINE return wholeDataNEWLINENEWLINENEWLINEdef writeJSONFile(wholeData, args):NEWLINE path = os.path.dirname(args.exportFile)NEWLINE ensurePath(path)NEWLINE writeDict2JSON(wholeData, args.exportFile)NEWLINENEWLINENEWLINE# Print confusion matrixNEWLINEdef printConfMatrix(confMatrix, args):NEWLINE # print lineNEWLINE print("\b{text:{fill}>{width}}".format(width=15, fill='-', text=" "), end=' ')NEWLINE for label in args.evalLabels:NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 2, fill='-', text=" "), end=' ')NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 3, fill='-', text=" "))NEWLINENEWLINE # print label namesNEWLINE print("\b{text:>{width}} |".format(width=13, text=""), end=' ')NEWLINE for label in args.evalLabels:NEWLINE print("\b{text:^{width}} |".format(width=args.printRow, text=id2label[label].name[0]), end=' ')NEWLINE 
print("\b{text:>{width}} |".format(width=6, text="Prior"))NEWLINENEWLINE # print lineNEWLINE print("\b{text:{fill}>{width}}".format(width=15, fill='-', text=" "), end=' ')NEWLINE for label in args.evalLabels:NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 2, fill='-', text=" "), end=' ')NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 3, fill='-', text=" "))NEWLINENEWLINE # print matrixNEWLINE for x in range(0, confMatrix.shape[0]):NEWLINE if (not x in args.evalLabels):NEWLINE continueNEWLINE # get prior of this labelNEWLINE prior = getPrior(x, confMatrix)NEWLINE # skip if label does not exist in ground truthNEWLINE if prior < 1e-9:NEWLINE continueNEWLINENEWLINE # print nameNEWLINE name = id2label[x].nameNEWLINE if len(name) > 13:NEWLINE name = name[:13]NEWLINE print("\b{text:>{width}} |".format(width=13, text=name), end=' ')NEWLINE # print matrix contentNEWLINE for y in range(0, len(confMatrix[x])):NEWLINE if (not y in args.evalLabels):NEWLINE continueNEWLINE matrixFieldValue = getMatrixFieldValue(confMatrix, x, y, args)NEWLINE print(getColorEntry(matrixFieldValue, args) + "\b{text:>{width}.2f} ".format(width=args.printRow,NEWLINE text=matrixFieldValue) + args.nocol,NEWLINE end=' ')NEWLINE # print priorNEWLINE print(getColorEntry(prior, args) + "\b{text:>{width}.4f} ".format(width=6, text=prior) + args.nocol)NEWLINE # print lineNEWLINE print("\b{text:{fill}>{width}}".format(width=15, fill='-', text=" "), end=' ')NEWLINE for label in args.evalLabels:NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 2, fill='-', text=" "), end=' ')NEWLINE print("\b{text:{fill}>{width}}".format(width=args.printRow + 3, fill='-', text=" "), end=' ')NEWLINENEWLINENEWLINE# Print intersection-over-union scores for all classes.NEWLINEdef printClassScores(scoreList, instScoreList, args):NEWLINE if (args.quiet):NEWLINE returnNEWLINE print(args.bold + "classes IoU nIoU" + args.nocol)NEWLINE print("--------------------------------")NEWLINE for label in args.evalLabels:NEWLINE if (id2label[label].ignoreInEval):NEWLINE continueNEWLINE labelName = str(id2label[label].name)NEWLINE iouStr = getColorEntry(scoreList[labelName], args) + "{val:>5.3f}".format(NEWLINE val=scoreList[labelName]) + args.nocolNEWLINE niouStr = getColorEntry(instScoreList[labelName], args) + "{val:>5.3f}".format(NEWLINE val=instScoreList[labelName]) + args.nocolNEWLINE print("{:<14}: ".format(labelName) + iouStr + " " + niouStr)NEWLINENEWLINENEWLINE# Print intersection-over-union scores for all categorys.NEWLINEdef printCategoryScores(scoreDict, instScoreDict, args):NEWLINE if (args.quiet):NEWLINE returnNEWLINE print(args.bold + "categories IoU nIoU" + args.nocol)NEWLINE print("--------------------------------")NEWLINE for categoryName in scoreDict:NEWLINE if all(label.ignoreInEval for label in category2labels[categoryName]):NEWLINE continueNEWLINE iouStr = getColorEntry(scoreDict[categoryName], args) + "{val:>5.3f}".format(NEWLINE val=scoreDict[categoryName]) + args.nocolNEWLINE niouStr = getColorEntry(instScoreDict[categoryName], args) + "{val:>5.3f}".format(NEWLINE val=instScoreDict[categoryName]) + args.nocolNEWLINE print("{:<14}: ".format(categoryName) + iouStr + " " + niouStr)NEWLINENEWLINENEWLINEclass EvalPixel():NEWLINE def __init__(self, args, predictionImgList = None, groundTruthImgList = None):NEWLINE self.args = argsNEWLINE self.predictionImgList = predictionImgListNEWLINE self.groundTruthImgList = groundTruthImgListNEWLINE if predictionImgList is None or groundTruthImgList is 
None:
            self.groundTruthImgList, self.predictionImgList = self.getDefaultData(self.args)

    # evaluate image in two lists
    def evaluateImgLists(self, predictionImgList, groundTruthImgList, args):
        if len(predictionImgList) != len(groundTruthImgList):
            printError("List of images for prediction and groundtruth are not of equal size.")
        confMatrix = generateMatrix(args)
        instStats = generateInstanceStats(args)
        perImageStats = {}
        nbPixels = 0

        if not args.quiet:
            print("Evaluating {} pairs of images...".format(len(predictionImgList)))

        # Evaluate all pairs of images and accumulate them into the confusion matrix
        for i in range(len(predictionImgList)):
            predictionImgFileName = predictionImgList[i]
            groundTruthImgFileName = groundTruthImgList[i]
            # print("Evaluate ", predictionImgFileName, "<>", groundTruthImgFileName)
            nbPixels += self.evaluatePair(predictionImgFileName, groundTruthImgFileName, confMatrix, instStats,
                                          perImageStats, args)

            if not args.quiet:
                print("\rImages Processed: {}".format(i + 1), end=' ')
                sys.stdout.flush()
        if not args.quiet:
            print("\n")

        # sanity check
        if confMatrix.sum() != nbPixels:
            printError(
                'Number of analyzed pixels and entries in confusion matrix disagree: confMatrix {}, pixels {}'.format(
                    confMatrix.sum(), nbPixels))

        # print confusion matrix
        if not args.quiet:
            printConfMatrix(confMatrix, args)

        # Calculate IOU scores on class level from matrix
        classScoreList = {}
        for label in args.evalLabels:
            labelName = id2label[label].name
            classScoreList[labelName] = getIouScoreForLabel(label, confMatrix, args)

        # Calculate instance IOU scores on class level from matrix
        classInstScoreList = {}
        for label in args.evalLabels:
            labelName = id2label[label].name
            classInstScoreList[labelName] = getInstanceIouScoreForLabel(label, confMatrix, instStats, args)

        # Print IOU scores
        if not args.quiet:
            print("")
            print("")
            printClassScores(classScoreList, classInstScoreList, args)
            iouAvgStr = getColorEntry(getScoreAverage(classScoreList, args), args) + "{avg:5.3f}".format(
                avg=getScoreAverage(classScoreList, args)) + args.nocol
            niouAvgStr = getColorEntry(getScoreAverage(classInstScoreList, args), args) + "{avg:5.3f}".format(
                avg=getScoreAverage(classInstScoreList, args)) + args.nocol
            print("--------------------------------")
            print("Score Average : " + iouAvgStr + " " + niouAvgStr)
            print("--------------------------------")
            print("")

        # Calculate IOU scores on category level from matrix
        categoryScoreList = {}
        for category in category2labels.keys():
            categoryScoreList[category] = getIouScoreForCategory(category, confMatrix, args)

        # Calculate instance IOU scores on category level from matrix
        categoryInstScoreList = {}
        for category in category2labels.keys():
            categoryInstScoreList[category] = getInstanceIouScoreForCategory(category, confMatrix, instStats, args)

        # Print IOU scores
        if not args.quiet:
            print("")
            printCategoryScores(categoryScoreList, categoryInstScoreList, args)
            iouAvgStr = getColorEntry(getScoreAverage(categoryScoreList, args), args) + "{avg:5.3f}".format(
                avg=getScoreAverage(categoryScoreList, args)) + args.nocol
            niouAvgStr = getColorEntry(getScoreAverage(categoryInstScoreList, args), args) + "{avg:5.3f}".format(
                avg=getScoreAverage(categoryInstScoreList, args)) + args.nocol
            print("--------------------------------")
            print("Score Average : " + iouAvgStr + " " + niouAvgStr)
            print("--------------------------------")
            print("")

        # write result file
        allResultsDict = createResultDict(confMatrix, classScoreList, classInstScoreList, categoryScoreList,
                                          categoryInstScoreList, perImageStats, args)
        writeJSONFile(allResultsDict, args)

        # return the collected results
        return allResultsDict

    # Main evaluation method. Evaluates pairs of prediction and ground truth
    # images which are passed as arguments.
    def evaluatePair(self, predictionImgFileName, groundTruthImgFileName, confMatrix, instanceStats, perImageStats, args):
        # Loading all resources for evaluation.
        try:
            predictionImg = Image.open(predictionImgFileName)
            predictionNp = np.array(predictionImg)
        except:
            printError("Unable to load " + predictionImgFileName)
        try:
            groundTruthImg = Image.open(groundTruthImgFileName)
            groundTruthNp = np.array(groundTruthImg)
        except:
            printError("Unable to load " + groundTruthImgFileName)
        # load ground truth instances, if needed
        if args.evalInstLevelScore:
            groundTruthInstanceImgFileName = groundTruthImgFileName.replace("labelIds", "instanceIds")
            try:
                instanceImg = Image.open(groundTruthInstanceImgFileName)
                instanceNp = np.array(instanceImg)
            except:
                printError("Unable to load " + groundTruthInstanceImgFileName)

        # Check for equal image sizes
        if predictionImg.size[0] != groundTruthImg.size[0]:
            printError(
                "Image widths of " + predictionImgFileName + " and " + groundTruthImgFileName + " are not equal.")
        if predictionImg.size[1] != groundTruthImg.size[1]:
            printError(
                "Image heights of " + predictionImgFileName + " and " + groundTruthImgFileName + " are not equal.")
        if len(predictionNp.shape) != 2:
            printError("Predicted image has multiple channels.")

        imgWidth = predictionImg.size[0]
        imgHeight = predictionImg.size[1]
        nbPixels = imgWidth * imgHeight

        # Evaluate images
        if CSUPPORT:
            # using cython
            confMatrix = addToConfusionMatrix.cEvaluatePair(predictionNp, groundTruthNp, confMatrix, args.evalLabels)
        else:
            # the slower python way
            for (groundTruthImgPixel, predictionImgPixel) in izip(groundTruthImg.getdata(), predictionImg.getdata()):
                if groundTruthImgPixel not in args.evalLabels:
                    printError("Unknown label with id {:}".format(groundTruthImgPixel))

                confMatrix[groundTruthImgPixel][predictionImgPixel] += 1

        if args.evalInstLevelScore:
            # Generate category masks
            categoryMasks = {}
            for category in instanceStats["categories"]:
                categoryMasks[category] = np.in1d(predictionNp,
                                                  instanceStats["categories"][category]["labelIds"]).reshape(
                    predictionNp.shape)

            instList = np.unique(instanceNp[instanceNp > 1000])
            for instId in instList:
                labelId = int(instId / 1000)
                label = id2label[labelId]
                if label.ignoreInEval:
                    continue

                mask = instanceNp == instId
                instSize = np.count_nonzero(mask)

                tp = np.count_nonzero(predictionNp[mask] == labelId)
                fn = instSize - tp

                weight = args.avgClassSize[label.name] / float(instSize)
                tpWeighted = float(tp) * weight
                fnWeighted = float(fn) * weight

                instanceStats["classes"][label.name]["tp"] += tp
                instanceStats["classes"][label.name]["fn"] += fn
                instanceStats["classes"][label.name]["tpWeighted"] += tpWeighted
                instanceStats["classes"][label.name]["fnWeighted"] += fnWeighted

                category = label.category
                if category in instanceStats["categories"]:
                    catTp = np.count_nonzero(np.logical_and(mask, categoryMasks[category]))
                    catFn = instSize - catTp

                    catTpWeighted = float(catTp) * weight
                    catFnWeighted = float(catFn) * weight

                    instanceStats["categories"][category]["tp"] += catTp
                    instanceStats["categories"][category]["fn"] += catFn
                    instanceStats["categories"][category]["tpWeighted"] += catTpWeighted
                    instanceStats["categories"][category]["fnWeighted"] += catFnWeighted

        if args.evalPixelAccuracy:
            notIgnoredLabels = [l for l in args.evalLabels if not id2label[l].ignoreInEval]
            notIgnoredPixels = np.in1d(groundTruthNp, notIgnoredLabels, invert=True).reshape(groundTruthNp.shape)
            erroneousPixels = np.logical_and(notIgnoredPixels, (predictionNp != groundTruthNp))
            perImageStats[predictionImgFileName] = {}
            perImageStats[predictionImgFileName]["nbNotIgnoredPixels"] = np.count_nonzero(notIgnoredPixels)
            perImageStats[predictionImgFileName]["nbCorrectPixels"] = np.count_nonzero(erroneousPixels)

        return nbPixels

    # launch the process
    def run(self):
        self.evaluateImgLists(self.predictionImgList, self.groundTruthImgList, self.args)

    # get the default data
    def getDefaultData(self, args):
        groundTruthImgList, predictionImgList = [], []
        groundTruthImgList = glob.glob(args.groundTruthSearch)
        if not groundTruthImgList:
            printError("Cannot find any ground truth images to use for evaluation. Searched for: {}".format(
                args.groundTruthSearch))
        # get the corresponding prediction for each ground truth image
        for gt in groundTruthImgList:
            predictionImgList.append(getPrediction(args, gt))
        return groundTruthImgList, predictionImgList


class CityScapeEvaluator(object):

    def evaluate(self, pred_dir=None, gt_dir=None):
        """
        :param pred_dir: directory of model output results (must be consistent with the val directory)
        :param gt_dir: directory of cityscape data (root)
        :return:
        """
        pred_path = pred_dir
        data_path = gt_dir
        print("evaluate the result...")
        args = CArgs(data_path=data_path, out_path=data_path, predict_path=pred_path)
        ob = EvalPixel(args)
        ob.run()


if __name__ == '__main__':
    # python cityscape_evaluator.py --gt_dir ~/DataSet/CityScape/gtFine/val
    # --pred_dir ~/Projects/PyTorchCV/val/results/seg/cityscape/test_dir/image/label
    parser = argparse.ArgumentParser()
    parser.add_argument('--gt_dir', default=None, type=str,
                        dest='gt_dir', help='The directory of ground truth.')
    parser.add_argument('--pred_dir', default=None, type=str,
                        dest='pred_dir', help='The directory of predicted labels.')

    args = parser.parse_args()

    cityscape_evaluator = CityScapeEvaluator()
    cityscape_evaluator.evaluate(pred_dir=args.pred_dir, gt_dir=args.gt_dir)
# Copyright 2021 University College London. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras objects registry.

Keras Declarative maintains its own object registry. There are a few
differences with respect to the Keras registry:

  * It includes non-serializable objects such as callbacks.
  * It does not prepend package prefixes to object names.
  * It supports objects of type `ObjectConfig` as identifiers.
"""

import inspect

import tensorflow as tf

from keras_declarative import config as config_module
from keras_declarative import hyperparams
from keras_declarative import predicates
from keras_declarative import util


def get_list(get_fn):
  """Returns a function that retrieves a list of objects.

  Args:
    get_fn: The get function to be used for individual identifiers.

  Returns:
    A function that retrieves an object or a list of objects.
  """
  def get_list_fn(identifier):
    """Retrieves a list of objects.

    Args:
      identifier: An object identifier. Must be a string, a dictionary, an
        `ObjectConfig` or `None`.

    Returns:
      A list of Keras objects as class instances.
    """
    if isinstance(identifier, list):
      return [get_fn(ident) for ident in identifier]
    return get_fn(identifier)
  return get_list_fn


def get_nest(get_fn):
  """Returns a function that retrieves a nested structure of objects.

  Nests include lists and dictionaries.

  Args:
    get_fn: The get function to be used for individual identifiers.

  Returns:
    A function that retrieves an object or a nested structure of objects.
  """
  def get_nest_fn(identifier):
    """Retrieves a nested structure of objects.

    Args:
      identifier: An object identifier. Must be a string, a dictionary, an
        `ObjectConfig` or `None`.

    Returns:
      A nested structure of Keras objects as class instances.
    """
    if isinstance(identifier, hyperparams.ParamsDict):
      identifier = identifier.as_dict()
    def _parse_nest(nest):
      if is_object_config(nest):
        return get_fn(nest)
      if isinstance(nest, dict):
        return {key: _parse_nest(value) for key, value in nest.items()}
      if isinstance(nest, list):
        return [_parse_nest(value) for value in nest]
      return get_fn(nest)
    return _parse_nest(identifier)
  return get_nest_fn


def get_callback(identifier):
  """Retrieve a Keras callback as a class instance.

  Args:
    identifier: A callback identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A Keras callback as a class instance.
  """
  return _get(identifier, _CALLBACK_OBJECTS, 'callback')


def get_layer(identifier):
  """Retrieve a Keras layer as a class instance.

  Args:
    identifier: A layer identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A Keras layer as a class instance.
  """
  return _get(identifier, _LAYER_OBJECTS, 'layer')


def get_loss(identifier):
  """Retrieve a Keras loss as a class instance.

  Args:
    identifier: A loss identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A Keras loss as a class instance.
  """
  return _get(identifier, _LOSS_OBJECTS, 'loss')


def get_metric(identifier):
  """Retrieve a Keras metric as a class instance.

  Args:
    identifier: A metric identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A Keras metric as a class instance.
  """
  return _get(identifier, _METRIC_OBJECTS, 'metric')


def get_optimizer(identifier):
  """Retrieve a Keras optimizer as a class instance.

  Args:
    identifier: An optimizer identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A Keras optimizer as a class instance.
  """
  return _get(identifier, _OPTIMIZER_OBJECTS, 'optimizer')


def get_predicate(identifier):
  """Retrieve a predicate as a class instance.

  Args:
    identifier: A predicate identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A predicate as a class instance.
  """
  return _get(identifier, _PREDICATE_OBJECTS, 'predicate')


def get_strategy(identifier):
  """Retrieve a TF distribution strategy as a class instance.

  Args:
    identifier: A strategy identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.

  Returns:
    A TF distribution strategy as a class instance.
  """
  return _get(identifier, _STRATEGY_OBJECTS, 'strategy')


def _get(identifier, objects, objtype):
  """Retrieve an object as a class instance.

  Args:
    identifier: An object identifier. Must be a string, a dictionary, an
      `ObjectConfig` or `None`.
    objects: A dictionary with the registered objects.
    objtype: A string with the type of object being retrieved. This is only
      used to format error messages.

  Returns:
    An instance of the object identified by `identifier`.

  Raises:
    ValueError: If the identifier is invalid.
    RuntimeError: If an error occurs while initializing the object.
  """
  # If object is an external object, don't try to resolve it.
  if isinstance(identifier, util.ExternalObject):
    return identifier

  if isinstance(identifier, config_module.ObjectConfig):
    identifier = identifier.as_dict()

  if not identifier:  # Might be `None` or an empty dict.
    return None

  class_name, config = class_and_config_for_serialized_object(identifier)

  if class_name not in objects:
    raise ValueError(f"No known {objtype} with name: {class_name}")
  obj = objects[class_name]

  try:
    return obj(**config)
  except Exception as e:
    raise RuntimeError(
        f"An error occurred while initializing {class_name} with parameters: "
        f"{config}") from e


def class_and_config_for_serialized_object(identifier):
  """Returns the class name and config for a serialized object.

  Args:
    identifier: An object identifier. Must be a string, a dictionary or an
      `ObjectConfig`.

  Returns:
    A tuple containing the class name and its keyword arguments.

  Raises:
    ValueError: If the identifier is invalid.
  """
  if isinstance(identifier, config_module.ObjectConfig):
    identifier = identifier.as_dict()

  if isinstance(identifier, str):
    class_name, config = identifier, {}

  elif isinstance(identifier, dict):
    if 'class_name' not in identifier or 'config' not in identifier:
      raise ValueError(
          f"Invalid identifier: {identifier}. Value is not a valid "
          f"configuration dictionary.")
    class_name = identifier['class_name']
    config = identifier['config']

  else:
    raise ValueError(
        f"Invalid identifier: {identifier}. Value must be a string, a "
        f"dictionary or an `ObjectConfig`.")

  return class_name, config


def is_object_config(config):
  """Check if input is a valid object configuration dict.

  Args:
    config: The object to check.

  Returns:
    True if input is a valid object configuration dict, false otherwise.
  """
  # A str or None are valid object configs.
  if isinstance(config, (str, type(None))):
    return True

  # Otherwise, must be a dict or an object of type `ParamsDict`.
  if not isinstance(config, (dict, hyperparams.ParamsDict)):
    return False

  # If a dict, must have two keys: class_name and config.
  d = config.as_dict() if isinstance(config, hyperparams.ParamsDict) else config
  if set(d.keys()) != {'class_name', 'config'}:
    return False

  return True


def _find_objects(modules, objtype):
  """Finds objects of a certain type on the given modules.

  Args:
    modules: A list of modules to search for objects.
    objtype: The type of objects to be searched for.

  Returns:
    A dictionary containing the found objects.
  """
  objects = {}
  for module in modules:
    members = inspect.getmembers(module)
    for name, value in members:
      if inspect.isclass(value) and issubclass(value, objtype):
        objects[name] = value
  return objects


_CALLBACK_MODULES = [
    tf.keras.callbacks
]

_LAYER_MODULES = [
    tf.keras.layers
]

_LOSS_MODULES = [
    tf.keras.losses,
]

_METRIC_MODULES = [
    tf.keras.metrics,
]

_OPTIMIZER_MODULES = [
    tf.keras.optimizers
]

_PREDICATE_MODULES = [
    predicates
]

_STRATEGY_MODULES = [
    tf.distribute
]


# Try to discover objects from TensorFlow MRI, if it is installed.
try:
  import tensorflow_mri as tfmri
  _CALLBACK_MODULES.append(tfmri.callbacks)
  _LAYER_MODULES.append(tfmri.layers)
  _LOSS_MODULES.append(tfmri.losses)
  _METRIC_MODULES.append(tfmri.metrics)
except ImportError:
  pass


# Try to discover objects from TF Playground, if it is installed.
try:
  import tf_playground as tfpg
  _CALLBACK_MODULES.append(tfpg.callbacks)
  _LAYER_MODULES.append(tfpg.layers)
  _LOSS_MODULES.append(tfpg.losses)
  _METRIC_MODULES.append(tfpg.metrics)
except ImportError:
  pass


_CALLBACK_OBJECTS = None
_LAYER_OBJECTS = None
_LOSS_OBJECTS = None
_METRIC_OBJECTS = None
_OPTIMIZER_OBJECTS = None
_PREDICATE_OBJECTS = None
_STRATEGY_OBJECTS = None


def discover_objects(custom_modules=None):
  """Discover Keras objects.

  By default, this function searches for Keras objects in core TensorFlow and
  TensorFlow MRI (if installed).

  Args:
    custom_modules: A list of custom modules to be searched for Keras objects.
  """
  global _CALLBACK_OBJECTS
  global _LAYER_OBJECTS
  global _LOSS_OBJECTS
  global _METRIC_OBJECTS
  global _OPTIMIZER_OBJECTS
  global _PREDICATE_OBJECTS
  global _STRATEGY_OBJECTS

  custom_modules = custom_modules or []

  _CALLBACK_OBJECTS = _find_objects(_CALLBACK_MODULES + custom_modules,
                                    tf.keras.callbacks.Callback)

  _LAYER_OBJECTS = _find_objects(_LAYER_MODULES + custom_modules,
                                 tf.keras.layers.Layer)

  _LOSS_OBJECTS = _find_objects(_LOSS_MODULES + custom_modules,
                                tf.keras.losses.Loss)

  _METRIC_OBJECTS = _find_objects(_METRIC_MODULES + custom_modules,
                                  tf.keras.metrics.Metric)

  _OPTIMIZER_OBJECTS = _find_objects(_OPTIMIZER_MODULES + custom_modules,
                                     tf.keras.optimizers.Optimizer)

  _PREDICATE_OBJECTS = _find_objects(_PREDICATE_MODULES, predicates.Predicate)

  _STRATEGY_OBJECTS = _find_objects(_STRATEGY_MODULES, tf.distribute.Strategy)

discover_objects()
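

# --- Illustrative usage (not part of the original module) -------------------
# A minimal sketch of how the registry resolves identifiers once
# `discover_objects()` has run (it is called at import time above). The
# 'Dense' layer and its config are assumed examples, not an API contract:
# a plain string resolves by class name, a {'class_name', 'config'} dict
# also forwards constructor arguments.
if __name__ == '__main__':
  relu = get_layer('ReLU')  # resolve by plain class name
  dense = get_layer({'class_name': 'Dense',
                     'config': {'units': 8, 'activation': 'relu'}})
  print(type(relu).__name__, type(dense).__name__)  # -> ReLU Dense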
#!/usr/bin/env python3
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license

import argparse
import os
import logging
import pathlib

import path_to_kapture_localization  # noqa: F401
import kapture_localization.utils.logging
from kapture_localization.utils.pairsfile import get_ordered_pairs_from_file

import kapture_localization.utils.path_to_kapture  # noqa: F401
import kapture
import kapture.utils.logging
from kapture.io.csv import table_to_file

logger = kapture_localization.utils.logging.getLogger()


def slice_pairsfile(pairsfile_path: str,
                    output_path: str,
                    topk: int,
                    threshold: float,
                    startk: int,
                    skip_if_na: bool):
    logger.info('slice_pairsfile...')
    similarity_dict = get_ordered_pairs_from_file(pairsfile_path)

    # apply topk override + skip_if_na
    image_pairs = []
    for name_query, paired_images in sorted(similarity_dict.items()):
        paired_images_threshold = [x for x in paired_images if x[1] >= threshold]
        if startk + topk > len(paired_images_threshold):
            logger.debug(
                f'image {name_query} has {len(paired_images_threshold)} pairs, '
                f'less than topk={topk} (with startk={startk})')
            if skip_if_na:
                logger.debug(f'skipping {name_query}')
                continue
        paired_images_threshold = paired_images_threshold[startk:startk + topk]
        for name_map, score in paired_images_threshold:
            image_pairs.append((name_query, name_map, score))

    if len(image_pairs) > 0:
        os.umask(0o002)
        p = pathlib.Path(output_path)
        os.makedirs(str(p.parent.resolve()), exist_ok=True)
        with open(output_path, 'w') as fid:
            table_to_file(fid, image_pairs, header='# query_image, map_image, score')
    else:
        logger.info('no pairs written')
    logger.info('all done')


def slice_pairsfile_command_line():
    parser = argparse.ArgumentParser(description='Apply topk override / threshold on a pairsfile',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser_verbosity = parser.add_mutually_exclusive_group()
    parser_verbosity.add_argument('-v', '--verbose', nargs='?', default=logging.WARNING, const=logging.INFO,
                                  action=kapture.utils.logging.VerbosityParser,
                                  help='verbosity level (debug, info, warning, critical, ... or int value) [warning]')
    parser_verbosity.add_argument('-q', '--silent', '--quiet',
                                  action='store_const', dest='verbose', const=logging.CRITICAL)
    parser.add_argument('-i', '--input', required=True, help='path to input pairsfile')
    parser.add_argument('-o', '--output', required=True, help='path to output pairsfile')
    parser.add_argument('--topk',
                        default=float('inf'),
                        type=int,
                        help='override the pairsfile topk with this one (must be less than or equal to the original)')
    parser.add_argument('--threshold', type=float, default=0,
                        help='the minimum score threshold for pairs to be used')
    parser.add_argument('--startk',
                        default=0,
                        type=int,
                        help='start position of topk')
    parser.add_argument('--skip-if-na', action='store_true', default=False,
                        help='skip a query image if startk + topk is greater than the number of '
                             'available pairs (i.e. n/a, not available)')
    args = parser.parse_args()
    logger.setLevel(args.verbose)
    if args.verbose <= logging.DEBUG:
        # also let kapture express its logs
        kapture.utils.logging.getLogger().setLevel(args.verbose)

    logger.debug('kapture_slice_pairsfile.py \\\n' + ''.join(['\n\t{:13} = {}'.format(k, v)
                                                              for k, v in vars(args).items()]))
    slice_pairsfile(args.input, args.output, args.topk, args.threshold, args.startk, args.skip_if_na)


if __name__ == '__main__':
    slice_pairsfile_command_line()
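

# --- Illustrative programmatic use (not part of the original script) --------
# File names below are assumptions; the same effect is achieved on the
# command line with:
#   python kapture_slice_pairsfile.py -i pairs.txt -o pairs_top20.txt \
#       --topk 20 --threshold 0.5 --skip-if-na
def _example_slice():  # hypothetical helper, shown for illustration only
    slice_pairsfile(pairsfile_path='pairs.txt',
                    output_path='pairs_top20.txt',
                    topk=20,
                    threshold=0.5,
                    startk=0,
                    skip_if_na=True)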
# Generated by Django 3.0 on 2020-04-21 20:13

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Hospital',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=254, verbose_name='Nome')),
                ('city', models.CharField(max_length=254, verbose_name='Cidade')),
                ('phonenumber', models.CharField(max_length=16, verbose_name='Telefone')),
                ('email', models.EmailField(max_length=254, verbose_name='E-mail')),
            ],
        ),
        migrations.CreateModel(
            name='Patient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=254, verbose_name='Nome')),
                ('birthday', models.DateField(verbose_name='Data de Nascimento')),
                ('airways', models.CharField(choices=[('VM', 'Ventilação Mecânica'), ('AA', 'Ar Ambiente'), ('VNI', 'Ventilação não Invasiva')], default='AA', max_length=24, verbose_name='Vias Aéreas')),
                ('status', models.CharField(choices=[('S', 'Suspeito'), ('C', 'Confirmado'), ('D', 'Descartado')], default='S', max_length=10, verbose_name='Status COVID')),
                ('hospitalization_date', models.DateField(verbose_name='Data de Internação')),
                ('departure_date', models.DateField(verbose_name='Data de Saída')),
                ('cns', models.CharField(blank=True, default='', max_length=30, verbose_name='Carteira Nacional do SUS')),
                ('sisreg', models.CharField(blank=True, default='', max_length=30, verbose_name='Número no sistema Sisreg')),
                ('departure_reason', models.CharField(choices=[('A', 'Alta'), ('O', 'Óbito')], default='A', max_length=5, verbose_name='Motivo da Saída')),
                ('hospital', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hospitals.Hospital', verbose_name='Hospital')),
            ],
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models

import wagtail.core.fields


class Migration(migrations.Migration):

    dependencies = [("puput", "0001_initial")]

    operations = [
        migrations.AlterField(
            model_name="blogpage",
            name="description",
            field=models.CharField(
                max_length=255,
                help_text="The blog description that will appear under the title.",
                verbose_name="Description",
                blank=True,
            ),
        ),
        migrations.AlterField(
            model_name="category",
            name="description",
            field=models.CharField(max_length=500, verbose_name="Description", blank=True),
        ),
        migrations.AlterField(
            model_name="category",
            name="name",
            field=models.CharField(max_length=80, unique=True, verbose_name="Category name"),
        ),
        migrations.AlterField(
            model_name="category",
            name="parent",
            field=models.ForeignKey(
                to="puput.Category",
                related_name="children",
                null=True,
                verbose_name="Parent category",
                blank=True,
                on_delete=models.SET_NULL,
            ),
        ),
        migrations.AlterField(
            model_name="entrypage",
            name="excerpt",
            field=wagtail.core.fields.RichTextField(
                help_text="Entry excerpt to be displayed on entries list. If this field is not filled, a truncated version of the body text will be used.",
                verbose_name="excerpt",
                blank=True,
            ),
        ),
    ]
import numpy as np  # linear algebra
import pandas as pd  # data processing
import datetime as dt  # date and time processing functions
import matplotlib.pyplot as plt  # basic plotting
import matplotlib.dates as mdates  # date processing in matplotlib
from matplotlib.offsetbox import AnchoredText
import mpld3
plt.style.use('ggplot')  # use ggplot style

# read in the data from the provided csv file
df = pd.read_csv('./static/seaice.csv')

# drop the 'Source Data' column as it obscures more useful columns and doesn't tell us much
df.drop('Source Data', axis=1, inplace=True)

# convert the provided 3 column date format to datetime format and set it as the index
df['Date'] = pd.to_datetime(df[['Year', 'Month', 'Day']])
df.index = df['Date'].values

# split according to hemisphere, as we are expecting different trends for each
north = df[df['hemisphere'] == 'north']
south = df[df['hemisphere'] == 'south']


def dailyExtent():
    fig = plt.figure(figsize=(9, 6))
    plt.subplot(2, 1, 1)
    plt.plot(north.index, north['Extent'], label='Northern Hemisphere')
    plt.plot(south.index, south['Extent'], label='Southern Hemisphere')

    # add plot legend and titles
    plt.legend(bbox_to_anchor=(0., -.362, 1., .102), loc=3, ncol=2,
               mode="expand", borderaxespad=0.)
    plt.ylabel('Sea ice extent (10^6 sq km)')
    plt.xlabel('Date')
    plt.title('Daily sea-ice extent')
    # saving to html
    save_html("dailyextent", fig)


def annualAverage():
    # resample raw data into annual averages
    northyear = north.resample('12M').mean()
    southyear = south.resample('12M').mean()

    # remove the initial and final items as they are averaged incorrectly (their indexes also look unreliable)
    northyear = northyear[1:-1]
    southyear = southyear[1:-1]

    fig = plt.figure(figsize=(9, 6))
    plt.subplot(2, 1, 1)
    plt.plot(northyear.index, northyear['Extent'], marker='.', label='Northern Hemisphere')
    plt.plot(southyear.index, southyear['Extent'], marker='.', label='Southern Hemisphere')

    # add plot legend and titles
    plt.legend(bbox_to_anchor=(0., -.362, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)
    plt.ylabel('Sea ice extent (10^6 sq km)')
    plt.xlabel('Date')
    plt.title('Annual average sea-ice extent')
    # saving to html
    save_html("annualaverage", fig)


def annualChange():
    # define date range to plot between
    start = 1978
    end = dt.datetime.now().year + 1

    # define plot
    f, axarr = plt.subplots(2, sharex=True, figsize=(9, 5))

    # organise plot axes (set x axis to months only and cycle colours according to gradient)
    month_fmt = mdates.DateFormatter('%b')
    axarr[0].xaxis.set_major_formatter(month_fmt)
    axarr[0].set_prop_cycle(plt.cycler('color',
                                       plt.cm.winter(np.linspace(0, 1, len(range(start, end))))))
    axarr[1].set_prop_cycle(plt.cycler('color',
                                       plt.cm.winter(np.linspace(0, 1, len(range(start, end))))))

    # add plot legend and titles
    axarr[0].set_ylabel('Sea ice extent (10^6 sq km)')
    axarr[1].set_ylabel('Sea ice extent (10^6 sq km)')
    axarr[0].set_xlabel('Month (NORTHERN HEMISPHERE)')
    axarr[1].set_xlabel('Month (SOUTHERN HEMISPHERE)')
    axarr[0].set_title('Annual change in sea-ice extent')
    axarr[0].add_artist(AnchoredText('Northern Hemisphere', loc=3))
    axarr[1].add_artist(AnchoredText('Southern Hemisphere', loc=2))

    # loop for every year between the start year and current
    for year in range(start, end):
        # create a new dataframe for each year,
        # and set the year to 1972 so all are plotted on the same axis
        nyeardf = north[['Extent', 'Day', 'Month']][north['Year'] == year]
        nyeardf['Year'] = 1972
        nyeardf['Date'] = pd.to_datetime(nyeardf[['Year', 'Month', 'Day']])
        nyeardf.index = nyeardf['Date'].values

        syeardf = south[['Extent', 'Day', 'Month']][south['Year'] == year]
        syeardf['Year'] = 1972
        syeardf['Date'] = pd.to_datetime(syeardf[['Year', 'Month', 'Day']])
        syeardf.index = syeardf['Date'].values

        # plot each year individually
        axarr[0].plot(nyeardf.index, nyeardf['Extent'], label=year)
        axarr[1].plot(syeardf.index, syeardf['Extent'])
    save_html("annualchange", f)


def save_html(filename, fig):
    # convert the figure to html and write it into the templates folder
    html_str = mpld3.fig_to_html(fig)
    with open("./templates/{}.html".format(filename), "w") as html_file:
        html_file.write(html_str)
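

# --- Illustrative entry point (an assumption; the original script only ------
# defines the plotting functions and never calls them). Renders all three
# figures into the ./templates/ folder used by save_html above.
if __name__ == '__main__':
    dailyExtent()
    annualAverage()
    annualChange()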
import asyncio
import logging
import pickle

import time
from functools import partial
from typing import Callable, Any, TypeVar

from aio_pika.exchange import ExchangeType
from aio_pika.channel import Channel
from aio_pika.exceptions import UnroutableError
from aio_pika.message import (
    Message, IncomingMessage, DeliveryMode, ReturnedMessage
)
from .base import Proxy, Base

log = logging.getLogger(__name__)

R = TypeVar('R')
P = TypeVar('P')
CallbackType = Callable[[P], R]


class RPC(Base):
    # note: "futures" and "queues" are included here because __init__ assigns
    # them; leaving them out of __slots__ would break attribute assignment.
    __slots__ = ("channel", "loop", "proxy", "result_queue", "futures",
                 "result_consumer_tag", "routes", "queues", "consumer_tags",
                 "dlx_exchange",)

    DLX_NAME = 'rpc.dlx'
    DELIVERY_MODE = DeliveryMode.NOT_PERSISTENT

    __doc__ = """
    Remote Procedure Call helper.

    Create an instance ::

        rpc = await RPC.create(channel)

    Register a python function ::

        # RPC instance passes only keyword arguments
        def multiply(*, x, y):
            return x * y

        await rpc.register("multiply", multiply)

    Call the function through the proxy ::

        assert await rpc.proxy.multiply(x=2, y=3) == 6

    Call the function explicitly ::

        assert await rpc.call('multiply', dict(x=2, y=3)) == 6

    """

    def __init__(self, channel: Channel):
        self.channel = channel
        self.loop = self.channel.loop
        self.proxy = Proxy(self.call)
        self.result_queue = None
        self.futures = dict()
        self.result_consumer_tag = None
        self.routes = {}
        self.queues = {}
        self.consumer_tags = {}
        self.dlx_exchange = None

    def create_future(self) -> asyncio.Future:
        future = self.loop.create_future()
        future_id = id(future)
        self.futures[future_id] = future
        future.add_done_callback(lambda f: self.futures.pop(future_id, None))
        return future

    def close(self) -> asyncio.Task:
        async def closer():
            nonlocal self

            if self.result_queue is None:
                return

            for future in self.futures.values():
                future.set_exception(asyncio.CancelledError)

            await self.result_queue.unbind(
                self.dlx_exchange, "",
                arguments={
                    "From": self.result_queue.name,
                    'x-match': 'any',
                }
            )

            await self.result_queue.cancel(self.result_consumer_tag)
            self.result_consumer_tag = None

            await self.result_queue.delete()
            self.result_queue = None

        return self.loop.create_task(closer())

    async def initialize(self, **kwargs):
        if self.result_queue is not None:
            return

        self.result_queue = await self.channel.declare_queue(None, **kwargs)

        self.dlx_exchange = await self.channel.declare_exchange(
            self.DLX_NAME,
            type=ExchangeType.HEADERS,
            auto_delete=True,
        )

        await self.result_queue.bind(
            self.dlx_exchange, "",
            arguments={
                "From": self.result_queue.name,
                'x-match': 'any',
            }
        )

        self.result_consumer_tag = await self.result_queue.consume(
            self.on_result_message, exclusive=True, no_ack=True
        )

        self.channel.add_on_return_callback(self.on_message_returned)

    @classmethod
    async def create(cls, channel: Channel, **kwargs) -> "RPC":
        """ Creates a new instance of :class:`aio_pika.patterns.RPC`.
        You should use this method instead of :func:`__init__`,
        because :func:`create` returns a coroutine and performs the
        asynchronous initialization.

        :param channel: initialized instance of :class:`aio_pika.Channel`
        :returns: :class:`RPC`

        """
        rpc = cls(channel)
        await rpc.initialize(**kwargs)
        return rpc

    def on_message_returned(self, message: ReturnedMessage):
        correlation_id = int(
            message.correlation_id
        ) if message.correlation_id else None

        future = self.futures.pop(correlation_id, None)  # type: asyncio.Future

        if not future or future.done():
            log.warning("Unknown message was returned: %r", message)
            return

        future.set_exception(UnroutableError([message]))

    async def on_result_message(self, message: IncomingMessage):
        correlation_id = int(
            message.correlation_id
        ) if message.correlation_id else None

        future = self.futures.pop(correlation_id, None)  # type: asyncio.Future

        if future is None:
            log.warning("Unknown message: %r", message)
            return

        try:
            payload = self.deserialize(message.body)
        except Exception as e:
            log.error("Failed to deserialize response on message: %r", message)
            future.set_exception(e)
            return

        if message.type == 'result':
            future.set_result(payload)
        elif message.type == 'error':
            future.set_exception(payload)
        elif message.type == 'call':
            future.set_exception(
                asyncio.TimeoutError("Message timed-out", message)
            )
        else:
            future.set_exception(
                RuntimeError("Unknown message type %r" % message.type)
            )

    async def on_call_message(self, method_name: str, message: IncomingMessage):
        if method_name not in self.routes:
            log.warning("Method %r not registered in %r", method_name, self)
            return

        try:
            payload = self.deserialize(message.body)
            func = self.routes[method_name]

            result = await self.execute(func, payload)
            result = self.serialize(result)
            message_type = 'result'
        except Exception as e:
            result = self.serialize_exception(e)
            message_type = 'error'

        result_message = Message(
            result,
            delivery_mode=message.delivery_mode,
            correlation_id=message.correlation_id,
            timestamp=time.time(),
            type=message_type,
        )

        await self.channel.default_exchange.publish(
            result_message,
            message.reply_to,
            mandatory=False
        )

        message.ack()

    def serialize(self, data: Any) -> bytes:
        """ Serialize data to bytes.
        Uses `pickle` by default.
        Override this method if you want to change the serializer.

        :param data: Data which will be serialized
        :returns: bytes
        """
        return super().serialize(data)

    def deserialize(self, data: Any) -> Any:
        """ Deserialize data from bytes.
        Uses `pickle` by default.
        Override this method if you want to change the serializer.

        :param data: Data which will be deserialized
        :returns: :class:`Any`
        """
        return super().deserialize(data)

    def serialize_exception(self, exception: Exception) -> bytes:
        """ Serialize a python exception to bytes.

        :param exception: :class:`Exception`
        :return: bytes
        """
        return pickle.dumps(exception)

    async def execute(self, func: CallbackType, payload: P) -> R:
        """ Executes the rpc call. May be overridden. """
        return await func(**payload)

    async def call(self, method_name, kwargs: dict = None, *,
                   expiration: int = None, priority: int = 128,
                   delivery_mode: DeliveryMode = DELIVERY_MODE):

        """ Call a remote method and await the result.

        :param method_name: Name of the method
        :param kwargs: Method kwargs
        :param expiration:
            If not `None`, messages that stay in the queue longer than this
            will be returned and :class:`asyncio.TimeoutError` will be raised.
        :param priority: Message priority
        :param delivery_mode: Call message delivery mode
        :raises asyncio.TimeoutError: when the message expired
        :raises CancelledError: when :func:`RPC.cancel` was called
        :raises RuntimeError: internal error
        """

        future = self.create_future()

        message = Message(
            body=self.serialize(kwargs or {}),
            type='call',
            timestamp=time.time(),
            priority=priority,
            correlation_id=id(future),
            delivery_mode=delivery_mode,
            reply_to=self.result_queue.name,
            headers={
                'From': self.result_queue.name
            }
        )

        if expiration is not None:
            message.expiration = expiration

        await self.channel.default_exchange.publish(
            message, routing_key=method_name, mandatory=True
        )

        return await future

    async def register(self, method_name, func: CallbackType, **kwargs):
        """ Creates a queue named after the `method_name` argument, then
        subscribes to this queue.

        :param method_name: Method name
        :param func:
            target function. Function **MUST** accept only keyword arguments.
        :param kwargs: arguments which will be passed to `queue_declare`
        :raises RuntimeError:
            Function already registered in this :class:`RPC` instance
            or `method_name` already used.
        """
        arguments = kwargs.pop('arguments', {})
        arguments.update({
            'x-dead-letter-exchange': self.DLX_NAME,
        })

        kwargs['arguments'] = arguments

        queue = await self.channel.declare_queue(method_name, **kwargs)

        if func in self.consumer_tags:
            raise RuntimeError('Function already registered')

        if method_name in self.routes:
            raise RuntimeError(
                'Method name already used for %r' % self.routes[method_name]
            )

        self.consumer_tags[func] = await queue.consume(
            partial(self.on_call_message, method_name)
        )

        self.routes[method_name] = asyncio.coroutine(func)
        self.queues[func] = queue

    async def unregister(self, func):
        """ Cancels the subscription to the method queue.

        :param func: Function
        """
        if func not in self.consumer_tags:
            return

        consumer_tag = self.consumer_tags.pop(func)
        queue = self.queues.pop(func)

        await queue.cancel(consumer_tag)

        self.routes.pop(queue.name)
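

# --- Illustrative serializer override (not part of the original module) -----
# The serialize/deserialize hooks above are meant to be overridden; this is a
# minimal sketch swapping pickle for JSON. Note it is deliberately incomplete:
# error payloads still go through serialize_exception (pickle here), so a
# fully JSON-safe RPC would need to override that hook as well.
import json


class JsonRPC(RPC):

    def serialize(self, data: Any) -> bytes:
        # encode call arguments / results as UTF-8 JSON instead of pickle
        return json.dumps(data).encode('utf-8')

    def deserialize(self, data: Any) -> Any:
        return json.loads(data)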
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""FTRL"""
from mindspore.ops import functional as F, composite as C, operations as P
from mindspore.common import Tensor
import mindspore.common.dtype as mstype
from mindspore._checkparam import Validator as validator
from mindspore._checkparam import Rel
from .optimizer import Optimizer, _apply_decay, _grad_scale

_ftrl_opt = C.MultitypeFuncGraph("ftrl_opt")


@_ftrl_opt.register("Function", "Function", "Function", "Function", "Number", "Number", "Number", "Tensor", "Tensor",
                    "RowTensor", "Tensor", "Tensor", "Bool")
def _tensor_run_opt_with_sparse(opt, spars_opt, push, pull, l1, l2, lr_power, learning_rate, linear,
                                gradient, weight, moment, ps_parameter):
    """Apply sparse ftrl optimizer to the weight parameter when the gradient is sparse."""
    success = True
    indices = gradient.indices
    values = gradient.values
    if ps_parameter:
        op_shape = P.Shape()
        shapes = (op_shape(weight), op_shape(moment), op_shape(linear), op_shape(values), op_shape(indices))
        success = F.depend(success, pull(push((values, indices), shapes), weight))
    else:
        success = F.depend(success, spars_opt(weight, moment, linear, values, indices))
    return success


@_ftrl_opt.register("Function", "Function", "Function", "Function", "Number", "Number", "Number", "Tensor", "Tensor",
                    "Tensor", "Tensor", "Tensor", "Bool")
def _tensor_run_opt(opt, spars_opt, push, pull, l1, l2, lr_power, learning_rate, linear,
                    gradient, weight, moment, ps_parameter):
    """Apply ftrl optimizer to the weight parameter."""
    success = True
    if ps_parameter:
        op_shape = P.Shape()
        success = F.depend(success, pull(push((gradient, learning_rate, l1, l2, lr_power),
                                              (op_shape(weight), op_shape(moment), op_shape(linear))), weight))
    else:
        success = F.depend(success, opt(weight, moment, linear, gradient, learning_rate, l1, l2, lr_power))
    return success


def _check_param(initial_accum, lr_power, l1, l2, use_locking, prim_name=None):
    """Check param."""
    validator.check_value_type("initial_accum", initial_accum, [float], prim_name)
    validator.check_number("initial_accum", initial_accum, 0.0, Rel.GE, prim_name)

    validator.check_value_type("lr_power", lr_power, [float], prim_name)
    validator.check_number("lr_power", lr_power, 0.0, Rel.LE, prim_name)

    validator.check_value_type("l1", l1, [float], prim_name)
    validator.check_number("l1", l1, 0.0, Rel.GE, prim_name)

    validator.check_value_type("l2", l2, [float], prim_name)
    validator.check_number("l2", l2, 0.0, Rel.GE, prim_name)

    validator.check_value_type("use_locking", use_locking, [bool], prim_name)


class FTRL(Optimizer):
    """
    Implements the FTRL algorithm with the ApplyFtrl operator.

    FTRL is an online convex optimization algorithm that adaptively chooses its regularization function
    based on the loss functions. Refer to paper `Adaptive Bound Optimization for Online Convex Optimization
    <https://arxiv.org/abs/1002.4908>`_. Refer to paper `Ad Click Prediction: a View from the Trenches
    <https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf>`_ for an engineering perspective.

    Note:
        When separating parameter groups, the weight decay in each group will be applied to the parameters if the
        weight decay is positive. When not separating parameter groups, the `weight_decay` in the API will be applied
        to all of the parameters.

        To improve performance when using parameter groups, a customized order of parameters is supported.

        The sparse strategy is applied when the SparseGatherV2 operator is used in the forward network.
        The sparse feature is under continuous development. The sparse behavior is currently performed on the CPU.

    Args:
        params (Union[list[Parameter], list[dict]]): When the `params` is a list of `Parameter` which will be updated,
            the element in `params` should be class `Parameter`. When the `params` is a list of `dict`, the "params",
            "lr", "weight_decay" and "order_params" are the keys that can be parsed.

            - params: Required. The value should be a list of `Parameter`.

            - lr: Using different learning rates by separating parameters is currently not supported.

            - weight_decay: Optional. If "weight_decay" is in the keys, the value of the corresponding weight decay
              will be used. If not, the `weight_decay` in the API will be used.

            - order_params: Optional. If "order_params" is in the keys, the value should be the order of parameters
              and the order will be followed in the optimizer. There are no other keys in the `dict` and the
              parameters in the value of 'order_params' should be in one of the group parameters.

        initial_accum (float): The starting value for accumulators, must be zero or a positive value. Default: 0.1.
        learning_rate (float): The learning rate value, should be zero or positive; a dynamic learning rate is
            currently not supported. Default: 0.001.
        lr_power (float): Learning rate power controls how the learning rate decreases during training, must be less
            than or equal to zero. Use a fixed learning rate if lr_power is zero. Default: -0.5.
        l1 (float): l1 regularization strength, must be greater than or equal to zero. Default: 0.0.
        l2 (float): l2 regularization strength, must be greater than or equal to zero. Default: 0.0.
        use_locking (bool): If true, use locks for the update operation. Default: False.
        loss_scale (float): Value for the loss scale. It should be equal to or greater than 1.0. Default: 1.0.
        weight_decay (float): Weight decay value to multiply the weight, must be zero or a positive value.
            Default: 0.0.

    Inputs:
        - **grads** (tuple[Tensor]) - The gradients of `params` in the optimizer, the shape is the same as the
          `params` in the optimizer.

    Outputs:
        tuple[Parameter], the updated parameters, the shape is the same as `params`.

    Examples:
        >>> net = Net()
        >>> #1) All parameters use the same learning rate and weight decay
        >>> optim = nn.FTRL(params=net.trainable_params())
        >>>
        >>> #2) Use parameter groups and set different values
        >>> conv_params = list(filter(lambda x: 'conv' in x.name, net.trainable_params()))
        >>> no_conv_params = list(filter(lambda x: 'conv' not in x.name, net.trainable_params()))
        >>> group_params = [{'params': conv_params, 'weight_decay': 0.01},
        >>>                 {'params': no_conv_params},
        >>>                 {'order_params': net.trainable_params()}]
        >>> optim = nn.FTRL(group_params, learning_rate=0.1, weight_decay=0.0)
        >>> # The conv_params's parameters will use weight decay of 0.01.
        >>> # The no_conv_params's parameters will use the default weight decay of 0.0.
        >>> # The final parameter order that the optimizer follows is the value of 'order_params'.
        >>>
        >>> loss = nn.SoftmaxCrossEntropyWithLogits()
        >>> model = Model(net, loss_fn=loss, optimizer=optim)
    """
    def __init__(self, params, initial_accum=0.1, learning_rate=0.001, lr_power=-0.5, l1=0.0, l2=0.0,
                 use_locking=False, loss_scale=1.0, weight_decay=0.0):
        super(FTRL, self).__init__(learning_rate, params, weight_decay, loss_scale=loss_scale)
        if self.dynamic_lr or self.is_group_lr:
            raise ValueError('Dynamic learning rate or group learning rate is currently not supported.')
        _check_param(initial_accum, lr_power, l1, l2, use_locking, self.cls_name)
        self.moments = self.parameters.clone(prefix="moments", init=initial_accum)
        self.linear = self.parameters.clone(prefix="linear", init='zeros')
        self.l1 = l1
        self.l2 = l2
        self.lr_power = lr_power
        if not self.is_group:
            self.decay_flags = tuple((lambda: True)() for x in self.parameters)
        self.hyper_map = C.HyperMap()
        self.opt = P.ApplyFtrl(use_locking=use_locking)
        self.sparse_opt = P.FusedSparseFtrl(learning_rate, l1, l2, lr_power, use_locking=use_locking)
        self._ps_pull = P.Pull()
        self._ps_push = P.Push("Ftrl", [0, 1, 2])
        self._ps_push.add_prim_attr("lr", learning_rate)
        self._ps_push.add_prim_attr("l1", l1)
        self._ps_push.add_prim_attr("l2", l2)
        self._ps_push.add_prim_attr("lr_power", lr_power)

    def construct(self, grads):
        params = self.parameters
        moments = self.moments
        linear = self.linear
        grads = self.decay_weight(grads)
        grads = self.scale_grad(grads)
        lr = self.get_lr()

        success = self.map_(F.partial(_ftrl_opt, self.opt, self.sparse_opt, self._ps_push, self._ps_pull,
                                      self.l1, self.l2, self.lr_power, lr),
                            linear, grads, params, moments, self.ps_parameters)
        return success
# -*- coding: utf-8 -*-
# Copyright 2020-2021 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Cedric Serfon <[email protected]>, 2020
# - Eli Chadwick <[email protected]>, 2020
# - Martin Barisits <[email protected]>, 2020-2021
# - Benedikt Ziemons <[email protected]>, 2020

from __future__ import print_function

from rucio.api.permission import has_permission
from rucio.api.scope import list_scopes

from rucio.core.rse import get_rse_id
from rucio.core import dirac
from rucio.common.exception import AccessDenied
from rucio.common.utils import extract_scope


def add_files(lfns, issuer, ignore_availability):
    """
    Bulk add files:
    - Create the file and replica.
    - If it doesn't exist, create the dataset containing the file as well as a rule on the dataset on ANY sites.
    - Create all the ascendants of the dataset if they do not exist.

    :param lfns: List of lfn (dictionary {'lfn': <lfn>, 'rse': <rse>, 'bytes': <bytes>, 'adler32': <adler32>, 'guid': <guid>, 'pfn': <pfn>})
    :param issuer: The issuer account.
    :param ignore_availability: A boolean to ignore blocked sites.
    """
    scopes = list_scopes()
    dids = []
    rses = {}
    for lfn in lfns:
        scope, name = extract_scope(lfn['lfn'], scopes)
        dids.append({'scope': scope, 'name': name})
        rse = lfn['rse']
        if rse not in rses:
            rse_id = get_rse_id(rse=rse)
            rses[rse] = rse_id
        lfn['rse_id'] = rses[rse]

    # Check if the issuer can add dids and use skip_availability
    for rse in rses:
        rse_id = rses[rse]
        kwargs = {'rse': rse, 'rse_id': rse_id}
        if not has_permission(issuer=issuer, action='add_replicas', kwargs=kwargs):
            raise AccessDenied('Account %s cannot add file replicas on %s' % (issuer, rse))
        if not has_permission(issuer=issuer, action='skip_availability_check', kwargs=kwargs):
            ignore_availability = False

    # Check if the issuer can add the files
    kwargs = {'issuer': issuer, 'dids': dids}
    if not has_permission(issuer=issuer, action='add_dids', kwargs=kwargs):
        raise AccessDenied('Account %s cannot bulk add data identifiers' % (issuer))

    dirac.add_files(lfns=lfns, account=issuer, ignore_availability=ignore_availability, session=None)
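

# --- Illustrative payload (not part of the original module) -----------------
# Every value below is an assumption, shown only to document the dictionary
# shape that `add_files` expects for each entry of `lfns`.
EXAMPLE_LFNS = [{
    'lfn': '/belle/data/example.file.root',  # logical file name (scope is derived from it)
    'rse': 'EXAMPLE_RSE',                    # RSE the replica lives on
    'bytes': 1048576,                        # file size in bytes
    'adler32': '0cc737eb',                   # adler32 checksum
    'guid': '00000000-0000-0000-0000-000000000000',  # placeholder GUID
    'pfn': None,                             # optional physical file name
}]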
import sys
from esky import bdist_esky
from distutils.core import setup

# for freezing with esky
# uncomment the block you want to run depending on your freezer
# > python setup.py bdist_esky

# Using py2exe
#setup(
#    name = "example-app",
#    version = "0.2",
#    scripts = ["example.py"],
#    options = {"bdist_esky": {
#        "freezer_module": "py2exe",
#    }}
#)

# Using py2app
#setup(
#    name = "example-app",
#    version = "0.2",
#    scripts = ["example.py"],
#    options = {"bdist_esky": {
#        "freezer_module": "py2app"
#    }}
#)

# Using cx_Freeze
from esky.bdist_esky import Executable
setup(
    name='example-app',
    version='0.2',
    #executables=[Executable('example.py')],
    options={"bdist_esky": {
        "freezer_module": "cxfreeze"
    }},
    scripts=[Executable('example.py')],
)
import curses
import collections
import sys
import os
from time import sleep

# File syntax
#
# When there's not enough space for all elements, the UI will go into scroll mode.
#
# Syntax:
# script.py ui_example.txt
#
# An object is one line, split by ;
# The first part is the name, the second part is the shell action.
# Use the sample file to tweak colors.
# Valid colors are: black, red, green, yellow, blue, magenta, cyan, white
# Also valid are black2, red2, green2, ...; those are usually brighter versions.
#
# To run an inbuilt function, use an action as follows:
# Show version;function:Show_version
#
# To implement a quit button you can do so:
# Quit menu;quit
#
# For more information check out the github readme: https://github.com/DiscordDigital/ui.py/

def RunInbuiltFunction(function_name):
    if (function_name == "Show_version"):
        print("Running python version " + sys.version)

def generate_sample_file():
    sample_file = open('sample_ui.txt', 'w')
    sample_file.write(
        """menutext=Sample UI!\nmaxh=3\ntitlecolor=white\nwindow_bg=blue\nobjcolor_text=white\nobjcolor_bg=blue\nobjcolor_sel_text=black\nobjcolor_sel_bg=white\nStart Nano;nano\nShow date;date\nCredits;echo Made by discord.digital\nShow Python version;function:Show_version\nQuit;quit"""
    )
    sample_file.close()

if len(sys.argv) != 2:
    print("Specify ui file")
    print("Get started by typing: " + sys.argv[0] + " sample")
    exit()
elif (sys.argv[1] == "sample"):
    generate_sample_file()
    print("Created sample_ui.txt")
    print("Use it like this: " + sys.argv[0] + " sample_ui.txt")
    exit(0)
else:
    if not os.path.isfile(sys.argv[1]):
        print("File not found!")
        exit()

screen = curses.initscr()
curses.curs_set(0)
curses.noecho()
screen.keypad(1)
curses.start_color()
curses.mousemask(1)

def convert_text_to_color(text):
    # Map a color name from the ui file to a curses color number.
    colors = {
        "BLACK": 0, "RED": 1, "GREEN": 2, "YELLOW": 3,
        "BLUE": 4, "MAGENTA": 5, "CYAN": 6, "WHITE": 7,
        "BLACK2": 8, "RED2": 9, "GREEN2": 10, "YELLOW2": 11,
        "BLUE2": 12, "MAGENTA2": 13, "CYAN2": 14, "WHITE2": 15,
    }
    # Default to white when the color name is unknown.
    return colors.get(text.upper(), 7)

objects = collections.defaultdict(dict)
object_i = 0
menutext = "Menu"
maxh = 3
titlecolor = "white"
window_bg = "black"
objcolor_text = "white"
objcolor_bg = "black"
objcolor_sel_text = "black"
objcolor_sel_bg = "white"

fp = open(sys.argv[1])
for line in fp:
    if line.startswith("menutext="):
        menutext = line.replace('menutext=', '').replace('\n', '')
    elif line.startswith("maxh="):
        maxh = line.replace('maxh=', '').replace('\n', '')
    elif line.startswith("titlecolor="):
        titlecolor = line.replace('titlecolor=', '').replace('\n', '')
    elif line.startswith("window_bg="):
        window_bg = line.replace('window_bg=', '').replace('\n', '')
    elif line.startswith("objcolor_text="):
        objcolor_text = line.replace('objcolor_text=', '').replace('\n', '')
    elif line.startswith("objcolor_bg="):
        objcolor_bg = line.replace('objcolor_bg=', '').replace('\n', '')
    elif line.startswith("objcolor_sel_text="):
        objcolor_sel_text = line.replace('objcolor_sel_text=', '').replace('\n', '')
    elif line.startswith("objcolor_sel_bg="):
        objcolor_sel_bg = line.replace('objcolor_sel_bg=', '').replace('\n', '')
    else:
        if (line == '\n'):
            break
        interface = line.split(';')
        objects[object_i]['Label'] = interface[0].replace('\n', '')
        objects[object_i]['Action'] = interface[1].replace('\n', '')
        object_i = object_i + 1
fp.close()

colorcode = convert_text_to_color(titlecolor)
colorcode_bg = convert_text_to_color(window_bg)
curses.init_pair(2, colorcode, colorcode_bg)
colorcode_text = convert_text_to_color(objcolor_text)
colorcode_bg = convert_text_to_color(objcolor_bg)
curses.init_pair(3, colorcode_text, colorcode_bg)
colorcode_text = convert_text_to_color(objcolor_sel_text)
colorcode_bg = convert_text_to_color(objcolor_sel_bg)
curses.init_pair(4, colorcode_text, colorcode_bg)

maxh = int(maxh)

screen.bkgd(' ', curses.color_pair(2))

_, x = screen.getmaxyx()
titlepad = curses.newpad(1, x - 2)
titlepad.addstr(menutext, curses.color_pair(2))
titlepad.bkgd(' ', curses.color_pair(2) | curses.A_BOLD)

infopad = curses.newpad(3, 15)
infopad.addstr("Press q to exit", curses.color_pair(2))

def create_entry(text, startheight):
    _, x = screen.getmaxyx()
    pad = curses.newpad(maxh, x - 2)
    cheight = int(maxh / 2)
    tstart = int((x / 2) - (len(text) / 2)) - 1
    pad.addstr(cheight, tstart, text)
    pad.bkgd(' ', curses.color_pair(3))
    return pad

def select_entry(pad):
    global parseoffset
    global select
    global refreshlist
    global selectedpad
    global scrolldirection
    global object_i
    global maxfitobj
    global resize
    if (object_i > maxfitobj) or (parseoffset != 0):
        selectpad.erase()
        selectpad.resize(3, len(str(100) + "/") + len(str(object_i)))
        selectpad.addstr(str(select + 1) + "/" + str(object_i), curses.color_pair(2))
        selectpad.refresh(0, 0, 1, 2, 1, x - 2)
    if (pad):
        if (selectedpad != None) and not (resize):
            deselect_entry(selectedpad)
        pad['pad'].bkgd(' ', curses.color_pair(4))
        cheight = int(maxh / 2)
        tstart = int((x / 2) - (len(pad['label']) / 2)) - 1
        pad['pad'].addstr(cheight, tstart, pad['label'])
        y, _ = pad['pad'].getbegyx()
        sy, sx = screen.getmaxyx()
        pad['pad'].refresh(0, 0, y, 1, sy, sx - 2)
        selectedpad = pad
    else:
        scrolldirection = "up"
        parseoffset = parseoffset - 1
        refreshlist = True
    screen.refresh()

def deselect_entry(pad):
    pad['pad'].bkgd(' ', curses.color_pair(3))
    cheight = int(maxh / 2)
    tstart = int((x / 2) - (len(pad['label']) / 2)) - 1
    pad['pad'].addstr(cheight, tstart, pad['label'])
    y, _ = pad['pad'].getbegyx()
    sy, sx = screen.getmaxyx()
    pad['pad'].refresh(0, 0, y, 1, sy, sx - 2)
    screen.refresh()

curseLoop = True
pads = False
action = False
select = 0
selectedpad = None
scroll = False
parseoffset = 0
refreshlist = False
scrolldirection = "down"

seltext = "Selecting 0/0"
selectpad = curses.newpad(3, len(seltext))
selectpad.bkgd(' ', curses.color_pair(3))

y, x = screen.getmaxyx()
screensize = y - 4
maxfitobj = int(screensize / maxh)

while curseLoop:
    screen.refresh()
    resize = curses.is_term_resized(y, x)
    if resize is True:
        y, x = screen.getmaxyx()
        screen.clear()
        curses.resizeterm(y, x)
        screensize = y - 4
        maxfitobj = int(screensize / maxh)
        pads = False
        screen.refresh()
    else:
        try:
            titlepad.refresh(0, 0, 2, int((x / 2) - (len(menutext) / 2)), 2, x - 2)
            infopad.refresh(0, 0, 1, x - 17, 1, x - 2)
        except:
            pass

    j = 4

    if (pads == False) or (refreshlist):
        pads = collections.defaultdict(dict)

        if (object_i > maxfitobj):
            parserange = range(0 + parseoffset, maxfitobj + parseoffset)
        else:
            parserange = range(object_i)

        for i in parserange:
            pads[i]['pad'] = create_entry(objects[i]['Label'], j)
            try:
                pads[i]['pad'].refresh(0, 0, j, 1, y, x - 2)
            except:
                pass
            pads[i]['action'] = objects[i]['Action']
            pads[i]['label'] = objects[i]['Label']
            pads[i]['range-start'] = j
            pads[i]['range-end'] = j + maxh
            j = j + maxh
        if (refreshlist):
            if (scrolldirection == "down"):
                select = maxfitobj + parseoffset - 1
                select_entry(pads[select])
            if (scrolldirection == "up"):
                select = parseoffset
                select_entry(pads[select])
        else:
            select = 0
            select_entry(pads[select])
        refreshlist = False

    event = screen.getch()
    if event == ord("q"): break
    if event == curses.KEY_MOUSE:
        try:
            _, _, my, _, _ = curses.getmouse()
            if (object_i > maxfitobj):
                parserange = range(0 + parseoffset, maxfitobj + parseoffset)
            else:
                parserange = range(object_i)
            for i in parserange:
                if (my >= pads[i]['range-start']) and (my < pads[i]['range-end']):
                    if (selectedpad != None):
                        deselect_entry(selectedpad)
                    select_entry(pads[i])
                    action = pads[i]['action']
                    y, _ = pads[i]['pad'].getbegyx()
                    sy, sx = screen.getmaxyx()
                    pads[i]['pad'].refresh(0, 0, y, 1, sy, sx - 2)
                    sleep(0.2)
                    curseLoop = False
        except:
            pass
    if event == curses.KEY_UP:
        if (selectedpad == None):
            select = 0
            select_entry(pads[select])
        if (select != 0):
            select = select - 1
            select_entry(pads[select])

    if event == curses.KEY_DOWN:
        if (selectedpad != None):
            if (select != maxfitobj + parseoffset - 1):
                if not (select == object_i - 1):
                    select = select + 1
                    deselect_entry(selectedpad)
                    select_entry(pads[select])
            else:
                if (select == maxfitobj + parseoffset - 1):
                    if (select != object_i - 1):
                        select = select + 1
                        parseoffset = parseoffset + 1
                        scrolldirection = "down"
                        refreshlist = True
        else:
            if (object_i == 1):
                select = 0
                select_entry(pads[select])
            else:
                select = 1
                select_entry(pads[select])
    if event == 10:
        if (selectedpad != None):
            action = objects[select]['Action']
            curseLoop = False
curses.endwin()
sleep(0.1)
if (action):
    if action.startswith("function:"):
        function = action.split(":")[1]
        RunInbuiltFunction(function)
    elif (action == "quit"):
        exit()
    else:
        os.system(action)
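Tying the file-syntax comments at the top of this script to a concrete case, a menu definition could look like the sample below (menu text and commands are invented examples; the header keys are exactly the ones the parser loop above reads, and blank lines must be avoided since an empty line stops parsing):

menutext=Deploy menu
maxh=3
titlecolor=white
window_bg=black
Show uptime;uptime
Edit config;nano config.txt
Show Python version;function:Show_version
Quit;quit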
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from collections import OrderedDict


class LRUCache(object):
    '''Must not store mutable objects; set() is not safe for concurrent access.'''

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key):
        if key in self.cache:
            # Re-insert so the key becomes the most recently used.
            value = self.cache.pop(key)
            self.cache[key] = value
        else:
            value = None
        return value

    def set(self, key, value):
        if key in self.cache:
            # Drop the old entry, then store the new value as most recently used.
            self.cache.pop(key)
            self.cache[key] = value
        else:
            if len(self.cache) == self.capacity:
                self.cache.popitem(last=False)  # evict the least recently used item
            self.cache[key] = value
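A short usage example of the cache above, showing the eviction order with capacity 2:

# With capacity 2, inserting a third key evicts the least recently used one.
cache = LRUCache(2)
cache.set('a', 1)
cache.set('b', 2)
cache.get('a')        # 'a' becomes most recently used
cache.set('c', 3)     # evicts 'b', the least recently used key
assert cache.get('b') is None
assert cache.get('a') == 1
assert cache.get('c') == 3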
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for VariationalGaussianProcess."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Dependency imports
import numpy as np

import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf

from tensorflow_probability import distributions as tfd
from tensorflow_probability import positive_semidefinite_kernels as psd_kernels
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util


def _np_kernel_matrix_fn(amp, length_scale, x, y):
  x = np.expand_dims(x, -2)[..., 0]
  y = np.expand_dims(y, -3)[..., 0]
  return amp ** 2 * np.exp(-.5 * ((x - y)**2) / (length_scale**2))


# TODO(cgs, srvasude): Figure out good tests for correctness for VGP, and add
# them here.
# A potential start is constructing kernels for which the Nystrom approximation
# is almost exact, which implies that the VGP replicates the GP.
class _VariationalGaussianProcessTest(object):

  def testShapes(self):
    # 5x5 grid of index points in R^2 and flatten to 25x2
    index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # ==> shape = [25, 2]
    batched_index_points = np.expand_dims(np.stack([index_points]*6), -3)
    # ==> shape = [6, 1, 25, 2]

    # 9 inducing index points in R^2
    inducing_index_points = np.linspace(-4., 4., 3, dtype=np.float64)
    inducing_index_points = np.stack(np.meshgrid(inducing_index_points,
                                                 inducing_index_points),
                                     axis=-1)
    inducing_index_points = np.reshape(inducing_index_points, [-1, 2])
    # ==> shape = [9, 2]

    variational_inducing_observations_loc = np.zeros([3, 9], dtype=np.float64)
    variational_inducing_observations_scale = np.eye(9, dtype=np.float64)

    # Kernel with batch_shape [2, 4, 1, 1]
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1, 1, 1])
    length_scale = np.array([.1, .2, .3, .4], np.float64).reshape([1, 4, 1, 1])

    jitter = np.float64(1e-6)
    observation_noise_variance = np.float64(1e-2)

    if not self.is_static:
      amplitude = tf1.placeholder_with_default(amplitude, shape=None)
      length_scale = tf1.placeholder_with_default(length_scale, shape=None)
      batched_index_points = tf1.placeholder_with_default(
          batched_index_points, shape=None)

      inducing_index_points = tf1.placeholder_with_default(
          inducing_index_points, shape=None)
      variational_inducing_observations_loc = tf1.placeholder_with_default(
          variational_inducing_observations_loc, shape=None)
      variational_inducing_observations_scale = tf1.placeholder_with_default(
          variational_inducing_observations_scale, shape=None)

    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)

    vgp = tfd.VariationalGaussianProcess(
        kernel=kernel,
        index_points=batched_index_points,
        inducing_index_points=inducing_index_points,
        variational_inducing_observations_loc=(
            variational_inducing_observations_loc),
        variational_inducing_observations_scale=(
            variational_inducing_observations_scale),
        observation_noise_variance=observation_noise_variance,
        jitter=jitter)

    batch_shape = [2, 4, 6, 3]
    event_shape = [25]
    sample_shape = [9, 3]

    samples = vgp.sample(sample_shape)

    if self.is_static or tf.executing_eagerly():
      self.assertAllEqual(vgp.batch_shape_tensor(), batch_shape)
      self.assertAllEqual(vgp.event_shape_tensor(), event_shape)
      self.assertAllEqual(samples.shape,
                          sample_shape + batch_shape + event_shape)
      self.assertAllEqual(vgp.batch_shape, batch_shape)
      self.assertAllEqual(vgp.event_shape, event_shape)
      self.assertAllEqual(samples.shape,
                          sample_shape + batch_shape + event_shape)
    else:
      self.assertAllEqual(self.evaluate(vgp.batch_shape_tensor()), batch_shape)
      self.assertAllEqual(self.evaluate(vgp.event_shape_tensor()), event_shape)
      self.assertAllEqual(self.evaluate(samples).shape,
                          sample_shape + batch_shape + event_shape)
      self.assertIsNone(tensorshape_util.rank(samples.shape))
      self.assertIsNone(tensorshape_util.rank(vgp.batch_shape))
      self.assertEqual(tensorshape_util.rank(vgp.event_shape), 1)
      self.assertIsNone(
          tf.compat.dimension_value(tensorshape_util.dims(vgp.event_shape)[0]))

  def testOptimalVariationalShapes(self):
    # 5x5 grid of observation index points in R^2 and flatten to 25x2
    observation_index_points = np.linspace(-4., 4., 5, dtype=np.float64)
    observation_index_points = np.stack(
        np.meshgrid(
            observation_index_points, observation_index_points), axis=-1)
    observation_index_points = np.reshape(
        observation_index_points, [-1, 2])
    # ==> shape = [25, 2]
    observation_index_points = np.expand_dims(
        np.stack([observation_index_points]*6), -3)
    # ==> shape = [6, 1, 25, 2]
    observations = np.sin(observation_index_points[..., 0])
    # ==> shape = [6, 1, 25]

    # 9 inducing index points in R^2
    inducing_index_points = np.linspace(-4., 4., 3, dtype=np.float64)
    inducing_index_points = np.stack(np.meshgrid(inducing_index_points,
                                                 inducing_index_points),
                                     axis=-1)
    inducing_index_points = np.reshape(inducing_index_points, [-1, 2])
    # ==> shape = [9, 2]

    # Kernel with batch_shape [2, 4, 1, 1]
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1, 1, 1])
    length_scale = np.array([.1, .2, .3, .4], np.float64).reshape([1, 4, 1, 1])

    jitter = np.float64(1e-6)
    observation_noise_variance = np.float64(1e-2)

    if not self.is_static:
      amplitude = tf1.placeholder_with_default(amplitude, shape=None)
      length_scale = tf1.placeholder_with_default(length_scale, shape=None)
      observation_index_points = tf1.placeholder_with_default(
          observation_index_points, shape=None)

      inducing_index_points = tf1.placeholder_with_default(
          inducing_index_points, shape=None)
    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)

    loc, scale = tfd.VariationalGaussianProcess.optimal_variational_posterior(
        kernel=kernel,
        inducing_index_points=inducing_index_points,
        observation_index_points=observation_index_points,
        observations=observations,
        observation_noise_variance=observation_noise_variance,
        jitter=jitter,
    )
    # We should expect that loc has shape [2, 4, 6, 1, 9]. This is because:
    #   * [2, 4] comes from the batch shape of the kernel.
    #   * [6, 1] comes from the batch shape of the observations / observation
    #     index points.
    #   * [9] comes from the number of inducing points.
    # Similar reasoning applies to scale.
    self.assertAllEqual([2, 4, 6, 1, 9], tf.shape(input=loc))
    self.assertAllEqual([2, 4, 6, 1, 9, 9], tf.shape(input=scale))

  def testVariationalLossShapes(self):
    # 2x2 grid of index points in R^2 and flatten to 4x2
    index_points = np.linspace(-4., 4., 2, dtype=np.float64)
    index_points = np.stack(np.meshgrid(index_points, index_points), axis=-1)
    index_points = np.reshape(index_points, [-1, 2])
    # ==> shape = [4, 2]
    batched_index_points = np.expand_dims(np.stack([index_points]*6), -3)
    # ==> shape = [6, 1, 4, 2]

    # 3x3 grid of index points in R^2 and flatten to 9x2
    observation_index_points = np.linspace(-4., 4., 3, dtype=np.float64)
    observation_index_points = np.stack(
        np.meshgrid(
            observation_index_points, observation_index_points), axis=-1)
    observation_index_points = np.reshape(
        observation_index_points, [-1, 2])
    # ==> shape = [9, 2]
    observation_index_points = np.expand_dims(
        np.stack([observation_index_points]*6), -3)
    # ==> shape = [6, 1, 9, 2]
    observations = np.sin(observation_index_points[..., 0])
    # ==> shape = [6, 1, 9]

    # 9 inducing index points in R^2
    inducing_index_points = np.linspace(-4., 4., 3, dtype=np.float64)
    inducing_index_points = np.stack(np.meshgrid(inducing_index_points,
                                                 inducing_index_points),
                                     axis=-1)
    inducing_index_points = np.reshape(inducing_index_points, [-1, 2])
    # ==> shape = [9, 2]

    variational_inducing_observations_loc = np.zeros([3, 9], dtype=np.float64)
    variational_inducing_observations_scale = np.eye(9, dtype=np.float64)

    # Kernel with batch_shape [2, 4, 1, 1]
    amplitude = np.array([1., 2.], np.float64).reshape([2, 1, 1, 1])
    length_scale = np.array([.1, .2, .3, .4], np.float64).reshape([1, 4, 1, 1])

    jitter = np.float64(1e-6)
    observation_noise_variance = np.float64(1e-2)

    if not self.is_static:
      amplitude = tf1.placeholder_with_default(amplitude, shape=None)
      length_scale = tf1.placeholder_with_default(length_scale, shape=None)
      batched_index_points = tf1.placeholder_with_default(
          batched_index_points, shape=None)

      observations = tf1.placeholder_with_default(observations, shape=None)
      observation_index_points = tf1.placeholder_with_default(
          observation_index_points, shape=None)
      inducing_index_points = tf1.placeholder_with_default(
          inducing_index_points, shape=None)
      variational_inducing_observations_loc = tf1.placeholder_with_default(
          variational_inducing_observations_loc, shape=None)
      variational_inducing_observations_scale = tf1.placeholder_with_default(
          variational_inducing_observations_scale, shape=None)

    kernel = psd_kernels.ExponentiatedQuadratic(amplitude, length_scale)

    vgp = tfd.VariationalGaussianProcess(
        kernel=kernel,
        index_points=batched_index_points,
        inducing_index_points=inducing_index_points,
        variational_inducing_observations_loc=(
            variational_inducing_observations_loc),
        variational_inducing_observations_scale=(
            variational_inducing_observations_scale),
        observation_noise_variance=observation_noise_variance,
        jitter=jitter)

    loss = vgp.variational_loss(
        observations=observations,
        observation_index_points=observation_index_points)
    # Expect a scalar loss.
    self.assertAllClose([], tf.shape(input=loss))


@test_util.test_all_tf_execution_regimes
class VariationalGaussianProcessStaticTest(
    _VariationalGaussianProcessTest, test_util.TestCase):
  is_static = True


@test_util.test_all_tf_execution_regimes
class VariationalGaussianProcessDynamicTest(
    _VariationalGaussianProcessTest, test_util.TestCase):
  is_static = False


if __name__ == "__main__":
  tf.test.main()
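The Nystrom approximation mentioned in the TODO can be probed numerically with the `_np_kernel_matrix_fn` helper defined in this file. A rough sketch (the grids, amplitude, and length scale below are arbitrary choices, not values from the tests):

# Numerical sketch of the Nystrom approximation referenced in the TODO:
# K_xx is approximated by K_xz @ inv(K_zz) @ K_zx, where z are the
# inducing points; the closer the residual is to zero, the better the
# VGP can replicate the exact GP.
import numpy as np

x = np.linspace(-4., 4., 25)[:, None]   # "observation" points
z = np.linspace(-4., 4., 9)[:, None]    # inducing points
amp, ls = 1., 1.

k_xx = _np_kernel_matrix_fn(amp, ls, x, x)   # [25, 25]
k_xz = _np_kernel_matrix_fn(amp, ls, x, z)   # [25, 9]
k_zz = _np_kernel_matrix_fn(amp, ls, z, z)   # [9, 9]

# Small jitter on the diagonal, mirroring the tests above.
nystrom = k_xz @ np.linalg.solve(k_zz + 1e-6 * np.eye(9), k_xz.T)
print(np.max(np.abs(k_xx - nystrom)))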
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os
import re
try:
    import json
except ImportError:
    import simplejson as json

type_traits = {
    "any": "*",
    "string": "string",
    "integer": "number",
    "number": "number",
    "boolean": "boolean",
    "array": "!Array.<*>",
    "object": "!Object",
}

promisified_domains = {
    "Accessibility",
    "Animation",
    "CSS",
    "Emulation",
    "Profiler"
}

ref_types = {}

def full_qualified_type_id(domain_name, type_id):
    if type_id.find(".") == -1:
        return "%s.%s" % (domain_name, type_id)
    return type_id


def fix_camel_case(name):
    refined = re.sub(r'-(\w)', lambda pat: pat.group(1).upper(), name)
    refined = to_title_case(refined)
    return re.sub(r'(?i)HTML|XML|WML|API', lambda pat: pat.group(0).upper(), refined)


def to_title_case(name):
    return name[:1].upper() + name[1:]


def generate_enum(name, json):
    enum_members = []
    for member in json["enum"]:
        enum_members.append("    %s: \"%s\"" % (fix_camel_case(member), member))
    return "\n/** @enum {string} */\n%s = {\n%s\n};\n" % (name, (",\n".join(enum_members)))


def param_type(domain_name, param):
    if "type" in param:
        if param["type"] == "array":
            items = param["items"]
            return "!Array.<%s>" % param_type(domain_name, items)
        else:
            return type_traits[param["type"]]
    if "$ref" in param:
        type_id = full_qualified_type_id(domain_name, param["$ref"])
        if type_id in ref_types:
            return ref_types[type_id]
        else:
            print("Type not found: " + type_id)
            return "!! Type not found: " + type_id


def load_schema(file, domains):
    input_file = open(file, "r")
    json_string = input_file.read()
    parsed_json = json.loads(json_string)
    domains.extend(parsed_json["domains"])


def generate_protocol_externs(output_path, file1, file2):
    domains = []
    load_schema(file1, domains)
    load_schema(file2, domains)
    output_file = open(output_path, "w")

    output_file.write(
"""
var InspectorBackend = {}

var Protocol = {};
/** @typedef {string}*/
Protocol.Error;
""")

    for domain in domains:
        domain_name = domain["domain"]
        if "types" in domain:
            for type in domain["types"]:
                type_id = full_qualified_type_id(domain_name, type["id"])
                ref_types[type_id] = "%sAgent.%s" % (domain_name, type["id"])

    for domain in domains:
        domain_name = domain["domain"]
        promisified = domain_name in promisified_domains

        output_file.write("\n\n/**\n * @constructor\n*/\n")
        output_file.write("Protocol.%sAgent = function(){};\n" % domain_name)

        if "commands" in domain:
            for command in domain["commands"]:
                output_file.write("\n/**\n")
                params = []
                has_return_value = "returns" in command
                explicit_parameters = promisified and has_return_value
                if ("parameters" in command):
                    for in_param in command["parameters"]:
                        # All parameters are not optional in case of promisified domain with return value.
                        if (not explicit_parameters and "optional" in in_param):
                            params.append("opt_%s" % in_param["name"])
                            output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, in_param), in_param["name"]))
                        else:
                            params.append(in_param["name"])
                            output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, in_param), in_param["name"]))
                returns = []
                returns.append("?Protocol.Error")
                if ("error" in command):
                    returns.append("%s=" % param_type(domain_name, command["error"]))
                if (has_return_value):
                    for out_param in command["returns"]:
                        if ("optional" in out_param):
                            returns.append("%s=" % param_type(domain_name, out_param))
                        else:
                            returns.append("%s" % param_type(domain_name, out_param))
                callback_return_type = "void="
                if explicit_parameters:
                    callback_return_type = "T"
                elif promisified:
                    callback_return_type = "T="
                output_file.write(" * @param {function(%s):%s} opt_callback\n" % (", ".join(returns), callback_return_type))
                if (promisified):
                    output_file.write(" * @return {!Promise.<T>}\n")
                    output_file.write(" * @template T\n")
                params.append("opt_callback")

                output_file.write(" */\n")
                output_file.write("Protocol.%sAgent.prototype.%s = function(%s) {}\n" % (domain_name, command["name"], ", ".join(params)))
                output_file.write("/** @param {function(%s):void=} opt_callback */\n" % ", ".join(returns))
                output_file.write("Protocol.%sAgent.prototype.invoke_%s = function(obj, opt_callback) {}\n" % (domain_name, command["name"]))

        output_file.write("\n\n\nvar %sAgent = function(){};\n" % domain_name)

        if "types" in domain:
            for type in domain["types"]:
                if type["type"] == "object":
                    typedef_args = []
                    if "properties" in type:
                        for property in type["properties"]:
                            suffix = ""
                            if ("optional" in property):
                                suffix = "|undefined"
                            if "enum" in property:
                                enum_name = "%sAgent.%s%s" % (domain_name, type["id"], to_title_case(property["name"]))
                                output_file.write(generate_enum(enum_name, property))
                                typedef_args.append("%s:(%s%s)" % (property["name"], enum_name, suffix))
                            else:
                                typedef_args.append("%s:(%s%s)" % (property["name"], param_type(domain_name, property), suffix))
                    if (typedef_args):
                        output_file.write("\n/** @typedef {!{%s}} */\n%sAgent.%s;\n" % (", ".join(typedef_args), domain_name, type["id"]))
                    else:
                        output_file.write("\n/** @typedef {!Object} */\n%sAgent.%s;\n" % (domain_name, type["id"]))
                elif type["type"] == "string" and "enum" in type:
                    output_file.write(generate_enum("%sAgent.%s" % (domain_name, type["id"]), type))
                elif type["type"] == "array":
                    output_file.write("\n/** @typedef {!Array.<!%s>} */\n%sAgent.%s;\n" % (param_type(domain_name, type["items"]), domain_name, type["id"]))
                else:
                    output_file.write("\n/** @typedef {%s} */\n%sAgent.%s;\n" % (type_traits[type["type"]], domain_name, type["id"]))

        output_file.write("/** @interface */\n")
        output_file.write("%sAgent.Dispatcher = function() {};\n" % domain_name)
        if "events" in domain:
            for event in domain["events"]:
                params = []
                if ("parameters" in event):
                    output_file.write("/**\n")
                    for param in event["parameters"]:
                        if ("optional" in param):
                            params.append("opt_%s" % param["name"])
                            output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, param), param["name"]))
                        else:
                            params.append(param["name"])
                            output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, param), param["name"]))
                    output_file.write(" */\n")
                output_file.write("%sAgent.Dispatcher.prototype.%s = function(%s) {};\n" % (domain_name, event["name"], ", ".join(params)))

    output_file.write("\n/** @constructor\n * @param {!Object.<string, !Object>} agentsMap\n */\n")
    output_file.write("Protocol.Agents = function(agentsMap){this._agentsMap;};\n")
    output_file.write("/**\n * @param {string} domain\n * @param {!Object} dispatcher\n */\n")
    output_file.write("Protocol.Agents.prototype.registerDispatcher = function(domain, dispatcher){};\n")
    for domain in domains:
        domain_name = domain["domain"]
        uppercase_length = 0
        while uppercase_length < len(domain_name) and domain_name[uppercase_length].isupper():
            uppercase_length += 1

        output_file.write("/** @return {!Protocol.%sAgent}*/\n" % domain_name)
        output_file.write("Protocol.Agents.prototype.%s = function(){};\n" % (domain_name[:uppercase_length].lower() + domain_name[uppercase_length:] + "Agent"))

        output_file.write("/**\n * @param {!%sAgent.Dispatcher} dispatcher\n */\n" % domain_name)
        output_file.write("Protocol.Agents.prototype.register%sDispatcher = function(dispatcher) {}\n" % domain_name)

    output_file.close()

if __name__ == "__main__":
    import sys
    import os.path
    program_name = os.path.basename(__file__)
    if len(sys.argv) < 5 or sys.argv[1] != "-o":
        sys.stderr.write("Usage: %s -o OUTPUT_FILE INPUT_FILE_1 INPUT_FILE_2\n" % program_name)
        exit(1)
    output_path = sys.argv[2]
    input_path_1 = sys.argv[3]
    input_path_2 = sys.argv[4]
    generate_protocol_externs(output_path, input_path_1, input_path_2)
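To make the expected input concrete, here is a minimal schema in the shape `load_schema()` parses; the "Example" domain and its members are invented for illustration and are not part of any real protocol file:

# Minimal illustrative protocol schema (the "Example" domain is invented).
# The keys match exactly what generate_protocol_externs() reads:
# "domains", "domain", "types"/"id"/"type", "commands"/"name"/"parameters"/
# "optional"/"returns", and "$ref" for cross-type references.
minimal_schema = """
{
    "domains": [{
        "domain": "Example",
        "types": [{"id": "NodeId", "type": "integer"}],
        "commands": [{
            "name": "getNode",
            "parameters": [{"name": "depth", "type": "integer", "optional": true}],
            "returns": [{"name": "nodeId", "$ref": "NodeId"}]
        }]
    }]
}
"""
# Saved into two JSON files, this would be consumed as:
#   python <this script> -o protocol_externs.js first.json second.json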
"""Testing handling with CoreState."""NEWLINENEWLINEfrom supervisor.coresys import CoreSysNEWLINENEWLINENEWLINEasync def test_timezone(run_dir, coresys: CoreSys):NEWLINE """Test write corestate to /run/supervisor."""NEWLINENEWLINE assert coresys.timezone == "UTC"NEWLINE assert coresys.config.timezone is NoneNEWLINENEWLINE await coresys.dbus.timedate.connect()NEWLINE await coresys.dbus.timedate.update()NEWLINE assert coresys.timezone == "Etc/UTC"NEWLINENEWLINE coresys.config.timezone = "Europe/Zurich"NEWLINE assert coresys.timezone == "Europe/Zurich"NEWLINE
import abc
import numpy as np

from . import _base_model


class SklearnModel(_base_model.BaseModel, abc.ABC):
    """
    Parent class based on :obj:`~easyPheno.model._base_model.BaseModel` for all models with a sklearn-like API to share
    functionalities. See :obj:`~easyPheno.model._base_model.BaseModel` for more information.

    **Attributes**

        *Inherited attributes*

        See :obj:`~easyPheno.model._base_model.BaseModel`
    """

    def retrain(self, X_retrain: np.array, y_retrain: np.array):
        """
        Implementation of the retraining for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.
        """
        self.model.fit(X_retrain, np.ravel(y_retrain))

    def predict(self, X_in: np.array) -> np.array:
        """
        Implementation of a prediction based on input features for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.
        """
        return np.reshape(self.model.predict(X_in), (-1, 1))

    def train_val_loop(self, X_train: np.array, y_train: np.array, X_val: np.array, y_val: np.array) -> np.array:
        """
        Implementation of a train and validation loop for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.
        """
        self.model.fit(X_train, np.ravel(y_train))
        return self.predict(X_in=X_val)
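All three methods above assume that a concrete subclass has placed an sklearn estimator in `self.model`. A minimal sketch of such a subclass; the `define_model` hook name is an assumption about the BaseModel contract and the real easyPheno hook may differ:

# Hypothetical concrete subclass; assumes BaseModel expects the estimator
# in self.model (the define_model hook name is illustrative, not confirmed
# by the file above).
import sklearn.ensemble

class RandomForest(SklearnModel):
    """Random forest regressor wrapped in the sklearn-like API above."""

    def define_model(self):
        # self.model is all that retrain/predict/train_val_loop need.
        self.model = sklearn.ensemble.RandomForestRegressor(n_estimators=100)
        return self.model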
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <[email protected]>
# Copyright 2007 Kenneth Loafman <[email protected]>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# The following can be redefined to use different shell commands from
# ssh or scp or to add more arguments.  However, the replacements must
# have the same syntax.  Also these strings will be executed by the
# shell, so shouldn't have strange characters in them.

import re
import string
import time
import os

import duplicity.backend
from duplicity import globals
from duplicity import log
from duplicity import pexpect
from duplicity.errors import * #@UnusedWildImport

class SSHPExpectBackend(duplicity.backend.Backend):
    """This backend copies files using scp.  List not supported"""
    def __init__(self, parsed_url):
        """scpBackend initializer"""
        duplicity.backend.Backend.__init__(self, parsed_url)

        self.retry_delay = 10

        self.scp_command = "scp"
        if globals.scp_command: self.scp_command = globals.scp_command

        self.sftp_command = "sftp"
        if globals.sftp_command: self.sftp_command = globals.sftp_command

        # host string of form [user@]hostname
        if parsed_url.username:
            self.host_string = parsed_url.username + "@" + parsed_url.hostname
        else:
            self.host_string = parsed_url.hostname
        # make sure remote_dir is always valid
        if parsed_url.path:
            # remove leading '/'
            self.remote_dir = re.sub(r'^/', r'', parsed_url.path, 1)
        else:
            self.remote_dir = '.'
        self.remote_prefix = self.remote_dir + '/'
        # maybe use different ssh port
        if parsed_url.port:
            globals.ssh_options = globals.ssh_options + " -oPort=%s" % parsed_url.port
        # set some defaults if user has not specified already.
        if "ServerAliveInterval" not in globals.ssh_options:
            globals.ssh_options += " -oServerAliveInterval=%d" % ((int)(globals.timeout / 2))
        if "ServerAliveCountMax" not in globals.ssh_options:
            globals.ssh_options += " -oServerAliveCountMax=2"

        # set up password
        self.use_getpass = globals.ssh_askpass
        self.password = self.get_password()

    def run_scp_command(self, commandline):
        """ Run an scp command, responding to password prompts """
        for n in range(1, globals.num_retries+1):
            if n > 1:
                # sleep before retry
                time.sleep(self.retry_delay)
            log.Info("Running '%s' (attempt #%d)" % (commandline, n))
            child = pexpect.spawn(commandline, timeout = None)
            if globals.ssh_askpass:
                state = "authorizing"
            else:
                state = "copying"
            while 1:
                if state == "authorizing":
                    match = child.expect([pexpect.EOF,
                                          "(?i)timeout, server not responding",
                                          "(?i)pass(word|phrase .*):",
                                          "(?i)permission denied",
                                          "authenticity"])
                    log.Debug("State = %s, Before = '%s'" % (state, child.before.strip()))
                    if match == 0:
                        log.Warn("Failed to authenticate")
                        break
                    elif match == 1:
                        log.Warn("Timeout waiting to authenticate")
                        break
                    elif match == 2:
                        child.sendline(self.password)
                        state = "copying"
                    elif match == 3:
                        log.Warn("Invalid SSH password")
                        break
                    elif match == 4:
                        log.Warn("Remote host authentication failed (missing known_hosts entry?)")
                        break
                elif state == "copying":
                    match = child.expect([pexpect.EOF,
                                          "(?i)timeout, server not responding",
                                          "stalled",
                                          "authenticity",
                                          "ETA"])
                    log.Debug("State = %s, Before = '%s'" % (state, child.before.strip()))
                    if match == 0:
                        break
                    elif match == 1:
                        log.Warn("Timeout waiting for response")
                        break
                    elif match == 2:
                        state = "stalled"
                    elif match == 3:
                        log.Warn("Remote host authentication failed (missing known_hosts entry?)")
                        break
                elif state == "stalled":
                    match = child.expect([pexpect.EOF,
                                          "(?i)timeout, server not responding",
                                          "ETA"])
                    log.Debug("State = %s, Before = '%s'" % (state, child.before.strip()))
                    if match == 0:
                        break
                    elif match == 1:
                        log.Warn("Stalled for too long, aborted copy")
                        break
                    elif match == 2:
                        state = "copying"
            child.close(force = True)
            if child.exitstatus == 0:
                return
            log.Warn("Running '%s' failed (attempt #%d)" % (commandline, n))
        log.Warn("Giving up trying to execute '%s' after %d attempts" % (commandline, globals.num_retries))
        raise BackendException("Error running '%s'" % commandline)

    def run_sftp_command(self, commandline, commands):
        """ Run an sftp command, responding to password prompts, passing commands from list """
        maxread = 2000 # expected read buffer size
        responses = [pexpect.EOF,
                     "(?i)timeout, server not responding",
                     "sftp>",
                     "(?i)pass(word|phrase .*):",
                     "(?i)permission denied",
                     "authenticity",
                     "(?i)no such file or directory",
                     "Couldn't delete file: No such file or directory",
                     "Couldn't delete file",
                     "open(.*): Failure"]
        max_response_len = max([len(p) for p in responses[1:]])
        for n in range(1, globals.num_retries+1):
            if n > 1:
                # sleep before retry
                time.sleep(self.retry_delay)
            log.Info("Running '%s' (attempt #%d)" % (commandline, n))
            child = pexpect.spawn(commandline, timeout = None, maxread=maxread)
            cmdloc = 0
            passprompt = 0
            while 1:
                msg = ""
                match = child.expect(responses,
                                     searchwindowsize=maxread+max_response_len)
                log.Debug("State = sftp, Before = '%s'" % (child.before.strip()))
                if match == 0:
                    break
                elif match == 1:
                    msg = "Timeout waiting for response"
                    break
                if match == 2:
                    if cmdloc < len(commands):
                        command = commands[cmdloc]
                        log.Info("sftp command: '%s'" % (command,))
                        child.sendline(command)
                        cmdloc += 1
                    else:
                        command = 'quit'
                        child.sendline(command)
                        res = child.before
                elif match == 3:
                    passprompt += 1
                    child.sendline(self.password)
                    if (passprompt > 1):
                        raise BackendException("Invalid SSH password.")
                elif match == 4:
                    if not child.before.strip().startswith("mkdir"):
                        msg = "Permission denied"
                        break
                elif match == 5:
                    msg = "Host key authenticity could not be verified (missing known_hosts entry?)"
                    break
                elif match == 6:
                    if not child.before.strip().startswith("rm"):
                        msg = "Remote file or directory does not exist in command='%s'" % (commandline,)
                        break
                elif match == 7:
                    if not child.before.strip().startswith("Removing"):
                        msg = "Could not delete file in command='%s'" % (commandline,)
                        break
                elif match == 8:
                    msg = "Could not delete file in command='%s'" % (commandline,)
                    break
                elif match == 9:
                    msg = "Could not open file in command='%s'" % (commandline,)
                    break
            child.close(force = True)
            if child.exitstatus == 0:
                return res
            log.Warn("Running '%s' with commands:\n %s\n failed (attempt #%d): %s" % (commandline, "\n ".join(commands), n, msg))
        raise BackendException("Giving up trying to execute '%s' with commands:\n %s\n after %d attempts" % (commandline, "\n ".join(commands), globals.num_retries))

    def put(self, source_path, remote_filename = None):
        if globals.use_scp:
            self.put_scp(source_path, remote_filename = remote_filename)
        else:
            self.put_sftp(source_path, remote_filename = remote_filename)

    def put_sftp(self, source_path, remote_filename = None):
        """Use sftp to copy source_dir/filename to remote computer"""
        if not remote_filename:
            remote_filename = source_path.get_filename()
        commands = ["put \"%s\" \"%s.%s.part\"" %
                    (source_path.name, self.remote_prefix, remote_filename),
                    "rename \"%s.%s.part\" \"%s%s\"" %
                    (self.remote_prefix, remote_filename, self.remote_prefix, remote_filename)]
        commandline = ("%s %s %s" % (self.sftp_command,
                                     globals.ssh_options,
                                     self.host_string))
        self.run_sftp_command(commandline, commands)

    def put_scp(self, source_path, remote_filename = None):
        """Use scp to copy source_dir/filename to remote computer"""
        if not remote_filename:
            remote_filename = source_path.get_filename()
        commandline = "%s %s %s %s:%s%s" % \
            (self.scp_command, globals.ssh_options, source_path.name, self.host_string,
             self.remote_prefix, remote_filename)
        self.run_scp_command(commandline)

    def get(self, remote_filename, local_path):
        if globals.use_scp:
            self.get_scp(remote_filename, local_path)
        else:
            self.get_sftp(remote_filename, local_path)

    def get_sftp(self, remote_filename, local_path):
        """Use sftp to get a remote file"""
        commands = ["get \"%s%s\" \"%s\"" %
                    (self.remote_prefix, remote_filename, local_path.name)]
        commandline = ("%s %s %s" % (self.sftp_command,
                                     globals.ssh_options,
                                     self.host_string))
        self.run_sftp_command(commandline, commands)
        local_path.setdata()
        if not local_path.exists():
            raise BackendException("File %s not found locally after get "
                                   "from backend" % local_path.name)

    def get_scp(self, remote_filename, local_path):
        """Use scp to get a remote file"""
        commandline = "%s %s %s:%s%s %s" % \
            (self.scp_command, globals.ssh_options, self.host_string, self.remote_prefix,
             remote_filename, local_path.name)
        self.run_scp_command(commandline)
        local_path.setdata()
        if not local_path.exists():
            raise BackendException("File %s not found locally after get "
                                   "from backend" % local_path.name)

    def _list(self):
        """
        List files available for scp

        Note that this command can get confused when dealing with
        files with newlines in them, as the embedded newlines cannot
        be distinguished from the file boundaries.
        """
        dirs = self.remote_dir.split(os.sep)
        if len(dirs) > 0:
            if not dirs[0]:
                dirs = dirs[1:]
                dirs[0] = '/' + dirs[0]
        mkdir_commands = []
        for d in dirs:
            mkdir_commands += ["mkdir \"%s\"" % (d)] + ["cd \"%s\"" % (d)]

        commands = mkdir_commands + ["ls -1"]
        commandline = ("%s %s %s" % (self.sftp_command,
                                     globals.ssh_options,
                                     self.host_string))

        l = self.run_sftp_command(commandline, commands).split('\n')[1:]

        return filter(lambda x: x, map(string.strip, l))

    def delete(self, filename_list):
        """
        Runs sftp rm to delete files.  Files must not require quoting.
        """
        commands = ["cd \"%s\"" % (self.remote_dir,)]
        for fn in filename_list:
            commands.append("rm \"%s\"" % fn)
        commandline = ("%s %s %s" % (self.sftp_command, globals.ssh_options, self.host_string))
        self.run_sftp_command(commandline, commands)

duplicity.backend.register_backend("ssh", SSHPExpectBackend)
duplicity.backend.register_backend("scp", SSHPExpectBackend)
duplicity.backend.register_backend("sftp", SSHPExpectBackend)
# Generated by Django 2.2.4 on 2021-01-15 01:18

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('users', '0019_puzzlesolution'),
    ]

    operations = [
        migrations.AddField(
            model_name='puzzlesolution',
            name='previous_attempts',
            field=models.CharField(default='', max_length=2000),
        ),
    ]
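The model-side counterpart of this migration is the new field on PuzzleSolution; a sketch of just that field, since the rest of the model is not shown in the migration:

# Sketch of the model change this migration corresponds to; the other
# fields of PuzzleSolution are not visible from the migration itself.
from django.db import models

class PuzzleSolution(models.Model):
    previous_attempts = models.CharField(default='', max_length=2000)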
# -*- coding: utf-8 -*-NEWLINE# Copyright (c) Facebook, Inc. and its affiliates. All Rights ReservedNEWLINEimport loggingNEWLINEimport copyNEWLINEimport torchNEWLINEimport torchvisionNEWLINEimport numpy as npNEWLINEimport cv2NEWLINEfrom PIL import ImageNEWLINEfrom fvcore.common.file_io import PathManagerNEWLINEfrom fvcore.transforms.transform import NoOpTransform, TransformNEWLINENEWLINEfrom detectron2.data import MetadataCatalogNEWLINEfrom detectron2.data import detection_utils as utilsNEWLINEfrom detectron2.data import transforms as TNEWLINENEWLINEfrom .dataset import BB8_KEYPOINT_CONNECTION_RULES, FPS8_KEYPOINT_CONNECTION_RULESNEWLINE# from .structures import DensePoseDataRelative, DensePoseList, DensePoseTransformDataNEWLINENEWLINEclass RandomBlurTransform(Transform):NEWLINE def __init__(self, blur_sigma=1):NEWLINE super().__init__()NEWLINE self._set_attributes(locals())NEWLINENEWLINE def apply_image(self, img: np.ndarray, interp: str = None) -> np.ndarray:NEWLINE """NEWLINE Apply blur transform on the image(s).NEWLINENEWLINE Args:NEWLINE img (ndarray): of shape NxHxWxC, or HxWxC or HxW. The array can beNEWLINE of type uint8 in range [0, 255], or floating point in rangeNEWLINE [0, 1] or [0, 255].NEWLINE interp (str): keep this option for consistency, perform blur would notNEWLINE require interpolation.NEWLINE Returns:NEWLINE ndarray: blured image(s).NEWLINE """NEWLINE if img.dtype == np.uint8:NEWLINE img = img.astype(np.float32)NEWLINE img = cv2.GaussianBlur(img, (self.blur_sigma, self.blur_sigma), 0)NEWLINE return np.clip(img, 0, 255).astype(np.uint8)NEWLINE else:NEWLINE return cv2.GaussianBlur(img, (self.blur_sigma, self.blur_sigma), 0)NEWLINENEWLINE def apply_coords(self, coords: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the coordinates.NEWLINE """NEWLINE return coordsNEWLINENEWLINE def apply_segmentation(self, segmentation: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the full-image segmentation.NEWLINE """NEWLINE return segmentationNEWLINENEWLINEclass ColorJitterTransform(Transform):NEWLINE def __init__(self, brightness=None,NEWLINE contrast=None,NEWLINE saturation=None,NEWLINE hue=None):NEWLINE super().__init__()NEWLINE self._set_attributes(locals())NEWLINENEWLINE def apply_image(self, img: np.ndarray, interp: str = None) -> np.ndarray:NEWLINE """NEWLINE Apply color jitter transform on the image(s).NEWLINENEWLINE Args:NEWLINE img (ndarray): of shape NxHxWxC, or HxWxC or HxW. 
The array can beNEWLINE of type uint8 in range [0, 255], or floating point in rangeNEWLINE [0, 1] or [0, 255].NEWLINE interp (str): keep this option for consistency, perform color jitter would notNEWLINE require interpolation.NEWLINE Returns:NEWLINE ndarray: color jittered image(s).NEWLINE """NEWLINE self.color_jitter = torchvision.transforms.ColorJitter(NEWLINE brightness=self.brightness,NEWLINE contrast=self.contrast,NEWLINE saturation=self.saturation,NEWLINE hue=self.hue)NEWLINE img = np.asarray(self.color_jitter(Image.fromarray(np.ascontiguousarray(img, np.uint8))))NEWLINE return imgNEWLINE NEWLINE def apply_coords(self, coords: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the coordinates.NEWLINE """NEWLINE return coordsNEWLINENEWLINE def apply_segmentation(self, segmentation: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the full-image segmentation.NEWLINE """NEWLINE return segmentationNEWLINENEWLINEclass RandomBlur(T.TransformGen):NEWLINE """NEWLINE Randomly gussian blur an image.NEWLINE """NEWLINE def __init__(self, blur_prob=0.5, blur_sigma=None):NEWLINE super().__init__()NEWLINE self._init(locals())NEWLINENEWLINE def get_transform(self, img):NEWLINE do = self._rand_range() < self.blur_probNEWLINE if do:NEWLINE if self.blur_sigma is None:NEWLINE self.blur_sigma = np.random.choice([3, 5, 7, 9])NEWLINE return RandomBlurTransform(self.blur_sigma)NEWLINE else:NEWLINE return NoOpTransform()NEWLINENEWLINEclass ColorJitter(T.TransformGen):NEWLINE """NEWLINE Color jitter an image.NEWLINE """NEWLINE def __init__(self, brightness=None, contrast=None, saturation=None, hue=None):NEWLINE super().__init__()NEWLINE self._init(locals())NEWLINENEWLINE def get_transform(self, img):NEWLINE return ColorJitterTransform(self.brightness, self.contrast, self.saturation, self.hue)NEWLINENEWLINEdef create_sixdpose_keypoint_hflip_indices(dataset_names, keypoint_format):NEWLINE """NEWLINE Args:NEWLINE dataset_names (list[str]): list of dataset namesNEWLINE keypoint_format(str): bb8, fps8, or bb8+fps8NEWLINE Returns:NEWLINE ndarray[int]: a vector of size=#keypoints, storing theNEWLINE horizontally-flipped keypoint indices.NEWLINE """NEWLINE meta = MetadataCatalog.get(dataset_names[0])NEWLINE keypoint_flip_map = () # sixd pose has no filp mapNEWLINENEWLINE if keypoint_format == 'bb8':NEWLINE names = (NEWLINE "center",NEWLINE # bb8NEWLINE "bb8_0", "bb8_1",NEWLINE "bb8_2", "bb8_3",NEWLINE "bb8_4", "bb8_5",NEWLINE "bb8_6", "bb8_7",NEWLINE )NEWLINE connection_rules = BB8_KEYPOINT_CONNECTION_RULESNEWLINE meta.set(keypoint_names=names, keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)NEWLINE elif keypoint_format == 'fps8':NEWLINE names = (NEWLINE "center",NEWLINE # fps8NEWLINE "fps8_0", "fps8_1",NEWLINE "fps8_2", "fps8_3",NEWLINE "fps8_4", "fps8_5",NEWLINE "fps8_6", "fps8_7",NEWLINE )NEWLINE connection_rules = FPS8_KEYPOINT_CONNECTION_RULESNEWLINE meta.set(keypoint_names=names, keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)NEWLINE else:NEWLINE assert keypoint_format == 'bb8+fps8', keypoint_formatNEWLINE names = (NEWLINE "center",NEWLINE # bb8NEWLINE "bb8_0", "bb8_1",NEWLINE "bb8_2", "bb8_3",NEWLINE "bb8_4", "bb8_5",NEWLINE "bb8_6", "bb8_7",NEWLINE # fps8NEWLINE "fps8_0", "fps8_1",NEWLINE "fps8_2", "fps8_3",NEWLINE "fps8_4", "fps8_5",NEWLINE "fps8_6", "fps8_7",NEWLINE )NEWLINE connection_rules = BB8_KEYPOINT_CONNECTION_RULES + FPS8_KEYPOINT_CONNECTION_RULESNEWLINE meta.set(keypoint_names=names, 
keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)NEWLINE NEWLINE # TODO flip -> hflip NEWLINE flip_map = dict(keypoint_flip_map)NEWLINE flip_map.update({v: k for k, v in flip_map.items()})NEWLINE flipped_names = [i if i not in flip_map else flip_map[i] for i in names]NEWLINE flip_indices = [names.index(i) for i in flipped_names]NEWLINE return np.asarray(flip_indices)NEWLINENEWLINENEWLINEclass DatasetMapper:NEWLINE """NEWLINE A callable which takes a dataset dict in Detectron2 Dataset format,NEWLINE and map it into a format used by the model.NEWLINENEWLINE This is the default callable to be used to map your dataset dict into training data.NEWLINE You may need to follow it to implement your own one for customized logic.NEWLINENEWLINE The callable currently does the following:NEWLINENEWLINE 1. Read the image from "file_name"NEWLINE 2. Applies cropping/geometric transforms to the image and annotationsNEWLINE 3. Prepare data and annotations to Tensor and :class:`Instances`NEWLINE """NEWLINENEWLINE def __init__(self, cfg, is_train=True):NEWLINE if cfg.INPUT.CROP.ENABLED and is_train:NEWLINE self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)NEWLINE logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen))NEWLINE else:NEWLINE self.crop_gen = NoneNEWLINENEWLINE if cfg.INPUT.RANDOMBLUR.ENABLED and is_train:NEWLINE self.blur_gen = RandomBlur(cfg.INPUT.RANDOMBLUR.PROB)NEWLINE logging.getLogger(__name__).info("BlurGen used in training: " + str(self.blur_gen))NEWLINE else:NEWLINE self.blur_gen = None NEWLINENEWLINE if cfg.INPUT.COLORJITTER.ENABLED and is_train:NEWLINE self.colorjitter_gen = ColorJitter(cfg.INPUT.COLORJITTER.BRIGHTNESS, cfg.INPUT.COLORJITTER.CONTRAST,NEWLINE cfg.INPUT.COLORJITTER.SATURATION, cfg.INPUT.COLORJITTER.HUE)NEWLINE logging.getLogger(__name__).info("ColorJitterGen used in training: " + str(self.colorjitter_gen))NEWLINE else:NEWLINE self.colorjitter_gen = None NEWLINENEWLINE self.tfm_gens = utils.build_transform_gen(cfg, is_train)NEWLINENEWLINE # fmt: offNEWLINE self.img_format = cfg.INPUT.FORMATNEWLINE self.mask_on = cfg.MODEL.MASK_ON or cfg.MODEL.PVNET_ONNEWLINE self.mask_format = cfg.INPUT.MASK_FORMATNEWLINE self.keypoint_on = cfg.MODEL.KEYPOINT_ON or cfg.MODEL.PVNET_ON or cfg.MODEL.CRPNET_ON or cfg.MODEL.HCR_ONNEWLINE self.keypoint_format= cfg.INPUT.KEYPOINT_FORMATNEWLINE self.load_proposals = cfg.MODEL.LOAD_PROPOSALSNEWLINE # fmt: onNEWLINE if self.keypoint_on and is_train:NEWLINE # Flip only makes sense in trainingNEWLINE self.keypoint_hflip_indices = create_sixdpose_keypoint_hflip_indices(cfg.DATASETS.TRAIN, self.keypoint_format)NEWLINE else:NEWLINE self.keypoint_hflip_indices = NoneNEWLINENEWLINE if self.load_proposals:NEWLINE self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZENEWLINE self.proposal_topk = (NEWLINE cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAINNEWLINE if is_trainNEWLINE else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TESTNEWLINE )NEWLINE self.is_train = is_trainNEWLINENEWLINE def __call__(self, dataset_dict):NEWLINE """NEWLINE Args:NEWLINE dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.NEWLINENEWLINE Returns:NEWLINE dict: a format that builtin models in detectron2 acceptNEWLINE """NEWLINE dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code belowNEWLINE # USER: Write your own image loading if it's not from a fileNEWLINE image = utils.read_image(dataset_dict["file_name"], format=self.img_format)NEWLINE 
utils.check_image_size(dataset_dict, image)NEWLINENEWLINE if "annotations" not in dataset_dict:NEWLINE image, transforms = T.apply_transform_gens(NEWLINE ([self.crop_gen] if self.crop_gen else []) + NEWLINE ([self.blur_gen] if self.blur_gen else []) + NEWLINE ([self.colorjitter_gen] if self.colorjitter_gen else []) + self.tfm_gens, imageNEWLINE )NEWLINE else:NEWLINE # Crop around an instance if there are instances in the image.NEWLINE # USER: Remove if you don't use croppingNEWLINE if self.crop_gen:NEWLINE crop_tfm = utils.gen_crop_transform_with_instance(NEWLINE self.crop_gen.get_crop_size(image.shape[:2]),NEWLINE image.shape[:2],NEWLINE np.random.choice(dataset_dict["annotations"]),NEWLINE )NEWLINE image = crop_tfm.apply_image(image)NEWLINE if self.blur_gen:NEWLINE blur_tfm = self.blur_gen.get_transform(image)NEWLINE image = blur_tfm.apply_image(image)NEWLINE if self.colorjitter_gen:NEWLINE colorjitter_tfm = self.colorjitter_gen.get_transform(image)NEWLINE image = colorjitter_tfm.apply_image(image)NEWLINENEWLINE image, transforms = T.apply_transform_gens(self.tfm_gens, image)NEWLINE if self.colorjitter_gen:NEWLINE transforms = colorjitter_tfm + transformsNEWLINE if self.blur_gen:NEWLINE transforms = blur_tfm + transformsNEWLINE if self.crop_gen:NEWLINE transforms = crop_tfm + transformsNEWLINENEWLINE image_shape = image.shape[:2] # h, wNEWLINENEWLINE # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,NEWLINE # but not efficient on large generic data structures due to the use of pickle & mp.Queue.NEWLINE # Therefore it's important to use torch.Tensor.NEWLINE dataset_dict["image"] = torch.as_tensor(NEWLINE image.transpose(2, 0, 1).astype("float32")NEWLINE ).contiguous()NEWLINE # Can use uint8 if it turns out to be slow some dayNEWLINENEWLINE # USER: Remove if you don't use pre-computed proposals.NEWLINE if self.load_proposals:NEWLINE utils.transform_proposals(NEWLINE dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topkNEWLINE )NEWLINENEWLINE if not self.is_train:NEWLINE dataset_dict.pop("annotations", None)NEWLINE dataset_dict.pop("sem_seg_file_name", None)NEWLINE return dataset_dictNEWLINENEWLINE if "annotations" in dataset_dict:NEWLINE # USER: Modify this if you want to keep them for some reason.NEWLINE for anno in dataset_dict["annotations"]:NEWLINE if not self.mask_on:NEWLINE anno.pop("segmentation", None)NEWLINE if not self.keypoint_on:NEWLINE anno.pop("keypoints", None)NEWLINE # USER: load keypoints according to keypoint_formatNEWLINE else:NEWLINE keypts = anno["keypoints"]NEWLINE if 'bb8' in self.keypoint_format:NEWLINE corner_2d = np.array(anno["corner_2d"])NEWLINE corner_2d = np.insert(corner_2d, 2, 2, axis=1).flatten().tolist()NEWLINE keypts += corner_2dNEWLINE if 'fps8' in self.keypoint_format:NEWLINE fps_2d = np.array(anno["fps_2d"])NEWLINE fps_2d = np.insert(fps_2d, 2, 2, axis=1).flatten().tolist()NEWLINE keypts += fps_2dNEWLINE anno["keypoints"] = keyptsNEWLINENEWLINE # USER: Implement additional transformations if you have other types of dataNEWLINE annos = [NEWLINE utils.transform_instance_annotations(NEWLINE obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indicesNEWLINE )NEWLINE for obj in dataset_dict.pop("annotations")NEWLINE if obj.get("iscrowd", 0) == 0NEWLINE ]NEWLINE instances = utils.annotations_to_instances(NEWLINE annos, image_shape, mask_format=self.mask_formatNEWLINE )NEWLINE # Create a tight bounding box from masks, useful when image is croppedNEWLINE if self.crop_gen and 
instances.has("gt_masks"):NEWLINE instances.gt_boxes = instances.gt_masks.get_bounding_boxes()NEWLINE dataset_dict["instances"] = utils.filter_empty_instances(instances)NEWLINENEWLINE # USER: Remove if you don't do semantic/panoptic segmentation.NEWLINE # if "sem_seg_file_name" in dataset_dict:NEWLINE # with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f:NEWLINE # sem_seg_gt = Image.open(f)NEWLINE # sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8")NEWLINE # sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)NEWLINE # sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long"))NEWLINE # dataset_dict["sem_seg"] = sem_seg_gtNEWLINE return dataset_dictNEWLINENEWLINEclass COCODatasetMapper:NEWLINE """NEWLINE A callable which takes a dataset dict in Detectron2 Dataset format,NEWLINE and map it into a format used by the model.NEWLINENEWLINE This is the default callable to be used to map your dataset dict into training data.NEWLINE You may need to follow it to implement your own one for customized logic,NEWLINE such as a different way to read or transform images.NEWLINE See :doc:`/tutorials/data_loading` for details.NEWLINENEWLINE The callable currently does the following:NEWLINENEWLINE 1. Read the image from "file_name"NEWLINE 2. Applies cropping/geometric transforms to the image and annotationsNEWLINE 3. Prepare data and annotations to Tensor and :class:`Instances`NEWLINE """NEWLINENEWLINE def __init__(self, cfg, is_train=True):NEWLINE if cfg.INPUT.CROP.ENABLED and is_train:NEWLINE self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)NEWLINE logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen))NEWLINE else:NEWLINE self.crop_gen = NoneNEWLINENEWLINE self.tfm_gens = utils.build_transform_gen(cfg, is_train)NEWLINENEWLINE # fmt: offNEWLINE self.img_format = cfg.INPUT.FORMATNEWLINE self.mask_on = cfg.MODEL.MASK_ON or cfg.MODEL.PVNET_ONNEWLINE self.mask_format = cfg.INPUT.MASK_FORMATNEWLINE self.keypoint_on = cfg.MODEL.KEYPOINT_ON or cfg.MODEL.PVNET_ON or cfg.MODEL.CRPNET_ON or cfg.MODEL.HCR_ONNEWLINE self.load_proposals = cfg.MODEL.LOAD_PROPOSALSNEWLINE # fmt: onNEWLINE if self.keypoint_on and is_train:NEWLINE # Flip only makes sense in trainingNEWLINE self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)NEWLINE else:NEWLINE self.keypoint_hflip_indices = NoneNEWLINENEWLINE if self.load_proposals:NEWLINE self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZENEWLINE self.proposal_topk = (NEWLINE cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAINNEWLINE if is_trainNEWLINE else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TESTNEWLINE )NEWLINE self.is_train = is_trainNEWLINENEWLINE def __call__(self, dataset_dict):NEWLINE """NEWLINE Args:NEWLINE dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.NEWLINENEWLINE Returns:NEWLINE dict: a format that builtin models in detectron2 acceptNEWLINE """NEWLINE dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code belowNEWLINE # USER: Write your own image loading if it's not from a fileNEWLINE image = utils.read_image(dataset_dict["file_name"], format=self.img_format)NEWLINE utils.check_image_size(dataset_dict, image)NEWLINENEWLINE if "annotations" not in dataset_dict:NEWLINE image, transforms = T.apply_transform_gens(NEWLINE ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens, imageNEWLINE )NEWLINE else:NEWLINE # Crop around an instance if there are instances in the image.NEWLINE # USER: Remove if you don't use croppingNEWLINE 
            if self.crop_gen:
                crop_tfm = utils.gen_crop_transform_with_instance(
                    self.crop_gen.get_crop_size(image.shape[:2]),
                    image.shape[:2],
                    np.random.choice(dataset_dict["annotations"]),
                )
                image = crop_tfm.apply_image(image)
            image, transforms = T.apply_transform_gens(self.tfm_gens, image)
            if self.crop_gen:
                transforms = crop_tfm + transforms

        image_shape = image.shape[:2]  # h, w

        # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,
        # but not efficient on large generic data structures due to the use of pickle & mp.Queue.
        # Therefore it's important to use torch.Tensor.
        dataset_dict["image"] = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1)))

        # USER: Remove if you don't use pre-computed proposals.
        if self.load_proposals:
            utils.transform_proposals(
                dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk
            )

        if not self.is_train:
            # USER: Modify this if you want to keep them for some reason.
            dataset_dict.pop("annotations", None)
            dataset_dict.pop("sem_seg_file_name", None)
            return dataset_dict

        if "annotations" in dataset_dict:
            # USER: Modify this if you want to keep them for some reason.
            for anno in dataset_dict["annotations"]:
                if not self.mask_on:
                    anno.pop("segmentation", None)
                if not self.keypoint_on:
                    anno.pop("keypoints", None)

            # USER: Implement additional transformations if you have other types of data
            annos = [
                utils.transform_instance_annotations(
                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices
                )
                for obj in dataset_dict.pop("annotations")
                if obj.get("iscrowd", 0) == 0
            ]
            instances = utils.annotations_to_instances(
                annos, image_shape, mask_format=self.mask_format
            )
            # Create a tight bounding box from masks, useful when image is cropped
            if self.crop_gen and instances.has("gt_masks"):
                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()
            dataset_dict["instances"] = utils.filter_empty_instances(instances)

        # USER: Remove if you don't do semantic/panoptic segmentation.
        if "sem_seg_file_name" in dataset_dict:
            with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f:
                sem_seg_gt = Image.open(f)
                sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8")
            sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)
            sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long"))
            dataset_dict["sem_seg"] = sem_seg_gt
        return dataset_dict
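
# A minimal sketch of plugging a mapper like the ones above into Detectron2's
# data loading. ``build_detection_train_loader`` accepting a ``mapper`` argument
# is standard Detectron2 API for the version this code targets; ``cfg`` is
# assumed to be a fully populated config.
from detectron2.data import build_detection_train_loader


def build_sixdpose_train_loader(cfg):
    """Build a training loader that applies the custom DatasetMapper."""
    return build_detection_train_loader(cfg, mapper=DatasetMapper(cfg, is_train=True))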
"""Objects, functions and constants relating to OCP bounds.NEWLINENEWLINEAttributesNEWLINE----------NEWLINEDEFAULT_ASSUME_INF_BOUNDS : boolNEWLINE Default as to whether Pycollo should treat unspecified bounds as beingNEWLINE numerically infinite.NEWLINEDEFAULT_INF_VALUE : floatNEWLINE Default numerical value for when Pycollo needs to use a finite numericalNEWLINE approximation for infinity.NEWLINENEWLINE"""NEWLINENEWLINENEWLINE__all__ = ["EndpointBounds", "PhaseBounds"]NEWLINENEWLINENEWLINEfrom abc import (ABC, abstractmethod)NEWLINEfrom collections import namedtupleNEWLINEfrom numbers import NumberNEWLINEfrom typing import (Iterable, Optional, Union)NEWLINENEWLINEimport numpy as npNEWLINEimport scipy.optimize as optimizeNEWLINEimport sympy as symNEWLINENEWLINEfrom .node import NodeNEWLINEfrom .typing import OptionalBoundsTypeNEWLINEfrom .utils import (fast_sympify,NEWLINE format_for_output,NEWLINE SUPPORTED_ITER_TYPES,NEWLINE symbol_primitives,NEWLINE )NEWLINENEWLINENEWLINE# Default values for settingsNEWLINEDEFAULT_ASSUME_INF_BOUNDS = TrueNEWLINEDEFAULT_BOUND_CLASH_ABSOLUTE_TOLERANCE = 1e-6NEWLINEDEFAULT_BOUND_CLASH_RELATIVE_TOLERANCE = 1e-6NEWLINEDEFAULT_NUMERICAL_INF = 10e19NEWLINEDEFAULT_OVERRIDE_ENDPOINTS = TrueNEWLINEDEFAULT_REMOVE_CONSTANT_VARIABLES = TrueNEWLINENEWLINENEWLINE# Data structuresNEWLINEphase_info_fields = ("name", "index", "backend")NEWLINEPhaseInfo = namedtuple("PhaseInfo", phase_info_fields)NEWLINE"""Data structure for information about OCP phases.NEWLINENEWLINEThese are mostly used to format descriptive error messages for the user.NEWLINENEWLINEFieldsNEWLINE------NEWLINEname : strNEWLINE The name associated with the phaseNEWLINEindex : intNEWLINE The index of the phase.NEWLINEbackend : :py:class:`PycolloPhaseData`NEWLINE The phase backend associated with the specified OCP phase.NEWLINENEWLINE"""NEWLINENEWLINEbounds_info_fields = ("user_bnds", "user_syms", "bnds_type", "num",NEWLINE "is_variable", "none_default_allowed")NEWLINEBoundsInfo = namedtuple("BoundsInfo",NEWLINE bounds_info_fields,NEWLINE defaults=[True, True])NEWLINE"""Data structure for storing information about user-supplied bounds.NEWLINENEWLINEFieldsNEWLINE------NEWLINEuser_bnds : objNEWLINE The bounds that the user has supplied.NEWLINEuser_syms : Iterable[sym.Symbols]NEWLINE An iterable of symbols relating to the user-supplied bounds (if available).NEWLINEbnds_type : strNEWLINE String indentifying the aspect of the OCP that the bounds relate to. MostlyNEWLINE used for formatting descriptive error messages for the user.NEWLINEnum : intNEWLINE The number of variables/constraints that should be expected for the type ofNEWLINE bounds in question.NEWLINEis_variable : boolNEWLINE `True` if the bound type in question is a variable, `False` if it is aNEWLINE constraint.NEWLINEnone_default_allowed : boolNEWLINE `True` if Pycollo should automatically handle the situation where no boundsNEWLINE have been supplied. 
`False` if an error should be raised.NEWLINENEWLINE"""NEWLINENEWLINENEWLINEclass BoundsABC(ABC):NEWLINENEWLINE @abstractmethodNEWLINE def optimal_control_problem(self):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def _process_and_check_user_values(self):NEWLINE passNEWLINENEWLINE @abstractmethodNEWLINE def _required_variable_bounds(self):NEWLINE passNEWLINENEWLINENEWLINEclass EndpointBounds(BoundsABC):NEWLINENEWLINE def __init__(self,NEWLINE optimal_control_problem,NEWLINE *,NEWLINE parameter_variables: OptionalBoundsType = None,NEWLINE endpoint_constraints: OptionalBoundsType = None,NEWLINE ):NEWLINENEWLINE self.ocp = optimal_control_problemNEWLINE self.parameter_variables = parameter_variablesNEWLINE self.endpoint_constraints = endpoint_constraintsNEWLINENEWLINE @propertyNEWLINE def optimal_control_problem(self):NEWLINE return self.ocpNEWLINENEWLINE def _process_and_check_user_values(self):NEWLINE self._backend = self.optimal_control_problem._backendNEWLINE self._INF = self.optimal_control_problem.settings.numerical_infNEWLINE self._process_parameter_vars()NEWLINE self._process_endpoint_cons()NEWLINENEWLINE def _process_parameter_vars(self):NEWLINE user_bnds = self.parameter_variablesNEWLINE user_syms = self._backend.s_var_userNEWLINE bnds_type = "parameter variable"NEWLINE num_expected = self._backend.num_s_var_fullNEWLINE bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expected)NEWLINE self._s_bnd, self._s_needed = process_single_type_of_values(self,NEWLINE bnds_info)NEWLINENEWLINE def _process_endpoint_cons(self):NEWLINE num_b_con = self.optimal_control_problem.number_endpoint_constraintsNEWLINE user_bnds = self.endpoint_constraintsNEWLINE user_syms = [None] * num_b_conNEWLINE bnds_type = "endpoint constraints"NEWLINE num_expect = num_b_conNEWLINE bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expect,NEWLINE False)NEWLINE self._b_con_bnd, needed = process_single_type_of_values(self,NEWLINE bnds_info)NEWLINENEWLINE def _required_variable_bounds(self):NEWLINE x_bnd = self._s_bnd[self._s_needed]NEWLINE return x_bndNEWLINENEWLINENEWLINEclass PhaseBounds(BoundsABC):NEWLINE """Bounds on variables and constraints associated with a phase.NEWLINENEWLINE This class currently behaves like a data class, however additionalNEWLINE functionality will be added in the future to support robust checking of theNEWLINE user-supplied values for the bounds.NEWLINENEWLINE Intended behaviour will be::NEWLINENEWLINE * None values will be treated as no bounds, i.e. 
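
# A hypothetical construction of endpoint bounds; ``ocp`` and the parameter
# symbol ``m`` are assumed to come from an existing problem definition. A pair
# gives lower/upper bounds, while a single value would fix both.
# bounds = EndpointBounds(ocp,
#                         parameter_variables={m: [0.5, 2.0]},
#                         endpoint_constraints=[[0, 0]])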
class PhaseBounds(BoundsABC):
    """Bounds on variables and constraints associated with a phase.

    This class currently behaves like a data class; however, additional
    functionality will be added in the future to support robust checking of
    the user-supplied values for the bounds.

    Intended behaviour will be:

    * `None` values will be treated as no bounds, i.e. ['-inf', 'inf'].
    * Single values will be treated as equal lower and upper bounds.
    * Mappings will be accepted for `state_variables`, `control_variables`,
      `initial_state_constraints` and `final_state_constraints`.
    * Keys in the mappings should be the strings of the corresponding
      `state_variables` or `control_variables` for the phase.
    * 'inf' values will be replaced by a large floating point value so that
      scaling can be done automatically.
    * The 'inf' replacement value can be changed in
      `OptimalControlProblem.settings.numerical_inf`; the default is 10e19.
    * If a :obj:`np.ndarray` with size = (2, 2) is passed as a value then
      the first dimension will be treated as corresponding to the
      variable or constraint to be bounded.
    * If iterables are passed then they may contain a combination of None,
      single numerical values, and pairs of numerical values.
    * Symbolic expressions should also be allowed if they can be converted
      into numerical values when processed alongside auxiliary data.

    Notes
    -----
    * 'inf' values should be avoided where possible in order to give better
      automatic scaling.

    Attributes
    ----------
    phase
        The phase with which these bounds will be associated. Default value is
        `None`.
    initial_time
        Bounds on when the phase starts. Default value is `None`.
    final_time
        Bounds on when the phase ends. Default value is `None`.
    state_variables
        Bounds on the phase's state variables. Default value is `None`.
    control_variables
        Bounds on the phase's control variables. Default value is `None`.
    integral_variables
        Bounds on the phase's integral variables. Default value is `None`.
    path_constraints
        Bounds on the phase's path constraints. Default value is `None`.
    initial_state_constraints
        Bounds on the phase's state variables at the initial time. Default
        value is `None`.
    final_state_constraints
        Bounds on the phase's state variables at the final time. Default value
        is `None`.
    """

    def __init__(self,
                 phase: "Phase",
                 *,
                 initial_time: Optional[float] = None,
                 final_time: Optional[float] = None,
                 state_variables: OptionalBoundsType = None,
                 control_variables: OptionalBoundsType = None,
                 integral_variables: OptionalBoundsType = None,
                 path_constraints: OptionalBoundsType = None,
                 initial_state_constraints: OptionalBoundsType = None,
                 final_state_constraints: OptionalBoundsType = None,
                 ):
        """Bounds on variables and constraints associated with a phase.

        Args
        ----
        phase
            The phase with which these bounds will be associated.
        initial_time
            Bounds on when the phase starts. Default value is `None`.
        final_time
            Bounds on when the phase ends. Default value is `None`.
        state_variables
            Bounds on the phase's state variables. Default value is `None`.
        control_variables
            Bounds on the phase's control variables. Default value is `None`.
        integral_variables
            Bounds on the phase's integral variables. Default value is `None`.
        path_constraints
            Bounds on the phase's path constraints. Default value is `None`.
        initial_state_constraints
            Bounds on the phase's state variables at the initial time. Default
            value is `None`.
        final_state_constraints
            Bounds on the phase's state variables at the final time. Default
            value is `None`.
        """
        self.ocp = phase.optimal_control_problem
        self.phase = phase
        self.initial_time = initial_time
        self.final_time = final_time
        self.state_variables = state_variables
        self.control_variables = control_variables
        self.integral_variables = integral_variables
        self.path_constraints = path_constraints
        self.initial_state_constraints = initial_state_constraints
        self.final_state_constraints = final_state_constraints

    @property
    def optimal_control_problem(self):
        return self.phase.optimal_control_problem

    def _process_and_check_user_values(self, phase_backend):
        self._backend = phase_backend
        self._INF = self.optimal_control_problem.settings.numerical_inf
        p_info = self._get_phase_info(phase_backend)
        self._process_state_vars(p_info)
        self._process_control_vars(p_info)
        self._process_integral_vars(p_info)
        self._process_time_vars(p_info)
        self._process_path_cons(p_info)
        self._process_initial_state_cons(p_info)
        self._process_final_state_cons(p_info)

    def _get_phase_info(self, phase_backend):
        phase_name = phase_backend.ocp_phase.name
        phase_index = phase_backend.ocp_phase.phase_number
        phase_info = PhaseInfo(phase_name, phase_index, phase_backend)
        return phase_info

    def _process_state_vars(self, p_info):
        user_bnds = self.state_variables
        user_syms = p_info.backend.y_var_user
        bnds_type = "state variable"
        num_expect = p_info.backend.num_y_var_full
        bnds_info = BoundsInfo(user_bnds, user_syms, bnds_type, num_expect)
        self._y_bnd, self._y_needed = process_single_type_of_values(self,
                                                                    bnds_info,
                                                                    p_info)

    def _process_control_vars(self, p_info):
        user_bnd = self.control_variables
        user_sym = p_info.backend.u_var_user
        bnd_type = "control variable"
        num_expect = p_info.backend.num_u_var_full
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)
        self._u_bnd, self._u_needed = process_single_type_of_values(self,
                                                                    bnd_info,
                                                                    p_info)

    def _process_integral_vars(self, p_info):
        user_bnd = self.integral_variables
        user_sym = p_info.backend.q_var_user
        bnd_type = "integral variable"
        num_expect = p_info.backend.num_q_var_full
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)
        self._q_bnd, self._q_needed = process_single_type_of_values(self,
                                                                    bnd_info,
                                                                    p_info)

    def _process_path_cons(self, p_info):
        user_bnd = self.path_constraints
        user_sym = [None] * p_info.backend.num_p_con
        bnd_type = "path constraints"
        num_expect = p_info.backend.num_p_con
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)
        self._p_con_bnd, needed = process_single_type_of_values(self,
                                                                bnd_info,
                                                                p_info)

    def _process_time_vars(self, p_info):
        user_bnd = [self.initial_time, self.final_time]
        user_sym = p_info.backend.t_var_user
        bnd_type = "time variable"
        num_expect = p_info.backend.num_t_var_full
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect)
        self._t_bnd, self._t_needed = process_single_type_of_values(self,
                                                                    bnd_info,
                                                                    p_info)
        self._check_time_bounds_error((0, 0), (1, 0), p_info)
        self._check_time_bounds_error((0, 1), (1, 1), p_info)

    def _check_time_bounds_error(self, i_1, i_2, p_info):
        arg_1 = self._t_bnd[i_1]
        arg_2 = self._t_bnd[i_2]
        if arg_1 > arg_2:
            self._raise_time_bounds_error(i_1, i_2, arg_1, arg_2, p_info)

    def _raise_time_bounds_error(self, i_1, i_2, bnd_1, bnd_2, p_info):
        bnd_1_t0_or_tF = "initial" if i_1[0] == 0 else "final"
        bnd_1_lower_or_upper = "lower" if i_1[1] == 0 else "upper"
        bnd_2_t0_or_tF = "initial" if i_2[0] == 0 else "final"
        bnd_2_lower_or_upper = "lower" if i_2[1] == 0 else "upper"
        msg = (f"The {bnd_2_lower_or_upper} bound for the {bnd_2_t0_or_tF} "
               f"time ('{bnd_2}') cannot be less than the "
               f"{bnd_1_lower_or_upper} bound for the {bnd_1_t0_or_tF} time "
               f"('{bnd_1}') in phase {p_info.name} (index #{p_info.index}).")
        raise ValueError(msg)

    def _process_initial_state_cons(self, p_info):
        user_bnd = self.initial_state_constraints
        user_sym = p_info.backend.y_var_user
        bnd_type = "initial state constraint"
        num_expect = p_info.backend.num_y_var_full
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)
        y_t0_bnd, self._y_t0_needed = process_single_type_of_values(self,
                                                                    bnd_info,
                                                                    p_info)
        if self.ocp.settings.override_endpoint_bounds:
            y_t0_bnd = self._override_endpoint_bounds(y_t0_bnd)
        self._y_t0_bnd = y_t0_bnd

    def _process_final_state_cons(self, p_info):
        user_bnd = self.final_state_constraints
        user_sym = p_info.backend.y_var_user
        bnd_type = "final state constraint"
        num_expect = p_info.backend.num_y_var_full
        bnd_info = BoundsInfo(user_bnd, user_sym, bnd_type, num_expect, False)
        y_tF_bnd, self._y_tF_needed = process_single_type_of_values(self,
                                                                    bnd_info,
                                                                    p_info)
        if self.ocp.settings.override_endpoint_bounds:
            y_tF_bnd = self._override_endpoint_bounds(y_tF_bnd)
        self._y_tF_bnd = y_tF_bnd

    def _override_endpoint_bounds(self, y_con_bnd):
        settings = self.ocp.settings
        override = settings.override_endpoint_bounds
        lower_is_less = y_con_bnd[:, 0] < self._y_bnd[:, 0]
        if not override and np.any(lower_is_less):
            msg = ("The lower bounds for some endpoint state constraints are "
                   "less than the corresponding state variable lower bounds "
                   "and endpoint bound overriding is disabled.")
            raise ValueError(msg)
        y_con_bnd[lower_is_less, 0] = self._y_bnd[lower_is_less, 0]
        upper_is_more = y_con_bnd[:, 1] > self._y_bnd[:, 1]
        if not override and np.any(upper_is_more):
            msg = ("The upper bounds for some endpoint state constraints are "
                   "greater than the corresponding state variable upper "
                   "bounds and endpoint bound overriding is disabled.")
            raise ValueError(msg)
        y_con_bnd[upper_is_more, 1] = self._y_bnd[upper_is_more, 1]
        return y_con_bnd

    # def _process_potential_dual_value_to_single_value(self, bnd_info, p_info):
    #     bnd = bnd_info.user_bnd
    #     msg = (f"Single bounds in this form ('{bnd}') are not supported.")
    #     is_list = isinstance(bnd, SUPPORTED_ITER_TYPES)
    #     if not is_list:
    #         raise TypeError(msg)
    #     is_len_2 = len(bnd) == 2
    #     if not is_len_2:
    #         raise ValueError(msg)
    #     is_pair_same = bnd[0] == bnd[1]
    #     if not is_pair_same:
    #         raise ValueError(msg)
    #     bnd = bnd[0]
    #     bnd_info = bnd_info._replace(user_bnds=bnd)
    #     return bnd_info

    def _required_variable_bounds(self):
        y_bnd = self._y_bnd[self._y_needed]
        u_bnd = self._u_bnd[self._u_needed]
        q_bnd = self._q_bnd[self._q_needed]
        t_bnd = self._t_bnd[self._t_needed]
        x_bnd = np.vstack([y_bnd, u_bnd, q_bnd, t_bnd])
        return x_bnd
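
# A hypothetical sketch of the bound forms the PhaseBounds docstring above
# describes; ``phase`` and the symbols ``y0``, ``y1`` are assumed to come from
# an existing OCP definition.
# phase.bounds = PhaseBounds(
#     phase,
#     initial_time=0.0,             # single value: equal lower and upper bound
#     final_time=[0.1, 10.0],       # pair: lower and upper bound
#     state_variables={y0: [0, 1],  # mapping keyed by the phase's variables
#                      y1: [-1, 1]},
#     control_variables=[[-2, 2]],  # iterable of lower/upper pairs
#     integral_variables=None,      # None: unbounded (numerical inf applied)
# )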
class Bounds:

    def __init__(self, ocp_backend):
        self.ocp_backend = ocp_backend
        self.process_and_check_user_values()
        self.collect_required_variable_bounds()
        self.collect_required_state_variable_endpoint_bounds()
        self.collect_constraint_bounds()
        self.add_unrequired_variables_to_auxiliary_data()

    def process_and_check_user_values(self):
        for p in self.ocp_backend.p:
            p.ocp_phase.bounds._process_and_check_user_values(p)
        self.ocp_backend.ocp.bounds._process_and_check_user_values()

    def collect_required_variable_bounds(self):
        x_bnd = []
        for p in self.ocp_backend.p:
            p_bnds = p.ocp_phase.bounds
            x_bnd.append(p_bnds._required_variable_bounds())
        x_bnd.append(self.ocp_backend.ocp.bounds._required_variable_bounds())
        self.x_bnd = np.vstack(x_bnd)

    def collect_required_state_variable_endpoint_bounds(self):
        y_t0_bnd = []
        y_tF_bnd = []
        for p in self.ocp_backend.p:
            p_bnd = p.ocp_phase.bounds
            y_t0_bnd.append(p_bnd._y_t0_bnd[p_bnd._y_needed])
            y_tF_bnd.append(p_bnd._y_tF_bnd[p_bnd._y_needed])
        self.y_t0_bnd = np.vstack(y_t0_bnd)
        self.y_tF_bnd = np.vstack(y_tF_bnd)

    @property
    def x_bnd_lower(self):
        return self.x_bnd[:, 0]

    @property
    def x_bnd_upper(self):
        return self.x_bnd[:, 1]

    def collect_constraint_bounds(self):
        pass

    def add_unrequired_variables_to_auxiliary_data(self):
        # Variables whose lower and upper bounds coincide are not needed as
        # NLP variables; supply their constant (midpoint) value as aux data.
        self.aux_data = {}
        for p in self.ocp_backend.p:
            p_bnd = p.ocp_phase.bounds
            self.aux_data.update({y: np.mean(value)
                                  for y, y_needed, value in zip(
                                      p.y_var_full, p_bnd._y_needed, p_bnd._y_bnd)
                                  if not y_needed})
            self.aux_data.update({u: np.mean(value)
                                  for u, u_needed, value in zip(
                                      p.u_var_full, p_bnd._u_needed, p_bnd._u_bnd)
                                  if not u_needed})
            self.aux_data.update({q: np.mean(value)
                                  for q, q_needed, value in zip(
                                      p.q_var_full, p_bnd._q_needed, p_bnd._q_bnd)
                                  if not q_needed})
            self.aux_data.update({t: np.mean(value)
                                  for t, t_needed, value in zip(
                                      p.t_var_full, p_bnd._t_needed, p_bnd._t_bnd)
                                  if not t_needed})
        prob_bnd = self.ocp_backend.ocp.bounds
        self.aux_data.update({s: np.mean(value)
                              for s, s_needed, value in zip(
                                  self.ocp_backend.s_var_full, prob_bnd._s_needed, prob_bnd._s_bnd)
                              if not s_needed})
def process_single_type_of_values(bnds_obj, bnds_info, p_info=None):
    """Given a `BoundsInfo` object, process the bounds and determine if needed.

    Bounds can either be passed by the user as:
    * a dictionary with the keys as the OCP symbols and the values as the
      bounds;
    * no bounds via the use of `None`; or
    * an iterable of supported type (e.g. tuple, list, np.ndarray) provided
      that the first dimension is the number of variables/constraints of
      that type and the second dimension is either 1 or 2 (depending on
      the circumstance).

    Note that some forms of bounds are not supported for specific types of
    bounds.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    `tuple`
        Of length 2 with the first item being a :py:class:`ndarray <numpy>`
        with the correctly formatted bounds and the second item being another
        :py:class:`ndarray <numpy>` of type `bool` stating whether the bounds
        are needed (i.e. have they been determined to be equal in upper and
        lower bound so that Pycollo can remove them from the OCP and instead
        treat them as constants).

    Raises
    ------
    TypeError
        If the bounds supplied by the user are of a type that cannot be
        handled by Pycollo.

    """
    if isinstance(bnds_info.user_bnds, dict):
        bnds = process_mapping_bounds_instance(bnds_obj, bnds_info, p_info)
    elif bnds_info.user_bnds is None:
        bnds = process_none_bounds_instance(bnds_obj, bnds_info, p_info)
    elif isinstance(bnds_info.user_bnds, SUPPORTED_ITER_TYPES):
        bnds = process_iterable_bounds_instance(bnds_obj, bnds_info, p_info)
    else:
        formatted_valid_types = format_for_output(SUPPORTED_ITER_TYPES)
        msg = (f"Bounds for {bnds_info.bnds_type} cannot be supplied as a "
               f"{type(bnds_info.user_bnds)}, use one of: "
               f"{formatted_valid_types}")
        raise TypeError(msg)
    bnds, needed = check_lower_against_upper(bnds_obj, bnds, bnds_info, p_info)
    return bnds, needed


def process_mapping_bounds_instance(bnds_obj, bnds_info, p_info):
    """Used to process bounds supplied by the user as a `dict`.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    list
        A list of lists with the outer length equal to the number of expected
        bounds and the inner lengths all equal to 2.

    Raises
    ------
    TypeError
        If the bounds type is not supported for use of a mapping because there
        aren't symbols associated with every variable/constraint of that type.

    """
    if any(user_sym is None for user_sym in bnds_info.user_syms):
        msg = f"Can't use mapping for {bnds_info.bnds_type} bounds."
        raise TypeError(msg)
    bnds = []
    for bnd_i, user_sym in enumerate(bnds_info.user_syms):
        bnd = bnds_info.user_bnds.get(user_sym)
        bnd_info = BoundsInfo(bnd, user_sym, bnds_info.bnds_type, bnd_i)
        check_user_bound_missing(bnds_obj, bnd_info, p_info)
        bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)
        bnds.append(bnd)
    return bnds
def check_user_bound_missing(bnds_obj, bnds_info, p_info):
    """Check if any user-supplied bounds for a specific type are missing.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Raises
    ------
    ValueError
        If there are bounds that need to be supplied but aren't.

    """
    is_bnd_none = bnds_info.user_bnds is None
    is_inf_assumed = bnds_obj.ocp.settings.assume_inf_bounds
    if is_bnd_none and not is_inf_assumed:
        msg = (f"No bounds have been supplied for the {bnds_info.bnds_type} "
               f"'{bnds_info.user_syms}' (index #{bnds_info.num}).")
        raise ValueError(msg)


def process_iterable_bounds_instance(bnds_obj, bnds_info, p_info):
    """Used to process bounds supplied by the user as an iterable.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    list
        A list of lists with the outer length equal to the number of expected
        bounds and the inner lengths all equal to 2.

    Raises
    ------
    ValueError
        If bounds that must be supplied are missing and Pycollo is not set to
        assume infinite bounds.

    """
    supported_iter = isinstance(bnds_info.user_bnds[0], SUPPORTED_ITER_TYPES)
    if bnds_info.num == 1 and not supported_iter:
        bnds_info = bnds_info._replace(user_bnds=[bnds_info.user_bnds])
    bnds = []
    for bnd_i, bnd in enumerate(bnds_info.user_bnds):
        bnd_info = BoundsInfo(bnd, None, bnds_info.bnds_type, bnd_i)
        check_user_bound_missing(bnds_obj, bnd_info, p_info)
        bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)
        bnds.append(bnd)
    return bnds


def process_none_bounds_instance(bnds_obj, bnds_info, p_info):
    """Used to process the case where the user has supplied no bounds (`None`).

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    list
        A list of lists with the outer length equal to the number of expected
        bounds and the inner lengths all equal to 2.

    Raises
    ------
    ValueError
        If bounds that must be supplied are missing and Pycollo is not set to
        assume infinite bounds.

    """
    bnds = []
    for bnd_i, user_sym in enumerate(bnds_info.user_syms):
        bnd = None
        bnd_info = BoundsInfo(bnd, user_sym, bnds_info.bnds_type, bnd_i)
        check_user_bound_missing(bnds_obj, bnd_info, p_info)
        bnd = as_lower_upper_pair(bnds_obj, bnd_info, p_info)
        bnds.append(bnd)
    return bnds
def as_lower_upper_pair(bnds_obj, bnds_info, p_info):
    """Get the user-supplied bounds as a lower-upper pair of numeric values.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    `list`
        Pair of bounds as a lower bound (first) and an upper bound (second) in
        a `list`.

    Raises
    ------
    ValueError
        If the flattened user-supplied bounds are not either shape (1, ) or
        (2, ).

    """
    bnds = np.array(bnds_info.user_bnds).flatten()
    if bnds.shape == (1, ):
        both = "lower and upper bounds"
        both_info = bnds_info._replace(user_bnds=bnds[0])
        lower_bnd = get_bound_as_number(bnds_obj, both_info, both, p_info)
        upper_bnd = lower_bnd
    elif bnds.shape == (2, ):
        lower = "lower bound"
        upper = "upper bound"
        lower_info = bnds_info._replace(user_bnds=bnds[0])
        upper_info = bnds_info._replace(user_bnds=bnds[1])
        lower_bnd = get_bound_as_number(bnds_obj, lower_info, lower, p_info)
        upper_bnd = get_bound_as_number(bnds_obj, upper_info, upper, p_info)
    else:
        msg = (f"Bounds for the {bnds_info.bnds_type} must be supplied as a "
               f"single value or a lower-upper pair.")
        raise ValueError(msg)
    lower_bnd = -bnds_obj._INF if lower_bnd is None else lower_bnd
    upper_bnd = bnds_obj._INF if upper_bnd is None else upper_bnd
    return [lower_bnd, upper_bnd]
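
# A standalone sketch (not Pycollo API) of the pair-normalisation rule that
# ``as_lower_upper_pair`` implements: a single value becomes an equal
# lower/upper pair, and ``None`` entries fall back to +/-INF.
#
# import numpy as np
#
# INF = 10e19  # mirrors DEFAULT_NUMERICAL_INF
#
# def as_pair(user_bnds):
#     bnds = np.array(user_bnds, dtype=object).flatten()
#     if bnds.shape == (1, ):
#         lower = upper = bnds[0]
#     elif bnds.shape == (2, ):
#         lower, upper = bnds
#     else:
#         raise ValueError("expected a single value or a pair")
#     lower = -INF if lower is None else float(lower)
#     upper = INF if upper is None else float(upper)
#     return [lower, upper]
#
# assert as_pair(5) == [5.0, 5.0]
# assert as_pair([None, 2.0]) == [-INF, 2.0]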
def get_bound_as_number(bnds_obj, bnds_info, lower_upper, p_info):
    """Format user-supplied bounds to be a number.

    Users can potentially supply bounds as strings (such as "inf" etc.),
    numerical values from non-core Python (e.g. :py:type:`float64 <numpy>`,
    :py:type:`DM <casadi>`), or as symbols (e.g. :py:type:`Symbol <sympy>`,
    :py:type:`SX <casadi>`) provided that they can be resolved as constants
    due to auxiliary data supplied by the user.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    float
        The bound as a numerical value.

    Raises
    ------
    ValueError
        If the user-supplied bound is symbolic and contains a symbol primitive
        that cannot be resolved down to a numerical value.
    NotImplementedError
        If the user supplies a string bound that is unsupported, e.g. 'nan'.

    """
    bnds = bnds_info.user_bnds
    if bnds is None:
        return bnds
    elif isinstance(bnds, str):
        if bnds == "inf":
            return bnds_obj._INF
        elif bnds == "-inf":
            return -bnds_obj._INF
        try:
            bnds = float(bnds)
        except ValueError:
            # float() raises ValueError (not TypeError) for unparsable strings.
            msg = f"A bound value of {bnds} is not supported."
            raise NotImplementedError(msg)
    if isinstance(bnds, (np.float64, np.int64, float, int)):
        return float(bnds)
    bnds = bnds_obj.ocp._backend.substitute_pycollo_sym(bnds)
    if symbol_primitives(bnds):
        msg = (f"The user-supplied {lower_upper} for the "
               f"{bnds_info.bnds_type} '{bnds_info.user_syms}' "
               f"(index #{bnds_info.num}) of '{bnds}' "
               f"cannot be precomputed.")
        raise ValueError(msg)
    return float(bnds)


def check_lower_against_upper(bnds_obj, bnds, bnds_info, p_info):
    """Abstraction layer for checking lower bounds against upper bounds.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds : `list`
        The pre-processed bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    `tuple`
        The first item is a :py:type:`ndarray <numpy>` of shape
        (num_bounds, 2) with the numerical lower and upper bounds and the
        second item is a `bool` :py:type:`ndarray <numpy>` of shape
        (num_bounds, ) stating whether each bound pair is needed in the OCP
        (`True`) or if it can be treated as a constant (`False`).

    """
    if not bnds:
        bnds = np.empty(shape=(0, 2), dtype=float)
        needed = np.empty(shape=0, dtype=bool)
        return bnds, needed
    bnds = np.array(bnds, dtype=float)
    bnds, needed = check_lower_same_as_upper_to_tol(bnds_obj, bnds, bnds_info,
                                                    p_info)
    bnds = check_lower_less_than_upper(bnds_obj, bnds, bnds_info, p_info)
    return bnds, needed


def check_lower_same_as_upper_to_tol(bnds_obj, bnds, bnd_info, p_info):
    """Handle the case where bounds are equal to floating precision.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds : `list`
        The pre-processed bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    `tuple`
        The first item is a :py:type:`ndarray <numpy>` of shape
        (num_bounds, 2) with the numerical lower and upper bounds and the
        second item is a `bool` :py:type:`ndarray <numpy>` of shape
        (num_bounds, ) stating whether each bound pair is needed in the OCP
        (`True`) or if it can be treated as a constant (`False`).

    """
    lower_bnds = bnds[:, 0]
    upper_bnds = bnds[:, 1]
    # Note the correct pairing of settings to np.isclose arguments: the
    # relative tolerance maps to rtol and the absolute tolerance to atol.
    rtol = bnds_obj.ocp.settings.bound_clash_relative_tolerance
    atol = bnds_obj.ocp.settings.bound_clash_absolute_tolerance
    are_same = np.isclose(lower_bnds, upper_bnds, rtol=rtol, atol=atol)
    needed = extract_variables_to_constants(bnds_obj, bnds, are_same)
    mean_bnds = (lower_bnds + upper_bnds) / 2
    bnds[are_same, 0] = mean_bnds[are_same]
    bnds[are_same, 1] = mean_bnds[are_same]
    return bnds, needed
def check_lower_less_than_upper(bnds_obj, bnds, bnds_info, p_info):
    """Ensure each lower bound is less than its upper bound.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds : `list`
        The pre-processed bounds.
    bnds_info : `BoundsInfo`
        The bounds info that is being processed.

    Returns
    -------
    :py:type:`ndarray <numpy>`
        The lower-upper bound pairs with shape (num_bounds, 2).

    Raises
    ------
    ValueError
        If any lower bounds are greater than their upper bound.

    """
    lower_bnds = bnds[:, 0]
    upper_bnds = bnds[:, 1]
    lower_less_than_upper = lower_bnds <= upper_bnds
    all_less_than = np.all(lower_less_than_upper)
    if not all_less_than:
        error_indices = np.flatnonzero(~lower_less_than_upper)
        error_syms = np.array(bnds_info.user_syms)[error_indices]
        plural_needed = len(error_indices) > 1
        bound_plural = "bounds" if plural_needed else "bound"
        index_plural = "indices" if plural_needed else "index"
        bnds_type_plural = (f"{bnds_info.bnds_type}"
                            f"{'s' if plural_needed else ''}")
        user_syms_formatted = format_for_output(error_syms)
        user_indices_formatted = format_for_output(
            error_indices, wrapping_char="", prefix_char="#")
        lower_bnds_formatted = format_for_output(lower_bnds[error_indices])
        upper_bnds_formatted = format_for_output(upper_bnds[error_indices])
        msg = (f"The user-supplied upper {bound_plural} for the "
               f"{bnds_type_plural} {user_syms_formatted} ({index_plural} "
               f"{user_indices_formatted}) of {upper_bnds_formatted} "
               f"cannot be less than the user-supplied lower "
               f"{bound_plural} of {lower_bnds_formatted}.")
        raise ValueError(msg)
    return bnds


def extract_variables_to_constants(bnds_obj, bnds, are_same):
    """Determine which bound pairs must remain variables in the OCP.

    Parameters
    ----------
    bnds_obj : Union[`EndpointBounds`, `PhaseBounds`]
        The parent bounds-related object for which this function is processing
        bounds.
    bnds : `list`
        The pre-processed bounds.
    are_same : `bool`
        If bounds are equal.

    Returns
    -------
    bool
        `True` if the bound pair is needed, `False` if not.

    """
    if not bnds_obj.ocp.settings.remove_constant_variables:
        needed = np.full(bnds.shape[0], True)
        return needed
    needed = ~are_same
    return needed
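
# The equal-bounds collapse at the heart of ``check_lower_same_as_upper_to_tol``
# in two lines of NumPy; values are illustrative only.
#
# import numpy as np
#
# bnds = np.array([[0.0, 0.0], [-1.0, 1.0]])
# are_same = np.isclose(bnds[:, 0], bnds[:, 1], rtol=1e-6, atol=1e-6)
# needed = ~are_same  # array([False, True]): the first pair becomes a constant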
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE


# t-SNE plot
def tsneplot(embeddings, labels, fig_path):
    print("********************* tSNE Plot *********************")
    X = TSNE(n_components=2, perplexity=100, n_iter=1000).fit_transform(embeddings)
    # One colour per label class (up to nine classes).
    colors = ['#FF0000', '#06D506', '#0931F7', '#00FFFF', '#FFE500',
              '#F700FF', '#9300FF', '#FFD700', '#10DADE']
    for c in range(len(colors)):
        # Gather the 2-D points belonging to class ``c``.
        points = [list(X[j]) for j in range(len(labels)) if labels[j] == c]
        x = [p[0] for p in points]
        y = [p[1] for p in points]
        # Markers only (no connecting line), coloured by class.
        plt.plot(x, y, c=colors[c], markersize=20, marker='.', linestyle='')
    plt.axis('off')
    plt.savefig(fig_path)
    plt.close()
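
# A hypothetical call with random data standing in for real embeddings; note
# that perplexity=100 requires comfortably more than 100 samples.
#
# import numpy as np
#
# rng = np.random.default_rng(0)
# embeddings = rng.normal(size=(500, 64))  # hypothetical embedding matrix
# labels = rng.integers(0, 9, size=500)    # hypothetical class labels 0..8
# tsneplot(embeddings, labels, "tsne.png")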
class ServerFlushed:
    def __init__(self, request):
        self.request = request
import pathlib
import random
import re


class Tip:
    def __init__(self, html=None, ref_url=None, ref_name=None):
        self.html = html
        self.ref_url = ref_url
        self.ref_name = ref_name

    @staticmethod
    def parse_meta(meta):
        meta = meta.split('\n')
        meta = [kv.split(': ') for kv in meta]
        meta = {k: v for k, v in meta}
        return meta

    @classmethod
    def from_file(cls, path):
        with open(path, 'r') as f:
            html = f.read()
        try:
            meta, content = re.split(r'\n-{3,}\n', html, maxsplit=1)
        except (IndexError, ValueError):
            return cls('parse error', '', '')
        meta = cls.parse_meta(meta)
        return cls(content, **meta)

    def __repr__(self):
        return self.html

    def _repr_html_(self):
        return self.nice_output()

    def nice_output(self):
        html = f'''
        <div class="alert alert-warning" role="alert">
            {self.html}

            <button type="button" class="close" data-dismiss="alert" aria-label="Close">
                <span aria-hidden="true">&times;</span>
            </button>
        </div>
        <p>
            Source: <a href="{self.ref_url}" target="_blank">{self.ref_name}</a>
        </p>
        '''
        return html


def random_tip():
    tip_list = pathlib.Path(__file__).parent / 'tip_files'
    tip_list = list(tip_list.iterdir())
    tip_file = random.choice(tip_list)
    tip = Tip.from_file(tip_file)
    return tip


if __name__ == '__main__':
    random_tip()
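
# The on-disk format ``Tip.from_file`` expects, demonstrated with a temporary
# file: "key: value" meta lines, a dashed separator, then the HTML body.
#
# import tempfile
#
# content = (
#     "ref_url: https://example.com\n"
#     "ref_name: Example Docs\n"
#     "---\n"
#     "<b>Use keyboard shortcuts!</b>\n"
# )
# with tempfile.NamedTemporaryFile('w', suffix='.html', delete=False) as f:
#     f.write(content)
# tip = Tip.from_file(f.name)
# assert tip.ref_name == 'Example Docs'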
custoUnitario = [0.35, 0.45]  # global variable
unidadesPorMetro = [37, 30]  # global variable
custoMateriaisPorMetro = [9, 7]  # global variable
metragemParedes = 200  # global variable

def calculaPrecoConstrucao():
    custosTotais = []  # local variable
    for i in range(len(custoUnitario)):
        # local variables: custoTijolos, custoTotalPorMetro and custoTotal
        custoTijolos = custoUnitario[i] * unidadesPorMetro[i]
        custoTotalPorMetro = custoTijolos + custoMateriaisPorMetro[i]
        custoTotal = custoTotalPorMetro * metragemParedes
        custosTotais.append(custoTotal)
        print('method', i, custoTotal)

    global menorCusto
    menorCusto = 100000  # global variable
    for custo in custosTotais:
        if custo < menorCusto:
            menorCusto = custo

calculaPrecoConstrucao()
print('lowest cost:', menorCusto)
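
# The final loop above hand-rolls a minimum; Python's built-in ``min`` does the
# same in one line. With the inputs above, the totals work out to
# [4390.0, 4100.0], so both approaches yield 4100.0.
#
# custosTotais = [4390.0, 4100.0]
# menorCusto = min(custosTotais)
# assert menorCusto == 4100.0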
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib

from libcloud.compute.base import Node, NodeSize, NodeImage, NodeLocation
from libcloud.compute.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.compute.types import InvalidCredsError

from libcloud.test import MockHttp
from libcloud.test.file_fixtures import ComputeFileFixtures

from libcloud.test.secrets import VOXEL_PARAMS


class VoxelTest(unittest.TestCase):

    def setUp(self):
        Voxel.connectionCls.conn_class = VoxelMockHttp
        VoxelMockHttp.type = None
        self.driver = Voxel(*VOXEL_PARAMS)

    def test_auth_failed(self):
        VoxelMockHttp.type = 'UNAUTHORIZED'
        try:
            self.driver.list_nodes()
        except Exception as e:
            self.assertTrue(isinstance(e, InvalidCredsError))
        else:
            self.fail('test should have thrown')

    def test_response_failure(self):
        VoxelMockHttp.type = 'FAILURE'

        try:
            self.driver.list_nodes()
        except Exception:
            pass
        else:
            self.fail('Invalid response, but exception was not thrown')

    def test_list_nodes(self):
        VoxelMockHttp.type = 'LIST_NODES'
        nodes = self.driver.list_nodes()

        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].name, 'www.voxel.net')

    def test_list_sizes(self):
        sizes = self.driver.list_sizes()

        self.assertEqual(len(sizes), 13)

    def test_list_images(self):
        VoxelMockHttp.type = 'LIST_IMAGES'
        images = self.driver.list_images()

        self.assertEqual(len(images), 1)

    def test_list_locations(self):
        VoxelMockHttp.type = 'LIST_LOCATIONS'
        locations = self.driver.list_locations()

        self.assertEqual(len(locations), 2)
        self.assertEqual(locations[0].name, 'Amsterdam')

    def test_create_node_invalid_disk_size(self):
        image = NodeImage(
            id=1, name='Ubuntu 8.10 (intrepid)', driver=self.driver)
        size = NodeSize(
            1, '256 slice', None, None, None, None, driver=self.driver)
        location = NodeLocation(id=1, name='Europe', country='England',
                                driver=self.driver)

        try:
            self.driver.create_node(name='foo', image=image, size=size,
                                    location=location)
        except ValueError:
            pass
        else:
            self.fail('Invalid disk size provided but an exception was not'
                      ' thrown')

    def test_create_node(self):
        VoxelMockHttp.type = 'CREATE_NODE'
        image = NodeImage(
            id=1, name='Ubuntu 8.10 (intrepid)', driver=self.driver)
        size = NodeSize(
            1, '256 slice', 1024, 500, None, None, driver=self.driver)
        location = NodeLocation(id=1, name='Europe', country='England',
                                driver=self.driver)

        node = self.driver.create_node(name='foo', image=image, size=size,
                                       location=location)
        self.assertEqual(node.id, '1234')

        node = self.driver.create_node(name='foo', image=image, size=size,
                                       location=location, ex_voxel_access=True)
        self.assertEqual(node.id, '1234')

    def test_reboot_node(self):
        VoxelMockHttp.type = 'REBOOT_NODE'
        node = Node(
            id=72258, name=None, state=None, public_ips=None, private_ips=None,
            driver=self.driver)

        self.assertTrue(node.reboot())

    def test_destroy_node(self):
        VoxelMockHttp.type = 'DESTROY_NODE'
        node = Node(
            id=72258, name=None, state=None, public_ips=None, private_ips=None,
            driver=self.driver)

        self.assertTrue(node.destroy())


class VoxelMockHttp(MockHttp):

    fixtures = ComputeFileFixtures('voxel')

    def _UNAUTHORIZED(self, method, url, body, headers):
        body = self.fixtures.load('unauthorized.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _FAILURE(self, method, url, body, headers):
        body = self.fixtures.load('failure.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_NODES(self, method, url, body, headers):
        body = self.fixtures.load('nodes.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_IMAGES(self, method, url, body, headers):
        body = self.fixtures.load('images.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_LOCATIONS(self, method, url, body, headers):
        body = self.fixtures.load('locations.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _CREATE_NODE(self, method, url, body, headers):
        body = self.fixtures.load('create_node.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _REBOOT_NODE(self, method, url, body, headers):
        body = self.fixtures.load('success.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DESTROY_NODE(self, method, url, body, headers):
        body = self.fixtures.load('success.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])


if __name__ == '__main__':
    sys.exit(unittest.main())
#!/usr/bin/env python

# Import modules
from pcl_helper import *


# TODO: Define functions as required

# Callback function for your Point Cloud Subscriber
def pcl_callback(pcl_msg):
    # TODO: Convert ROS msg to PCL data
    cloud = ros_to_pcl(pcl_msg)

    # TODO: Voxel Grid Downsampling
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()

    # Choose a voxel (also known as leaf) size; the template default of 1 is
    # far too coarse, so experiment to find an appropriate size.
    LEAF_SIZE = 0.01

    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)

    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()

    # TODO: PassThrough Filter
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()

    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0.6
    axis_max = 1.1
    passthrough.set_filter_limits(axis_min, axis_max)

    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()

    # TODO: RANSAC Plane Segmentation
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()

    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)

    # Max distance for a point to be considered fitting the model.
    # Experiment with different values for max_distance
    # for segmenting the table.
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()

    # TODO: Extract inliers and outliers
    extracted_inliers = cloud_filtered.extract(inliers, negative=False)   # table
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)   # objects on table

    # TODO: Euclidean Clustering
    # Apply function to convert XYZRGB to XYZ
    white_cloud = XYZRGB_to_XYZ(extracted_outliers)
    tree = white_cloud.make_kdtree()
    # Create a cluster extraction object
    ec = white_cloud.make_EuclideanClusterExtraction()
    # Set tolerances for distance threshold
    # as well as minimum and maximum cluster size (in points).
    # Experiment and find values that work for segmenting your objects.
    ec.set_ClusterTolerance(0.02)
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(10000)
    # Search the k-d tree for clusters
    ec.set_SearchMethod(tree)
    # Extract indices for each of the discovered clusters
    cluster_indices = ec.Extract()

    # TODO: Create Cluster-Mask Point Cloud to visualize each cluster separately
    # Assign a color corresponding to each segmented object in scene
    cluster_color = get_color_list(len(cluster_indices))

    color_cluster_point_list = []

    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([white_cloud[indice][0],
                                             white_cloud[indice][1],
                                             white_cloud[indice][2],
                                             rgb_to_float(cluster_color[j])])

    # Create new cloud containing all clusters, each with unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # TODO: Convert PCL data to ROS messages
    ros_objects_cloud = pcl_to_ros(extracted_outliers)
    ros_table_cloud = pcl_to_ros(extracted_inliers)
    ros_cluster_cloud = pcl_to_ros(cluster_cloud)

    # TODO: Publish ROS messages
    pcl_objects_pub.publish(ros_objects_cloud)
    pcl_table_pub.publish(ros_table_cloud)
    pcl_cluster_pub.publish(ros_cluster_cloud)


if __name__ == '__main__':

    # TODO: ROS node initialization
    rospy.init_node('clustering', anonymous=True)

    # TODO: Create Subscribers
    pcl_sub = rospy.Subscriber("/sensor_stick/point_cloud", pc2.PointCloud2, pcl_callback, queue_size=1)

    # TODO: Create Publishers
    pcl_objects_pub = rospy.Publisher("/pcl_objects", PointCloud2, queue_size=1)
    pcl_table_pub = rospy.Publisher("/pcl_table", PointCloud2, queue_size=1)
    pcl_cluster_pub = rospy.Publisher("/pcl_cluster", PointCloud2, queue_size=1)

    # Initialize color_list
    get_color_list.color_list = []

    # Spin while node is not shutdown (rospy.spin() itself blocks until
    # shutdown, so the surrounding loop is simply a safety net).
    while not rospy.is_shutdown():
        rospy.spin()
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python2, python3
"""Binary for running temperature scaling, writing temperature param to disk."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import json
import os

from absl import app
from absl import flags

import numpy as np
import tensorflow.compat.v2 as tf

from uq_benchmark_2019 import array_utils
from uq_benchmark_2019 import calibration_lib
from uq_benchmark_2019 import metrics_lib
from uq_benchmark_2019 import uq_utils

gfile = tf.io.gfile
FLAGS = flags.FLAGS

NUM_EXAMPLES = 20000


def _declare_flags():
  """Declare flags; not invoked when this module is imported as a library."""
  flags.DEFINE_string('prediction_path', None, 'Path to predictions file.')


def run(prediction_path):
  """Run temperature scaling."""
  stats = array_utils.load_stats_from_tfrecords(prediction_path)
  probs = stats['probs'].astype(np.float32)
  labels = stats['labels'].astype(np.int32)
  if len(labels.shape) > 1:
    labels = np.squeeze(labels, -1)

  if probs.shape[0] > NUM_EXAMPLES:
    probs = probs[:NUM_EXAMPLES, :]
    labels = labels[:NUM_EXAMPLES]

  probs = metrics_lib.soften_probabilities(probs=probs)
  logits = uq_utils.np_inverse_softmax(probs)
  temp = calibration_lib.find_scaling_temperature(labels, logits)
  with gfile.GFile(
      os.path.join(os.path.dirname(prediction_path),
                   'temperature_hparam.json'), 'w') as fh:
    fh.write(json.dumps({'temperature': temp}))


def main(argv):
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  tf.enable_v2_behavior()
  run(FLAGS.prediction_path)


if __name__ == '__main__':
  _declare_flags()
  app.run(main)
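
# A minimal sketch, not part of the binary above, of how the saved temperature
# is typically applied at inference time: divide the logits by T before the
# softmax. ``temperature_json_path`` is the 'temperature_hparam.json' file the
# binary writes.
#
# import json
# import numpy as np
#
# def apply_temperature(logits, temperature_json_path):
#   """Return temperature-scaled probabilities for an array of logits."""
#   with open(temperature_json_path) as fh:
#     temp = json.load(fh)['temperature']
#   scaled = logits / temp
#   exp = np.exp(scaled - scaled.max(axis=-1, keepdims=True))
#   return exp / exp.sum(axis=-1, keepdims=True)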
import argparseNEWLINENEWLINEfrom .virsh_cleanup import DEFAULT_SKIP_LIST, clean_virsh_resources, logNEWLINENEWLINENEWLINEdef _get_parsed_args() -> argparse.Namespace:NEWLINE parser = argparse.ArgumentParser(description="Clean libvirt resources")NEWLINE group = parser.add_mutually_exclusive_group()NEWLINE group.add_argument("-a", "--all", help="Clean all virsh resources", action="store_true")NEWLINE group.add_argument("-m", "--minikube", help="Clean only minikube resources", action="store_true")NEWLINE group.add_argument("--skip-minikube", help="Clean all but skip minikube resources", action="store_true")NEWLINE group.add_argument(NEWLINE "-f",NEWLINE "--filter",NEWLINE help="List of filters for resources to delete",NEWLINE nargs="*",NEWLINE type=str,NEWLINE default=None,NEWLINE )NEWLINE return parser.parse_args()NEWLINENEWLINENEWLINEdef main():NEWLINE log.info("===== CLEANING VIRSH RESOURCES =====")NEWLINE p_args = _get_parsed_args()NEWLINE skip_list = list(DEFAULT_SKIP_LIST) # copy so the imported default is not mutatedNEWLINE resource_filter = []NEWLINE if p_args.minikube:NEWLINE resource_filter.append("minikube")NEWLINE elif p_args.filter:NEWLINE resource_filter = p_args.filterNEWLINE else:NEWLINE skip_list.extend(["minikube", "minikube-net"])NEWLINENEWLINE clean_virsh_resources(skip_list, resource_filter)NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE main()NEWLINE
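# A hedged sketch, independent of the module above: it demonstrates the aliasing pitfall that copying DEFAULT_SKIP_LIST with list() avoids; all names here are illustrative.NEWLINEDEFAULTS = ["default", "default-net"]NEWLINENEWLINEdef build_skip_list(extra, copy_first):NEWLINE skip = list(DEFAULTS) if copy_first else DEFAULTS # without the copy, skip aliases DEFAULTSNEWLINE skip.extend(extra)NEWLINE return skipNEWLINENEWLINEbuild_skip_list(["minikube"], copy_first=True)NEWLINEassert DEFAULTS == ["default", "default-net"] # unchanged, as intendedNEWLINEbuild_skip_list(["minikube"], copy_first=False)NEWLINEassert DEFAULTS == ["default", "default-net", "minikube"] # module-level default silently mutatedNEWLINE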
from Node import NodeNEWLINEfrom shutil import rmtreeNEWLINEfrom sesamutils import sesam_loggerNEWLINEfrom os import mkdirNEWLINEfrom git import RepoNEWLINEimport subprocessNEWLINEfrom json import dumps as dump_jsonNEWLINENEWLINENEWLINEclass Gitter:NEWLINE def __init__(self, url, username, password_or_token, folder, branch):NEWLINE self.url = urlNEWLINE self.username = usernameNEWLINE self.password_or_token = password_or_tokenNEWLINE self.folder = folderNEWLINE self.branch = branchNEWLINENEWLINE self.LOGGER = sesam_logger('Git')NEWLINENEWLINE self.repo = self.clone_repo()NEWLINENEWLINE def clone_repo(self):NEWLINE self.try_to_delete_dir(self.folder)NEWLINE url = f'https://{self.username}:{self.password_or_token}@{self.url}'NEWLINE repo = Repo.clone_from(url, self.folder, branch=self.branch)NEWLINE return repoNEWLINENEWLINE def push_if_diff(self, dry_run=False):NEWLINE if self.is_there_a_diff():NEWLINE if dry_run:NEWLINE self.LOGGER.info('Dry run! Skipping push to repo.')NEWLINE else:NEWLINE self.push()NEWLINE self.LOGGER.info('Successfully pushed to git repo!')NEWLINE else:NEWLINE self.LOGGER.info('No current diff! Skipping push to repo.')NEWLINENEWLINE def is_there_a_diff(self):NEWLINE bash_command = 'git status'NEWLINE process = subprocess.Popen(bash_command.split(), stdout=subprocess.PIPE, cwd=self.repo.working_dir + '/node/')NEWLINE output, error = process.communicate()NEWLINE if output.endswith(b"working tree clean\n"):NEWLINE return FalseNEWLINE else:NEWLINE self.LOGGER.info(f'Git status result : "{output}"')NEWLINE return TrueNEWLINENEWLINE def push(self):NEWLINE self.LOGGER.debug(f'Pushing to git repo "{self.url}"') # log the plain URL, not the credentialed remoteNEWLINE self.repo.git.add([self.repo.working_dir])NEWLINE self.repo.index.commit(message='Update based on master node config')NEWLINE origin = self.repo.remote('origin')NEWLINE origin.push()NEWLINENEWLINE def try_to_delete_dir(self, directory):NEWLINE self.LOGGER.debug(f'Deleting directory "{directory}"')NEWLINE rmtree(directory, ignore_errors=True) # ignore_errors already covers a missing directoryNEWLINENEWLINE def try_to_make_dir(self, directory):NEWLINE try:NEWLINE self.LOGGER.debug(f'Creating directory "{directory}"')NEWLINE mkdir(directory)NEWLINE except FileExistsError:NEWLINE self.LOGGER.info(f'Did not create "{directory}" because it already exists!')NEWLINENEWLINE def create_node_file_structure(self, node: Node, env):NEWLINE self.try_to_delete_dir(f'{self.repo.working_dir}/node')NEWLINE for p in [NEWLINE f'{self.repo.working_dir}/node/',NEWLINE f'{self.repo.working_dir}/node/pipes/',NEWLINE f'{self.repo.working_dir}/node/systems/',NEWLINE f'{self.repo.working_dir}/node/variables/'NEWLINE ]:NEWLINE self.try_to_make_dir(p)NEWLINE for conf in node.conf:NEWLINE if conf['type'] == 'pipe':NEWLINE tmp_file = open(f'{self.repo.working_dir}/node/pipes/{conf["_id"]}.conf.json', 'w+')NEWLINE elif 'system' in conf['type']:NEWLINE tmp_file = open(f'{self.repo.working_dir}/node/systems/{conf["_id"]}.conf.json', 'w+')NEWLINE elif conf['type'] == 'metadata':NEWLINE tmp_file = open(f'{self.repo.working_dir}/node/node-metadata.conf.json', 'w+')NEWLINE else:NEWLINE continue # skip unknown config types instead of rewriting the previous fileNEWLINE tmp_file.write(dump_json(conf, indent=2))NEWLINE tmp_file.close()NEWLINE if len(node.upload_vars) != 0:NEWLINE with open(f'{self.repo.working_dir}/node/variables/variables-{env}.json', 'w+') as tmp_file:NEWLINE tmp_file.write(dump_json(node.upload_vars, indent=2))NEWLINE
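# A hedged usage sketch for the Gitter class above; the module name, credentials, URL, and commented-out Node call are illustrative assumptions, and a real token would come from the environment.NEWLINEfrom Gitter import GitterNEWLINENEWLINEgitter = Gitter(url='github.com/example-org/node-config.git', username='ci-bot', password_or_token='<token>', folder='/tmp/node-config', branch='master')NEWLINE# gitter.create_node_file_structure(node, env='prod') # given a Node built elsewhere in this projectNEWLINEgitter.push_if_diff(dry_run=True) # with dry_run=True the diff is logged but nothing is pushedNEWLINE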
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License").NEWLINE# You may not use this file except in compliance with the License.NEWLINE# A copy of the License is located atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# or in the "license" file accompanying this file. This file is distributedNEWLINE# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, eitherNEWLINE# express or implied. See the License for the specific language governingNEWLINE# permissions and limitations under the License.NEWLINENEWLINE# Standard library importsNEWLINEfrom typing import Dict, Optional, Tuple, ListNEWLINENEWLINE# Third-party importsNEWLINEimport numpy as npNEWLINENEWLINE# First-party importsNEWLINEfrom gluonts.model.common import TensorNEWLINEfrom gluonts.core.component import validatedNEWLINENEWLINE# Relative importsNEWLINEfrom .distribution import Distribution, _sample_multiple, getF, softplusNEWLINEfrom .distribution_output import DistributionOutputNEWLINENEWLINENEWLINEclass NegativeBinomial(Distribution):NEWLINE r"""NEWLINE Negative binomial distribution, i.e. the distribution of the number ofNEWLINE failures before a given number of successes in a sequence of independentNEWLINE Bernoulli trials.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE muNEWLINE Tensor containing the means, of shape `(*batch_shape, *event_shape)`.NEWLINE alphaNEWLINE Tensor of the shape parameters, of shape `(*batch_shape, *event_shape)`.NEWLINE FNEWLINE MXNet API module (NDArray or Symbol); inferred from `mu` when omitted.NEWLINE """NEWLINENEWLINE is_reparameterizable = FalseNEWLINENEWLINE @validated()NEWLINE def __init__(self, mu: Tensor, alpha: Tensor, F=None) -> None:NEWLINE self.mu = muNEWLINE self.alpha = alphaNEWLINE self.F = F if F else getF(mu)NEWLINENEWLINE @propertyNEWLINE def batch_shape(self) -> Tuple:NEWLINE return self.mu.shapeNEWLINENEWLINE @propertyNEWLINE def event_shape(self) -> Tuple:NEWLINE return ()NEWLINENEWLINE @propertyNEWLINE def event_dim(self) -> int:NEWLINE return 0NEWLINENEWLINE def log_prob(self, x: Tensor) -> Tensor:NEWLINE alphaInv = 1.0 / self.alphaNEWLINE alpha_times_mu = self.alpha * self.muNEWLINE F = self.FNEWLINE ll = (NEWLINE x * F.log(alpha_times_mu / (1.0 + alpha_times_mu))NEWLINE - alphaInv * F.log1p(alpha_times_mu)NEWLINE + F.gammaln(x + alphaInv)NEWLINE - F.gammaln(x + 1.0)NEWLINE - F.gammaln(alphaInv)NEWLINE )NEWLINE return llNEWLINENEWLINE @propertyNEWLINE def mean(self) -> Tensor:NEWLINE return self.muNEWLINENEWLINE @propertyNEWLINE def stddev(self) -> Tensor:NEWLINE return self.F.sqrt(self.mu * (1.0 + self.mu * self.alpha))NEWLINENEWLINE def sample(NEWLINE self, num_samples: Optional[int] = None, dtype=np.float32NEWLINE ) -> Tensor:NEWLINE def s(mu: Tensor, alpha: Tensor) -> Tensor:NEWLINE F = self.FNEWLINE tol = 1e-5NEWLINE r = 1.0 / alphaNEWLINE theta = alpha * muNEWLINE r = F.minimum(F.maximum(tol, r), 1e10)NEWLINE theta = F.minimum(F.maximum(tol, theta), 1e10)NEWLINE x = F.minimum(F.random.gamma(r, theta), 1e6)NEWLINE return F.random.poisson(lam=x, dtype=dtype)NEWLINENEWLINE return _sample_multiple(NEWLINE s, mu=self.mu, alpha=self.alpha, num_samples=num_samplesNEWLINE )NEWLINENEWLINE @propertyNEWLINE def args(self) -> List:NEWLINE return [self.mu, self.alpha]NEWLINENEWLINENEWLINEclass NegativeBinomialOutput(DistributionOutput):NEWLINE args_dim: Dict[str, int] = {"mu": 1, "alpha": 1}NEWLINE distr_cls: type = NegativeBinomialNEWLINENEWLINE @classmethodNEWLINE def domain_map(cls, F, mu, alpha):NEWLINE epsilon = np.finfo(cls._dtype).eps # machine epsilonNEWLINENEWLINE mu = softplus(F, mu) + epsilonNEWLINE alpha = softplus(F, alpha) + epsilonNEWLINE return mu.squeeze(axis=-1), alpha.squeeze(axis=-1)NEWLINENEWLINE # Overrides the parent class method.NEWLINE # We cannot scale using the affine transformation since the negative binomial should return integers.NEWLINE # Instead we scale the parameters.NEWLINE def distribution(NEWLINE self,NEWLINE distr_args,NEWLINE loc: Optional[Tensor] = None,NEWLINE scale: Optional[Tensor] = None,NEWLINE ) -> NegativeBinomial:NEWLINE assert loc is NoneNEWLINE mu, alpha = distr_argsNEWLINE if scale is None:NEWLINE return NegativeBinomial(mu, alpha)NEWLINE else:NEWLINE F = getF(mu)NEWLINE mu = F.broadcast_mul(mu, scale)NEWLINE alpha = F.broadcast_mul(alpha, F.sqrt(scale + 1.0))NEWLINE return NegativeBinomial(mu, alpha, F)NEWLINENEWLINE @propertyNEWLINE def event_shape(self) -> Tuple:NEWLINE return ()NEWLINE
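# A hedged numpy sketch, separate from the GluonTS code above: it checks the same gamma-Poisson parameterization used in sample(); the sample mean should approach mu and the variance mu * (1 + alpha * mu), matching the stddev property.NEWLINEimport numpy as npNEWLINENEWLINErng = np.random.default_rng(0)NEWLINEmu, alpha = 5.0, 0.3NEWLINEr = 1.0 / alpha # gamma shape, as in sample() aboveNEWLINEtheta = alpha * mu # gamma scaleNEWLINElam = rng.gamma(shape=r, scale=theta, size=100_000)NEWLINEx = rng.poisson(lam)NEWLINEprint(x.mean()) # ~ 5.0 == muNEWLINEprint(x.var()) # ~ 12.5 == mu * (1 + alpha * mu)NEWLINE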
"""Testing handling with CoreState."""NEWLINENEWLINEfrom supervisor.coresys import CoreSysNEWLINENEWLINENEWLINEasync def test_timezone(run_dir, coresys: CoreSys):NEWLINE """Test write corestate to /run/supervisor."""NEWLINENEWLINE assert coresys.timezone == "UTC"NEWLINE assert coresys.config.timezone is NoneNEWLINENEWLINE await coresys.dbus.timedate.connect()NEWLINE await coresys.dbus.timedate.update()NEWLINE assert coresys.timezone == "Etc/UTC"NEWLINENEWLINE coresys.config.timezone = "Europe/Zurich"NEWLINE assert coresys.timezone == "Europe/Zurich"NEWLINE