import cv2
import sys
import os
import numpy as np
import time
# Initialize the parameters
confThreshold = 0.5 # Confidence threshold
nmsThreshold = 0.4 # Non-maximum suppression threshold
inpWidth = 416 # Width of network's input image
inpHeight = 416 # Height of network's input image
starting_time = time.time()  # baseline for FPS measurement
frame_id = 0
font = cv2.FONT_HERSHEY_PLAIN
# Load names of classes
classesFile = "coco.names"
classes = None
with open(classesFile, 'rt') as f:
    classes = f.read().rstrip('\n').split('\n')
# Give the configuration and weight files for the model and load the network using them.
modelConfiguration = "yolov3.cfg"
modelWeights = "yolov3.weights"
net = cv2.dnn.readNetFromDarknet(modelConfiguration, modelWeights)
net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)
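# If your OpenCV build was compiled with CUDA support (4.2+), the network can
# run on the GPU instead; a hedged alternative, assuming such a build:
# net.setPreferableBackend(cv2.dnn.DNN_BACKEND_CUDA)
# net.setPreferableTarget(cv2.dnn.DNN_TARGET_CUDA)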
inputFile = "presen_T.mp4"
inputFile2 = "presen_R.mp4"
outputFile = "yolo_out_py.avi"
# Open the video file
if not os.path.isfile(inputFile):
    print("Input video file ", inputFile, " doesn't exist")
    sys.exit(1)
cap = cv2.VideoCapture(inputFile)
cap2 = cv2.VideoCapture(inputFile2)
outputFile = inputFile[:-4] + "_yolo_out_py.avi"
# Get the video writer initialized to save the output video
# Frames are resized to 600x402 in the main loop before being written, so the
# writer must be created with that size or the output file will be unplayable.
vid_writer = cv2.VideoWriter(outputFile, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30, (600, 402))
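# The output FPS is hard-coded to 30 above; to preserve the source video's rate
# instead, it can be read from the capture (a hedged alternative, assuming the
# container reports a sane value):
# fps_in = cap.get(cv2.CAP_PROP_FPS) or 30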
# Get the names of the output layers
def getOutputsNames(net):
    # Get the names of all the layers in the network
    layersNames = net.getLayerNames()
    # Get the names of the output layers, i.e. the layers with unconnected outputs.
    # getUnconnectedOutLayers() returns an Nx1 array in older OpenCV versions and
    # a flat array in newer ones; flatten() handles both shapes.
    return [layersNames[i - 1] for i in np.array(net.getUnconnectedOutLayers()).flatten()]
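# Recent OpenCV builds also expose net.getUnconnectedOutLayersNames(), which
# returns these names directly and sidesteps the index arithmetic above.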
# Draw the predicted bounding box
def drawPred(classId, conf, left, top, right, bottom):
    # Draw a bounding box (note: draws on the module-level `frame`,
    # which is the same object postprocess() receives)
    cv2.rectangle(frame, (left, top), (right, bottom), (0, 255, 0))
    label = '%.2f' % conf
    # Get the label for the class name and its confidence
    if classes:
        assert (classId < len(classes))
        label = '%s:%s' % (classes[classId], label)
    # Display the label at the top of the bounding box
    labelSize, baseLine = cv2.getTextSize(label, font, 0.5, 1)
    top = max(top, labelSize[1])
    cv2.putText(frame, label, (left, top), font, 1, (0, 255, 0), 2)
# Remove the bounding boxes with low confidence using non-maxima suppression
def postprocess(frame, outs):
    frameHeight = frame.shape[0]
    frameWidth = frame.shape[1]
    # Scan through all the bounding boxes output from the network and keep only the
    # ones with high confidence scores. Assign the box's class label as the class
    # with the highest score.
    classIds = []
    confidences = []
    boxes = []
    for out in outs:
        for detection in out:
            scores = detection[5:]
            classId = np.argmax(scores)
            confidence = scores[classId]
            if confidence > confThreshold:
                # Detections are center/size coordinates normalized to [0, 1];
                # convert to pixel corner coordinates
                center_x = int(detection[0] * frameWidth)
                center_y = int(detection[1] * frameHeight)
                width = int(detection[2] * frameWidth)
                height = int(detection[3] * frameHeight)
                left = int(center_x - width / 2)
                top = int(center_y - height / 2)
                classIds.append(classId)
                confidences.append(float(confidence))
                boxes.append([left, top, width, height])
    # Perform non maximum suppression to eliminate redundant overlapping boxes with
    # lower confidences. NMSBoxes returns Nx1 indices in older OpenCV versions and
    # a flat array in newer ones; flatten() handles both shapes.
    indices = cv2.dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold)
    for i in np.array(indices).flatten():
        box = boxes[i]
        left = box[0]
        top = box[1]
        width = box[2]
        height = box[3]
        drawPred(classIds[i], confidences[i], left, top, left + width, top + height)
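# Note on cv2.dnn.NMSBoxes: among the candidates above confThreshold it keeps
# the highest-scoring box and suppresses every remaining box whose overlap (IoU)
# with it exceeds nmsThreshold, repeating until no candidates are left.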
# Main
while True:
    # get frame from the video
    hasFrame, frame = cap.read()
    hasFrame2, frame2 = cap2.read()
    # Stop the program if reached end of video. Check before touching the frames,
    # since cap.read() returns (False, None) once the video is exhausted and
    # cv2.resize would raise on a None frame.
    if not hasFrame or not hasFrame2:
        print("Done processing !!!")
        cv2.waitKey(3000)
        break
    frame = cv2.resize(frame, dsize=(600, 402))
    frame2 = cv2.resize(frame2, dsize=(600, 402))
    cv2.imshow("Camera", frame)
    cv2.imshow("Thermal_Camera", frame2)
    # Create a 4D blob from a frame.
    blob = cv2.dnn.blobFromImage(frame, 1 / 255, (inpWidth, inpHeight), [0, 0, 0], 1, crop=False)
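    # blobFromImage arguments: scalefactor 1/255 rescales pixels to [0, 1], the
    # image is resized to (inpWidth, inpHeight), mean [0, 0, 0] is subtracted,
    # swapRB=1 converts OpenCV's BGR channel order to the RGB order the Darknet
    # model expects, and crop=False resizes without center-cropping.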
    # Sets the input to the network
    net.setInput(blob)
    # Runs the forward pass to get output of the output layers
    outs = net.forward(getOutputsNames(net))
    # Remove the bounding boxes with low confidence
    postprocess(frame, outs)
    # Compute the FPS from the time elapsed since the previous frame
    current_time = time.time()
    sec = current_time - starting_time
    starting_time = current_time
    fps = 1 / sec if sec > 0 else 0.0
    str2 = "FPS : %0.1f" % fps
    # cv2.putText(frame, str2, (10, 50), font, 2, (0, 255, 0), 2)
    # Write the frame with the detection boxes
    vid_writer.write(frame.astype(np.uint8))
    # CAMERA RESULT
    cv2.imshow("CAMERA_Detection", frame)
    # Detect corners in the thermal frame with FAST (threshold 30);
    # non-max suppression is disabled so all candidate keypoints are kept
    fast = cv2.FastFeatureDetector_create(30)
    fast.setNonmaxSuppression(0)
    kp = fast.detect(frame2, None)
    img2 = cv2.drawKeypoints(frame2, kp, None, (0, 255, 255))
# cv2.imshow("THERMAL", img2)
hsv = cv2.cvtColor(frame2, cv2.COLOR_BGR2HSV)
car_prediction = 30
lower_white = np.array([0, 0, 255 - car_prediction], dtype=np.uint8)
upper_white = np.array([255, car_prediction, 255], dtype=np.uint8)
mask_white = cv2.inRange(hsv, lower_white, upper_white)
res = cv2.bitwise_and(frame2, frame2, mask=mask_white)
# cv2.imshow("THERMAL_CAR", res)
    res2 = res
    igray = cv2.cvtColor(res2, cv2.COLOR_BGR2GRAY)
    iret, ibinary = cv2.threshold(igray, 127, 255, cv2.THRESH_BINARY)
    contours, hierarchy = cv2.findContours(ibinary, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
    # Outline each detected hot blob and label it as a car candidate
    for i in range(len(contours)):
        cv2.drawContours(res2, [contours[i]], 0, (255, 255, 255), 2)
        cv2.putText(res2, "car", tuple(contours[i][0][0]), font, 1, (0, 255, 0), 1)
    # cv2.imshow("THERMAL_CONTOUR", res2)
    # THERMAL PROCESSING RESULT
    dst = cv2.addWeighted(res2, 1, frame2, 1, 0)
    # cv2.imshow('THERMAL_RES', dst)
    # cv2.imshow("THERMAL", frame2)
    # FINAL RESULT
    dst2 = cv2.addWeighted(res2, 1, frame, 1, 0)
    cv2.imshow("RESULT", dst2)
    # End the video with "Esc"
    key = cv2.waitKey(1)
    if key == 27:
        break
cap.release()
cap2.release()
vid_writer.release()  # finalize the output AVI
cv2.destroyAllWindows()
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ClientMessageDtos.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='ClientMessageDtos.proto',
package='EventStore.Client.Messages',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x17\x43lientMessageDtos.proto\x12\x1a\x45ventStore.Client.Messages\"\x8a\x01\n\x08NewEvent\x12\x10\n\x08\x65vent_id\x18\x01 \x02(\x0c\x12\x12\n\nevent_type\x18\x02 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x03 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x04 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x05 \x02(\x0c\x12\x10\n\x08metadata\x18\x06 \x01(\x0c\"\xe4\x01\n\x0b\x45ventRecord\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x10\n\x08\x65vent_id\x18\x03 \x02(\x0c\x12\x12\n\nevent_type\x18\x04 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x05 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x06 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x07 \x02(\x0c\x12\x10\n\x08metadata\x18\x08 \x01(\x0c\x12\x0f\n\x07\x63reated\x18\t \x01(\x03\x12\x15\n\rcreated_epoch\x18\n \x01(\x03\"\x85\x01\n\x14ResolvedIndexedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\"\xb1\x01\n\rResolvedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x17\n\x0f\x63ommit_position\x18\x03 \x02(\x03\x12\x18\n\x10prepare_position\x18\x04 \x02(\x03\"\x8e\x01\n\x0bWriteEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x34\n\x06\x65vents\x18\x03 \x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xe7\x01\n\x14WriteEventsCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10prepare_position\x18\x05 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63urrent_version\x18\x07 \x01(\x03\"n\n\x0c\x44\x65leteStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\x12\x13\n\x0bhard_delete\x18\x04 \x01(\x08\"\x98\x01\n\x15\x44\x65leteStreamCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x18\n\x10prepare_position\x18\x03 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x04 \x01(\x03\"]\n\x10TransactionStart\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionStartCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"x\n\x10TransactionWrite\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x34\n\x06\x65vents\x18\x02 \x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionWriteCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"C\n\x11TransactionCommit\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x02 \x02(\x08\"\xec\x01\n\x1aTransactionCommitCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 '
b'\x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x04 \x02(\x03\x12\x19\n\x11last_event_number\x18\x05 \x02(\x03\x12\x18\n\x10prepare_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x07 \x01(\x03\"l\n\tReadEvent\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xa0\x02\n\x12ReadEventCompleted\x12N\n\x06result\x18\x01 \x02(\x0e\x32>.EventStore.Client.Messages.ReadEventCompleted.ReadEventResult\x12?\n\x05\x65vent\x18\x02 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\r\n\x05\x65rror\x18\x03 \x01(\t\"j\n\x0fReadEventResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NotFound\x10\x01\x12\x0c\n\x08NoStream\x10\x02\x12\x11\n\rStreamDeleted\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x8b\x01\n\x10ReadStreamEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x19\n\x11\x66rom_event_number\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xa2\x03\n\x19ReadStreamEventsCompleted\x12@\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12V\n\x06result\x18\x02 \x02(\x0e\x32\x46.EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult\x12\x19\n\x11next_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x05 \x02(\x08\x12\x1c\n\x14last_commit_position\x18\x06 \x02(\x03\x12\r\n\x05\x65rror\x18\x07 \x01(\t\"n\n\x10ReadStreamResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NoStream\x10\x01\x12\x11\n\rStreamDeleted\x10\x02\x12\x0f\n\x0bNotModified\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x87\x01\n\rReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xf9\x02\n\x16ReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12Y\n\x06result\x18\x06 \x01(\x0e\x32@.EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x07 \x01(\t\"J\n\rReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xe9\x01\n\x06\x46ilter\x12\x41\n\x07\x63ontext\x18\x01 \x02(\x0e\x32\x30.EventStore.Client.Messages.Filter.FilterContext\x12;\n\x04type\x18\x02 \x02(\x0e\x32-.EventStore.Client.Messages.Filter.FilterType\x12\x0c\n\x04\x64\x61ta\x18\x03 \x03(\t\",\n\rFilterContext\x12\x0c\n\x08StreamId\x10\x00\x12\r\n\tEventType\x10\x01\"#\n\nFilterType\x12\t\n\x05Regex\x10\x00\x12\n\n\x06Prefix\x10\x01\"\xde\x01\n\x15\x46ilteredReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x19\n\x11max_search_window\x18\x04 \x01(\x05\x12\x18\n\x10resolve_link_tos\x18\x05 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x06 '
b'\x02(\x08\x12\x32\n\x06\x66ilter\x18\x07 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\"\xb3\x03\n\x1e\x46ilteredReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x06 \x02(\x08\x12i\n\x06result\x18\x07 \x01(\x0e\x32P.EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x08 \x01(\t\"R\n\x15\x46ilteredReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xde\x03\n\x1c\x43reatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 \x01(\t\"X\n\x1c\x44\x65letePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\"\xde\x03\n\x1cUpdatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 \x01(\t\"\x97\x02\n%UpdatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"_\n\"UpdatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x98\x02\n%CreatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 '
b'\x01(\t\"`\n\"CreatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x11\n\rAlreadyExists\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x97\x02\n%DeletePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"_\n\"DeletePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"w\n\x1f\x43onnectToPersistentSubscription\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\"\n\x1a\x61llowed_in_flight_messages\x18\x03 \x02(\x05\"W\n\x1fPersistentSubscriptionAckEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\"\x8b\x02\n\x1fPersistentSubscriptionNakEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\x12\x0f\n\x07message\x18\x03 \x01(\t\x12^\n\x06\x61\x63tion\x18\x04 \x02(\x0e\x32\x45.EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction:\x07Unknown\"A\n\tNakAction\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Park\x10\x01\x12\t\n\x05Retry\x10\x02\x12\x08\n\x04Skip\x10\x03\x12\x08\n\x04Stop\x10\x04\"v\n\"PersistentSubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x17\n\x0fsubscription_id\x18\x02 \x02(\t\x12\x19\n\x11last_event_number\x18\x03 \x01(\x03\"\x80\x01\n)PersistentSubscriptionStreamEventAppeared\x12?\n\x05\x65vent\x18\x01 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\x12\n\nretryCount\x18\x02 \x01(\x05\"F\n\x11SubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\"\x9f\x01\n\x19\x46ilteredSubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\x12\x32\n\x06\x66ilter\x18\x03 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\x12\x1b\n\x13\x63heckpoint_interval\x18\x04 \x02(\x05\"F\n\x11\x43heckpointReached\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\"S\n\x18SubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x19\n\x11last_event_number\x18\x02 \x01(\x03\"O\n\x13StreamEventAppeared\x12\x38\n\x05\x65vent\x18\x01 \x02(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\"\x17\n\x15UnsubscribeFromStream\"\x8a\x02\n\x13SubscriptionDropped\x12\x64\n\x06reason\x18\x01 \x01(\x0e\x32\x46.EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason:\x0cUnsubscribed\"\x8c\x01\n\x16SubscriptionDropReason\x12\x10\n\x0cUnsubscribed\x10\x00\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x01\x12\x0c\n\x08NotFound\x10\x02\x12!\n\x1dPersistentSubscriptionDeleted\x10\x03\x12\x1d\n\x19SubscriberMaxCountReached\x10\x04\"\xf4\x02\n\nNotHandled\x12G\n\x06reason\x18\x01 \x02(\x0e\x32\x37.EventStore.Client.Messages.NotHandled.NotHandledReason\x12\x17\n\x0f\x61\x64\x64itional_info\x18\x02 \x01(\x0c\x1a\xb5\x01\n\nLeaderInfo\x12\x1c\n\x14\x65xternal_tcp_address\x18\x01 \x02(\t\x12\x19\n\x11\x65xternal_tcp_port\x18\x02 \x02(\x05\x12\x14\n\x0chttp_address\x18\x03 \x02(\t\x12\x11\n\thttp_port\x18\x04 \x02(\x05\x12#\n\x1b\x65xternal_secure_tcp_address\x18\x05 \x01(\t\x12 \n\x18\x65xternal_secure_tcp_port\x18\x06 '
b'\x01(\x05\"L\n\x10NotHandledReason\x12\x0c\n\x08NotReady\x10\x00\x12\x0b\n\x07TooBusy\x10\x01\x12\r\n\tNotLeader\x10\x02\x12\x0e\n\nIsReadOnly\x10\x03\"\x12\n\x10ScavengeDatabase\"\xc4\x01\n\x18ScavengeDatabaseResponse\x12S\n\x06result\x18\x01 \x02(\x0e\x32\x43.EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult\x12\x12\n\nscavengeId\x18\x02 \x01(\t\"?\n\x0eScavengeResult\x12\x0b\n\x07Started\x10\x00\x12\x0e\n\nInProgress\x10\x01\x12\x10\n\x0cUnauthorized\x10\x02\":\n\x0eIdentifyClient\x12\x0f\n\x07version\x18\x01 \x02(\x05\x12\x17\n\x0f\x63onnection_name\x18\x02 \x01(\t\"\x12\n\x10\x43lientIdentified*\xb0\x01\n\x0fOperationResult\x12\x0b\n\x07Success\x10\x00\x12\x12\n\x0ePrepareTimeout\x10\x01\x12\x11\n\rCommitTimeout\x10\x02\x12\x12\n\x0e\x46orwardTimeout\x10\x03\x12\x18\n\x14WrongExpectedVersion\x10\x04\x12\x11\n\rStreamDeleted\x10\x05\x12\x16\n\x12InvalidTransaction\x10\x06\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x07'
)
_OPERATIONRESULT = _descriptor.EnumDescriptor(
name='OperationResult',
full_name='EventStore.Client.Messages.OperationResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PrepareTimeout', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CommitTimeout', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ForwardTimeout', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WrongExpectedVersion', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='StreamDeleted', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='InvalidTransaction', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8630,
serialized_end=8806,
)
_sym_db.RegisterEnumDescriptor(_OPERATIONRESULT)
OperationResult = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESULT)
Success = 0
PrepareTimeout = 1
CommitTimeout = 2
ForwardTimeout = 3
WrongExpectedVersion = 4
StreamDeleted = 5
InvalidTransaction = 6
AccessDenied = 7
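# A minimal usage sketch (assuming this generated module is imported as
# ClientMessageDtos_pb2): the EnumTypeWrapper exposes name/value lookups, e.g.
#   ClientMessageDtos_pb2.OperationResult.Name(0)           -> 'Success'
#   ClientMessageDtos_pb2.OperationResult.Value('Success')  -> 0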
_READEVENTCOMPLETED_READEVENTRESULT = _descriptor.EnumDescriptor(
name='ReadEventResult',
full_name='EventStore.Client.Messages.ReadEventCompleted.ReadEventResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotFound', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NoStream', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='StreamDeleted', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Error', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=2471,
serialized_end=2577,
)
_sym_db.RegisterEnumDescriptor(_READEVENTCOMPLETED_READEVENTRESULT)
_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT = _descriptor.EnumDescriptor(
name='ReadStreamResult',
full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NoStream', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='StreamDeleted', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotModified', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Error', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3030,
serialized_end=3140,
)
_sym_db.RegisterEnumDescriptor(_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT)
_READALLEVENTSCOMPLETED_READALLRESULT = _descriptor.EnumDescriptor(
name='ReadAllResult',
full_name='EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotModified', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Error', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3584,
serialized_end=3658,
)
_sym_db.RegisterEnumDescriptor(_READALLEVENTSCOMPLETED_READALLRESULT)
_FILTER_FILTERCONTEXT = _descriptor.EnumDescriptor(
name='FilterContext',
full_name='EventStore.Client.Messages.Filter.FilterContext',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='StreamId', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EventType', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3813,
serialized_end=3857,
)
_sym_db.RegisterEnumDescriptor(_FILTER_FILTERCONTEXT)
_FILTER_FILTERTYPE = _descriptor.EnumDescriptor(
name='FilterType',
full_name='EventStore.Client.Messages.Filter.FilterType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Regex', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Prefix', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3859,
serialized_end=3894,
)
_sym_db.RegisterEnumDescriptor(_FILTER_FILTERTYPE)
_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT = _descriptor.EnumDescriptor(
name='FilteredReadAllResult',
full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotModified', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Error', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4475,
serialized_end=4557,
)
_sym_db.RegisterEnumDescriptor(_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT)
_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor(
name='UpdatePersistentSubscriptionResult',
full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DoesNotExist', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Fail', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=5796,
serialized_end=5891,
)
_sym_db.RegisterEnumDescriptor(_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT)
_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor(
name='CreatePersistentSubscriptionResult',
full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AlreadyExists', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Fail', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6078,
serialized_end=6174,
)
_sym_db.RegisterEnumDescriptor(_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT)
_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor(
name='DeletePersistentSubscriptionResult',
full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Success', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DoesNotExist', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Fail', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6361,
serialized_end=6456,
)
_sym_db.RegisterEnumDescriptor(_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT)
_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION = _descriptor.EnumDescriptor(
name='NakAction',
full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Unknown', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Park', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Retry', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Skip', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Stop', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6871,
serialized_end=6936,
)
_sym_db.RegisterEnumDescriptor(_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION)
_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON = _descriptor.EnumDescriptor(
name='SubscriptionDropReason',
full_name='EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Unsubscribed', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AccessDenied', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotFound', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PersistentSubscriptionDeleted', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SubscriberMaxCountReached', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7813,
serialized_end=7953,
)
_sym_db.RegisterEnumDescriptor(_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON)
_NOTHANDLED_NOTHANDLEDREASON = _descriptor.EnumDescriptor(
name='NotHandledReason',
full_name='EventStore.Client.Messages.NotHandled.NotHandledReason',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NotReady', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TooBusy', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NotLeader', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IsReadOnly', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8252,
serialized_end=8328,
)
_sym_db.RegisterEnumDescriptor(_NOTHANDLED_NOTHANDLEDREASON)
_SCAVENGEDATABASERESPONSE_SCAVENGERESULT = _descriptor.EnumDescriptor(
name='ScavengeResult',
full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Started', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='InProgress', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Unauthorized', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8484,
serialized_end=8547,
)
_sym_db.RegisterEnumDescriptor(_SCAVENGEDATABASERESPONSE_SCAVENGERESULT)
_NEWEVENT = _descriptor.Descriptor(
name='NewEvent',
full_name='EventStore.Client.Messages.NewEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_id', full_name='EventStore.Client.Messages.NewEvent.event_id', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_type', full_name='EventStore.Client.Messages.NewEvent.event_type', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data_content_type', full_name='EventStore.Client.Messages.NewEvent.data_content_type', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata_content_type', full_name='EventStore.Client.Messages.NewEvent.metadata_content_type', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='EventStore.Client.Messages.NewEvent.data', index=4,
number=5, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='EventStore.Client.Messages.NewEvent.metadata', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=56,
serialized_end=194,
)
_EVENTRECORD = _descriptor.Descriptor(
name='EventRecord',
full_name='EventStore.Client.Messages.EventRecord',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.EventRecord.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_number', full_name='EventStore.Client.Messages.EventRecord.event_number', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_id', full_name='EventStore.Client.Messages.EventRecord.event_id', index=2,
number=3, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_type', full_name='EventStore.Client.Messages.EventRecord.event_type', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data_content_type', full_name='EventStore.Client.Messages.EventRecord.data_content_type', index=4,
number=5, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata_content_type', full_name='EventStore.Client.Messages.EventRecord.metadata_content_type', index=5,
number=6, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='EventStore.Client.Messages.EventRecord.data', index=6,
number=7, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='EventStore.Client.Messages.EventRecord.metadata', index=7,
number=8, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created', full_name='EventStore.Client.Messages.EventRecord.created', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='created_epoch', full_name='EventStore.Client.Messages.EventRecord.created_epoch', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=197,
serialized_end=425,
)
_RESOLVEDINDEXEDEVENT = _descriptor.Descriptor(
name='ResolvedIndexedEvent',
full_name='EventStore.Client.Messages.ResolvedIndexedEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.event', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='link', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.link', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=428,
serialized_end=561,
)
_RESOLVEDEVENT = _descriptor.Descriptor(
name='ResolvedEvent',
full_name='EventStore.Client.Messages.ResolvedEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='EventStore.Client.Messages.ResolvedEvent.event', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='link', full_name='EventStore.Client.Messages.ResolvedEvent.link', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.ResolvedEvent.commit_position', index=2,
number=3, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.ResolvedEvent.prepare_position', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=564,
serialized_end=741,
)
_WRITEEVENTS = _descriptor.Descriptor(
name='WriteEvents',
full_name='EventStore.Client.Messages.WriteEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.WriteEvents.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expected_version', full_name='EventStore.Client.Messages.WriteEvents.expected_version', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='EventStore.Client.Messages.WriteEvents.events', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.WriteEvents.require_leader', index=3,
number=4, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=744,
serialized_end=886,
)
_WRITEEVENTSCOMPLETED = _descriptor.Descriptor(
name='WriteEventsCompleted',
full_name='EventStore.Client.Messages.WriteEventsCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.WriteEventsCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.WriteEventsCompleted.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='first_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.first_event_number', index=2,
number=3, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.last_event_number', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.prepare_position', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.commit_position', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='current_version', full_name='EventStore.Client.Messages.WriteEventsCompleted.current_version', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=889,
serialized_end=1120,
)
_DELETESTREAM = _descriptor.Descriptor(
name='DeleteStream',
full_name='EventStore.Client.Messages.DeleteStream',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.DeleteStream.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expected_version', full_name='EventStore.Client.Messages.DeleteStream.expected_version', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.DeleteStream.require_leader', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hard_delete', full_name='EventStore.Client.Messages.DeleteStream.hard_delete', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1122,
serialized_end=1232,
)
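# Descriptor for DeleteStreamCompleted: the reply to DeleteStream, with a
# required result enum plus an optional message and the optional
# prepare/commit log positions of the delete.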
_DELETESTREAMCOMPLETED = _descriptor.Descriptor(
name='DeleteStreamCompleted',
full_name='EventStore.Client.Messages.DeleteStreamCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.DeleteStreamCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.DeleteStreamCompleted.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.prepare_position', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.commit_position', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1235,
serialized_end=1387,
)
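# Descriptor for TransactionStart: opens an explicit transaction on a stream;
# event_stream_id, expected_version, and require_leader are all required.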
_TRANSACTIONSTART = _descriptor.Descriptor(
name='TransactionStart',
full_name='EventStore.Client.Messages.TransactionStart',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.TransactionStart.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expected_version', full_name='EventStore.Client.Messages.TransactionStart.expected_version', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.TransactionStart.require_leader', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1389,
serialized_end=1482,
)
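# Descriptor for TransactionStartCompleted: returns the server-assigned
# transaction_id alongside a required result enum and an optional message.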
_TRANSACTIONSTARTCOMPLETED = _descriptor.Descriptor(
name='TransactionStartCompleted',
full_name='EventStore.Client.Messages.TransactionStartCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='EventStore.Client.Messages.TransactionStartCompleted.transaction_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.TransactionStartCompleted.result', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.TransactionStartCompleted.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1485,
serialized_end=1614,
)
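# Descriptor for TransactionWrite: appends a repeated batch of events
# (field 'events', label=3) to the open transaction identified by
# transaction_id.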
_TRANSACTIONWRITE = _descriptor.Descriptor(
name='TransactionWrite',
full_name='EventStore.Client.Messages.TransactionWrite',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='EventStore.Client.Messages.TransactionWrite.transaction_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='EventStore.Client.Messages.TransactionWrite.events', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.TransactionWrite.require_leader', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1616,
serialized_end=1736,
)
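# Descriptor for TransactionWriteCompleted: per-batch acknowledgement carrying
# transaction_id, a required result enum, and an optional message.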
_TRANSACTIONWRITECOMPLETED = _descriptor.Descriptor(
name='TransactionWriteCompleted',
full_name='EventStore.Client.Messages.TransactionWriteCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='EventStore.Client.Messages.TransactionWriteCompleted.transaction_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.TransactionWriteCompleted.result', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.TransactionWriteCompleted.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1739,
serialized_end=1868,
)
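# Descriptor for TransactionCommit: commits a previously started transaction;
# only transaction_id and require_leader are carried.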
_TRANSACTIONCOMMIT = _descriptor.Descriptor(
name='TransactionCommit',
full_name='EventStore.Client.Messages.TransactionCommit',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommit.transaction_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.TransactionCommit.require_leader', index=1,
number=2, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1870,
serialized_end=1937,
)
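# Descriptor for TransactionCommitCompleted: the commit acknowledgement,
# including the first/last event numbers written and the optional
# prepare/commit positions in the log.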
_TRANSACTIONCOMMITCOMPLETED = _descriptor.Descriptor(
name='TransactionCommitCompleted',
full_name='EventStore.Client.Messages.TransactionCommitCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommitCompleted.transaction_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.TransactionCommitCompleted.result', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.TransactionCommitCompleted.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='first_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.first_event_number', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.last_event_number', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.prepare_position', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.commit_position', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1940,
serialized_end=2176,
)
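# Descriptor for ReadEvent: reads a single event by stream id and event
# number; resolve_link_tos asks the server to resolve link events to the
# events they point at.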
_READEVENT = _descriptor.Descriptor(
name='ReadEvent',
full_name='EventStore.Client.Messages.ReadEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.ReadEvent.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_number', full_name='EventStore.Client.Messages.ReadEvent.event_number', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadEvent.resolve_link_tos', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.ReadEvent.require_leader', index=3,
number=4, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2178,
serialized_end=2286,
)
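# Descriptor for ReadEventCompleted: a nested ReadEventResult enum, the
# resolved event (a required submessage), and an optional error string.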
_READEVENTCOMPLETED = _descriptor.Descriptor(
name='ReadEventCompleted',
full_name='EventStore.Client.Messages.ReadEventCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.ReadEventCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event', full_name='EventStore.Client.Messages.ReadEventCompleted.event', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error', full_name='EventStore.Client.Messages.ReadEventCompleted.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_READEVENTCOMPLETED_READEVENTRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2289,
serialized_end=2577,
)
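# Descriptor for ReadStreamEvents: a paged read of one stream, starting at
# from_event_number and returning at most max_count events per request.
# Commented usage sketch, assuming the ReadStreamEvents class generated later
# in this module:
#
#   req = ReadStreamEvents(
#       event_stream_id='inventory-42',
#       from_event_number=0,
#       max_count=500,
#       resolve_link_tos=False,
#       require_leader=False,
#   )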
_READSTREAMEVENTS = _descriptor.Descriptor(
name='ReadStreamEvents',
full_name='EventStore.Client.Messages.ReadStreamEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.ReadStreamEvents.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='from_event_number', full_name='EventStore.Client.Messages.ReadStreamEvents.from_event_number', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_count', full_name='EventStore.Client.Messages.ReadStreamEvents.max_count', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadStreamEvents.resolve_link_tos', index=3,
number=4, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.ReadStreamEvents.require_leader', index=4,
number=5, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2580,
serialized_end=2719,
)
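# Descriptor for ReadStreamEventsCompleted: the page of events, a nested
# ReadStreamResult enum, the paging cursors (next_event_number,
# last_event_number, is_end_of_stream), last_commit_position, and an optional
# error string.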
_READSTREAMEVENTSCOMPLETED = _descriptor.Descriptor(
name='ReadStreamEventsCompleted',
full_name='EventStore.Client.Messages.ReadStreamEventsCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='events', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.events', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.result', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.next_event_number', index=2,
number=3, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_event_number', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_end_of_stream', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.is_end_of_stream', index=4,
number=5, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_commit_position', index=5,
number=6, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.error', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=2722,
serialized_end=3140,
)
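# Descriptor for ReadAllEvents: reads the global event log from a
# (commit_position, prepare_position) checkpoint, up to max_count events.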
_READALLEVENTS = _descriptor.Descriptor(
name='ReadAllEvents',
full_name='EventStore.Client.Messages.ReadAllEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.ReadAllEvents.commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEvents.prepare_position', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_count', full_name='EventStore.Client.Messages.ReadAllEvents.max_count', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadAllEvents.resolve_link_tos', index=3,
number=4, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.ReadAllEvents.require_leader', index=4,
number=5, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3143,
serialized_end=3278,
)
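# Descriptor for ReadAllEventsCompleted: echoes the read position, carries the
# page of events plus the next_commit/next_prepare cursor, a ReadAllResult
# enum with an explicit default, and an optional error string.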
_READALLEVENTSCOMPLETED = _descriptor.Descriptor(
name='ReadAllEventsCompleted',
full_name='EventStore.Client.Messages.ReadAllEventsCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.prepare_position', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.events', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_commit_position', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_prepare_position', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.result', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.error', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_READALLEVENTSCOMPLETED_READALLRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3281,
serialized_end=3658,
)
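# Descriptor for Filter: a server-side filter applied while reading the global
# event log. 'context' and 'type' use the nested FilterContext/FilterType
# enums; 'data' holds the repeated list of patterns.
# Commented sketch; the enum value spellings (Filter.EventType, Filter.Prefix)
# are assumptions about the generated module, not guaranteed by this
# descriptor:
#
#   f = Filter(
#       context=Filter.EventType,
#       type=Filter.Prefix,
#       data=['order-', 'invoice-'],
#   )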
_FILTER = _descriptor.Descriptor(
name='Filter',
full_name='EventStore.Client.Messages.Filter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='context', full_name='EventStore.Client.Messages.Filter.context', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='EventStore.Client.Messages.Filter.type', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='EventStore.Client.Messages.Filter.data', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_FILTER_FILTERCONTEXT,
_FILTER_FILTERTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3661,
serialized_end=3894,
)
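# Descriptor for FilteredReadAllEvents: a ReadAllEvents variant carrying a
# required Filter submessage, plus an optional max_search_window that bounds
# how many events the server scans per page while looking for matches.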
_FILTEREDREADALLEVENTS = _descriptor.Descriptor(
name='FilteredReadAllEvents',
full_name='EventStore.Client.Messages.FilteredReadAllEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.prepare_position', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_count', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_count', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_search_window', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_search_window', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.FilteredReadAllEvents.resolve_link_tos', index=4,
number=5, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='require_leader', full_name='EventStore.Client.Messages.FilteredReadAllEvents.require_leader', index=5,
number=6, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filter', full_name='EventStore.Client.Messages.FilteredReadAllEvents.filter', index=6,
number=7, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=3897,
serialized_end=4119,
)
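# Descriptor for FilteredReadAllEventsCompleted: like ReadAllEventsCompleted,
# with an added is_end_of_stream flag and its own nested
# FilteredReadAllResult enum.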
_FILTEREDREADALLEVENTSCOMPLETED = _descriptor.Descriptor(
name='FilteredReadAllEventsCompleted',
full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.prepare_position', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.events', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_commit_position', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_prepare_position', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_end_of_stream', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.is_end_of_stream', index=5,
number=6, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.result', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.error', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=4122,
serialized_end=4557,
)
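# Descriptor for CreatePersistentSubscription: creates a named
# competing-consumer group on a stream. Its sixteen fields are the group's
# settings: start position, message timeout, buffer/batch sizes, retry and
# checkpoint tuning, subscriber cap, and an optional named_consumer_strategy.
# Commented sketch, assuming the CreatePersistentSubscription class generated
# later in this module; the values are illustrative only, but all fifteen
# required (label=2) fields are set, as proto2 serialization demands:
#
#   req = CreatePersistentSubscription(
#       subscription_group_name='billing',
#       event_stream_id='invoices',
#       resolve_link_tos=False,
#       start_from=0,
#       message_timeout_milliseconds=10000,
#       record_statistics=False,
#       live_buffer_size=500,
#       read_batch_size=20,
#       buffer_size=500,
#       max_retry_count=10,
#       prefer_round_robin=True,
#       checkpoint_after_time=2000,
#       checkpoint_max_count=1000,
#       checkpoint_min_count=10,
#       subscriber_max_count=0,
#   )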
_CREATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor(
name='CreatePersistentSubscription',
full_name='EventStore.Client.Messages.CreatePersistentSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_group_name', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscription_group_name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.CreatePersistentSubscription.event_stream_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.CreatePersistentSubscription.resolve_link_tos', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_from', full_name='EventStore.Client.Messages.CreatePersistentSubscription.start_from', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.CreatePersistentSubscription.message_timeout_milliseconds', index=4,
number=5, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='record_statistics', full_name='EventStore.Client.Messages.CreatePersistentSubscription.record_statistics', index=5,
number=6, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='live_buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.live_buffer_size', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='read_batch_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.read_batch_size', index=7,
number=8, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.buffer_size', index=8,
number=9, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_retry_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.max_retry_count', index=9,
number=10, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prefer_round_robin', full_name='EventStore.Client.Messages.CreatePersistentSubscription.prefer_round_robin', index=10,
number=11, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_after_time', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_after_time', index=11,
number=12, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_max_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_max_count', index=12,
number=13, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_min_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_min_count', index=13,
number=14, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subscriber_max_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscriber_max_count', index=14,
number=15, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='named_consumer_strategy', full_name='EventStore.Client.Messages.CreatePersistentSubscription.named_consumer_strategy', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=4560,
serialized_end=5038,
)
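# Descriptor for DeletePersistentSubscription: identifies the group to remove
# by subscription_group_name and event_stream_id.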
_DELETEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor(
name='DeletePersistentSubscription',
full_name='EventStore.Client.Messages.DeletePersistentSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_group_name', full_name='EventStore.Client.Messages.DeletePersistentSubscription.subscription_group_name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.DeletePersistentSubscription.event_stream_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5040,
serialized_end=5128,
)
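# Descriptor for UpdatePersistentSubscription: field-for-field identical to
# CreatePersistentSubscription; the server applies the settings to an existing
# group instead of creating a new one.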
_UPDATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor(
name='UpdatePersistentSubscription',
full_name='EventStore.Client.Messages.UpdatePersistentSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_group_name', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscription_group_name', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.event_stream_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.resolve_link_tos', index=2,
number=3, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_from', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.start_from', index=3,
number=4, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.message_timeout_milliseconds', index=4,
number=5, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='record_statistics', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.record_statistics', index=5,
number=6, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='live_buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.live_buffer_size', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='read_batch_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.read_batch_size', index=7,
number=8, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.buffer_size', index=8,
number=9, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_retry_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.max_retry_count', index=9,
number=10, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prefer_round_robin', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.prefer_round_robin', index=10,
number=11, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_after_time', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_after_time', index=11,
number=12, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_max_count', index=12,
number=13, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_min_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_min_count', index=13,
number=14, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subscriber_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscriber_max_count', index=14,
number=15, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='named_consumer_strategy', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.named_consumer_strategy', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5131,
serialized_end=5609,
)
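# Descriptor for UpdatePersistentSubscriptionCompleted: a required result enum
# with an explicit default plus an optional human-readable reason.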
_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor(
name='UpdatePersistentSubscriptionCompleted',
full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reason', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.reason', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5612,
serialized_end=5891,
)
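# Descriptor for CreatePersistentSubscriptionCompleted: same result/reason
# shape as the update reply, with its own nested result enum.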
_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor(
name='CreatePersistentSubscriptionCompleted',
full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reason', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.reason', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=5894,
serialized_end=6174,
)
_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor(
name='DeletePersistentSubscriptionCompleted',
full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reason', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.reason', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6177,
serialized_end=6456,
)
_CONNECTTOPERSISTENTSUBSCRIPTION = _descriptor.Descriptor(
name='ConnectToPersistentSubscription',
full_name='EventStore.Client.Messages.ConnectToPersistentSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.subscription_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.event_stream_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='allowed_in_flight_messages', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.allowed_in_flight_messages', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6458,
serialized_end=6577,
)
_PERSISTENTSUBSCRIPTIONACKEVENTS = _descriptor.Descriptor(
name='PersistentSubscriptionAckEvents',
full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.subscription_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.processed_event_ids', index=1,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6579,
serialized_end=6666,
)
_PERSISTENTSUBSCRIPTIONNAKEVENTS = _descriptor.Descriptor(
name='PersistentSubscriptionNakEvents',
full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.subscription_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.processed_event_ids', index=1,
number=2, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.action', index=3,
number=4, type=14, cpp_type=8, label=2,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6669,
serialized_end=6936,
)
_PERSISTENTSUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor(
name='PersistentSubscriptionConfirmation',
full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.subscription_id', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_event_number', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_event_number', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=6938,
serialized_end=7056,
)
_PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED = _descriptor.Descriptor(
name='PersistentSubscriptionStreamEventAppeared',
full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.event', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='retryCount', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.retryCount', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7059,
serialized_end=7187,
)
_SUBSCRIBETOSTREAM = _descriptor.Descriptor(
name='SubscribeToStream',
full_name='EventStore.Client.Messages.SubscribeToStream',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.SubscribeToStream.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.SubscribeToStream.resolve_link_tos', index=1,
number=2, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7189,
serialized_end=7259,
)
_FILTEREDSUBSCRIBETOSTREAM = _descriptor.Descriptor(
name='FilteredSubscribeToStream',
full_name='EventStore.Client.Messages.FilteredSubscribeToStream',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_stream_id', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.event_stream_id', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resolve_link_tos', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.resolve_link_tos', index=1,
number=2, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filter', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.filter', index=2,
number=3, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='checkpoint_interval', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.checkpoint_interval', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7262,
serialized_end=7421,
)
_CHECKPOINTREACHED = _descriptor.Descriptor(
name='CheckpointReached',
full_name='EventStore.Client.Messages.CheckpointReached',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='commit_position', full_name='EventStore.Client.Messages.CheckpointReached.commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='prepare_position', full_name='EventStore.Client.Messages.CheckpointReached.prepare_position', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7423,
serialized_end=7493,
)
_SUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor(
name='SubscriptionConfirmation',
full_name='EventStore.Client.Messages.SubscriptionConfirmation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_commit_position', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_event_number', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_event_number', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7495,
serialized_end=7578,
)
_STREAMEVENTAPPEARED = _descriptor.Descriptor(
name='StreamEventAppeared',
full_name='EventStore.Client.Messages.StreamEventAppeared',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='EventStore.Client.Messages.StreamEventAppeared.event', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7580,
serialized_end=7659,
)
_UNSUBSCRIBEFROMSTREAM = _descriptor.Descriptor(
name='UnsubscribeFromStream',
full_name='EventStore.Client.Messages.UnsubscribeFromStream',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7661,
serialized_end=7684,
)
_SUBSCRIPTIONDROPPED = _descriptor.Descriptor(
name='SubscriptionDropped',
full_name='EventStore.Client.Messages.SubscriptionDropped',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='reason', full_name='EventStore.Client.Messages.SubscriptionDropped.reason', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7687,
serialized_end=7953,
)
_NOTHANDLED_LEADERINFO = _descriptor.Descriptor(
name='LeaderInfo',
full_name='EventStore.Client.Messages.NotHandled.LeaderInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='external_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_address', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_port', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='http_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_address', index=2,
number=3, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='http_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_port', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_secure_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_address', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_secure_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_port', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=8069,
serialized_end=8250,
)
_NOTHANDLED = _descriptor.Descriptor(
name='NotHandled',
full_name='EventStore.Client.Messages.NotHandled',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='reason', full_name='EventStore.Client.Messages.NotHandled.reason', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='additional_info', full_name='EventStore.Client.Messages.NotHandled.additional_info', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_NOTHANDLED_LEADERINFO, ],
enum_types=[
_NOTHANDLED_NOTHANDLEDREASON,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=7956,
serialized_end=8328,
)
_SCAVENGEDATABASE = _descriptor.Descriptor(
name='ScavengeDatabase',
full_name='EventStore.Client.Messages.ScavengeDatabase',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=8330,
serialized_end=8348,
)
_SCAVENGEDATABASERESPONSE = _descriptor.Descriptor(
name='ScavengeDatabaseResponse',
full_name='EventStore.Client.Messages.ScavengeDatabaseResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.result', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scavengeId', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.scavengeId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_SCAVENGEDATABASERESPONSE_SCAVENGERESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=8351,
serialized_end=8547,
)
_IDENTIFYCLIENT = _descriptor.Descriptor(
name='IdentifyClient',
full_name='EventStore.Client.Messages.IdentifyClient',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='EventStore.Client.Messages.IdentifyClient.version', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection_name', full_name='EventStore.Client.Messages.IdentifyClient.connection_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=8549,
serialized_end=8607,
)
_CLIENTIDENTIFIED = _descriptor.Descriptor(
name='ClientIdentified',
full_name='EventStore.Client.Messages.ClientIdentified',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=8609,
serialized_end=8627,
)
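# Resolve the cross-references that could not be set while the descriptors
# above were being constructed: composite fields receive their message_type,
# enum-valued fields receive their enum_type, and each nested enum records
# the message that contains it.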
_RESOLVEDINDEXEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD
_RESOLVEDINDEXEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD
_RESOLVEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD
_RESOLVEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD
_WRITEEVENTS.fields_by_name['events'].message_type = _NEWEVENT
_WRITEEVENTSCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT
_DELETESTREAMCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT
_TRANSACTIONSTARTCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT
_TRANSACTIONWRITE.fields_by_name['events'].message_type = _NEWEVENT
_TRANSACTIONWRITECOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT
_TRANSACTIONCOMMITCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT
_READEVENTCOMPLETED.fields_by_name['result'].enum_type = _READEVENTCOMPLETED_READEVENTRESULT
_READEVENTCOMPLETED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT
_READEVENTCOMPLETED_READEVENTRESULT.containing_type = _READEVENTCOMPLETED
_READSTREAMEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDINDEXEDEVENT
_READSTREAMEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT
_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT.containing_type = _READSTREAMEVENTSCOMPLETED
_READALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT
_READALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READALLEVENTSCOMPLETED_READALLRESULT
_READALLEVENTSCOMPLETED_READALLRESULT.containing_type = _READALLEVENTSCOMPLETED
_FILTER.fields_by_name['context'].enum_type = _FILTER_FILTERCONTEXT
_FILTER.fields_by_name['type'].enum_type = _FILTER_FILTERTYPE
_FILTER_FILTERCONTEXT.containing_type = _FILTER
_FILTER_FILTERTYPE.containing_type = _FILTER
_FILTEREDREADALLEVENTS.fields_by_name['filter'].message_type = _FILTER
_FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT
_FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT
_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT.containing_type = _FILTEREDREADALLEVENTSCOMPLETED
_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT
_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED
_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT
_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED
_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT
_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED
_PERSISTENTSUBSCRIPTIONNAKEVENTS.fields_by_name['action'].enum_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION
_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION.containing_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS
_PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT
_FILTEREDSUBSCRIBETOSTREAM.fields_by_name['filter'].message_type = _FILTER
_STREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDEVENT
_SUBSCRIPTIONDROPPED.fields_by_name['reason'].enum_type = _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON
_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON.containing_type = _SUBSCRIPTIONDROPPED
_NOTHANDLED_LEADERINFO.containing_type = _NOTHANDLED
_NOTHANDLED.fields_by_name['reason'].enum_type = _NOTHANDLED_NOTHANDLEDREASON
_NOTHANDLED_NOTHANDLEDREASON.containing_type = _NOTHANDLED
_SCAVENGEDATABASERESPONSE.fields_by_name['result'].enum_type = _SCAVENGEDATABASERESPONSE_SCAVENGERESULT
_SCAVENGEDATABASERESPONSE_SCAVENGERESULT.containing_type = _SCAVENGEDATABASERESPONSE
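# Register every top-level message type (and the file-level OperationResult
# enum) on the file descriptor, then publish the file to the symbol database.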
DESCRIPTOR.message_types_by_name['NewEvent'] = _NEWEVENT
DESCRIPTOR.message_types_by_name['EventRecord'] = _EVENTRECORD
DESCRIPTOR.message_types_by_name['ResolvedIndexedEvent'] = _RESOLVEDINDEXEDEVENT
DESCRIPTOR.message_types_by_name['ResolvedEvent'] = _RESOLVEDEVENT
DESCRIPTOR.message_types_by_name['WriteEvents'] = _WRITEEVENTS
DESCRIPTOR.message_types_by_name['WriteEventsCompleted'] = _WRITEEVENTSCOMPLETED
DESCRIPTOR.message_types_by_name['DeleteStream'] = _DELETESTREAM
DESCRIPTOR.message_types_by_name['DeleteStreamCompleted'] = _DELETESTREAMCOMPLETED
DESCRIPTOR.message_types_by_name['TransactionStart'] = _TRANSACTIONSTART
DESCRIPTOR.message_types_by_name['TransactionStartCompleted'] = _TRANSACTIONSTARTCOMPLETED
DESCRIPTOR.message_types_by_name['TransactionWrite'] = _TRANSACTIONWRITE
DESCRIPTOR.message_types_by_name['TransactionWriteCompleted'] = _TRANSACTIONWRITECOMPLETED
DESCRIPTOR.message_types_by_name['TransactionCommit'] = _TRANSACTIONCOMMIT
DESCRIPTOR.message_types_by_name['TransactionCommitCompleted'] = _TRANSACTIONCOMMITCOMPLETED
DESCRIPTOR.message_types_by_name['ReadEvent'] = _READEVENT
DESCRIPTOR.message_types_by_name['ReadEventCompleted'] = _READEVENTCOMPLETED
DESCRIPTOR.message_types_by_name['ReadStreamEvents'] = _READSTREAMEVENTS
DESCRIPTOR.message_types_by_name['ReadStreamEventsCompleted'] = _READSTREAMEVENTSCOMPLETED
DESCRIPTOR.message_types_by_name['ReadAllEvents'] = _READALLEVENTS
DESCRIPTOR.message_types_by_name['ReadAllEventsCompleted'] = _READALLEVENTSCOMPLETED
DESCRIPTOR.message_types_by_name['Filter'] = _FILTER
DESCRIPTOR.message_types_by_name['FilteredReadAllEvents'] = _FILTEREDREADALLEVENTS
DESCRIPTOR.message_types_by_name['FilteredReadAllEventsCompleted'] = _FILTEREDREADALLEVENTSCOMPLETED
DESCRIPTOR.message_types_by_name['CreatePersistentSubscription'] = _CREATEPERSISTENTSUBSCRIPTION
DESCRIPTOR.message_types_by_name['DeletePersistentSubscription'] = _DELETEPERSISTENTSUBSCRIPTION
DESCRIPTOR.message_types_by_name['UpdatePersistentSubscription'] = _UPDATEPERSISTENTSUBSCRIPTION
DESCRIPTOR.message_types_by_name['UpdatePersistentSubscriptionCompleted'] = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED
DESCRIPTOR.message_types_by_name['CreatePersistentSubscriptionCompleted'] = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED
DESCRIPTOR.message_types_by_name['DeletePersistentSubscriptionCompleted'] = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED
DESCRIPTOR.message_types_by_name['ConnectToPersistentSubscription'] = _CONNECTTOPERSISTENTSUBSCRIPTION
DESCRIPTOR.message_types_by_name['PersistentSubscriptionAckEvents'] = _PERSISTENTSUBSCRIPTIONACKEVENTS
DESCRIPTOR.message_types_by_name['PersistentSubscriptionNakEvents'] = _PERSISTENTSUBSCRIPTIONNAKEVENTS
DESCRIPTOR.message_types_by_name['PersistentSubscriptionConfirmation'] = _PERSISTENTSUBSCRIPTIONCONFIRMATION
DESCRIPTOR.message_types_by_name['PersistentSubscriptionStreamEventAppeared'] = _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED
DESCRIPTOR.message_types_by_name['SubscribeToStream'] = _SUBSCRIBETOSTREAM
DESCRIPTOR.message_types_by_name['FilteredSubscribeToStream'] = _FILTEREDSUBSCRIBETOSTREAM
DESCRIPTOR.message_types_by_name['CheckpointReached'] = _CHECKPOINTREACHED
DESCRIPTOR.message_types_by_name['SubscriptionConfirmation'] = _SUBSCRIPTIONCONFIRMATION
DESCRIPTOR.message_types_by_name['StreamEventAppeared'] = _STREAMEVENTAPPEARED
DESCRIPTOR.message_types_by_name['UnsubscribeFromStream'] = _UNSUBSCRIBEFROMSTREAM
DESCRIPTOR.message_types_by_name['SubscriptionDropped'] = _SUBSCRIPTIONDROPPED
DESCRIPTOR.message_types_by_name['NotHandled'] = _NOTHANDLED
DESCRIPTOR.message_types_by_name['ScavengeDatabase'] = _SCAVENGEDATABASE
DESCRIPTOR.message_types_by_name['ScavengeDatabaseResponse'] = _SCAVENGEDATABASERESPONSE
DESCRIPTOR.message_types_by_name['IdentifyClient'] = _IDENTIFYCLIENT
DESCRIPTOR.message_types_by_name['ClientIdentified'] = _CLIENTIDENTIFIED
DESCRIPTOR.enum_types_by_name['OperationResult'] = _OPERATIONRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
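# Build the concrete message classes: GeneratedProtocolMessageType is the
# metaclass that synthesizes a Message subclass from each Descriptor, and
# RegisterMessage makes the resulting class discoverable via the symbol
# database.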
NewEvent = _reflection.GeneratedProtocolMessageType('NewEvent', (_message.Message,), {
'DESCRIPTOR' : _NEWEVENT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NewEvent)
})
_sym_db.RegisterMessage(NewEvent)
EventRecord = _reflection.GeneratedProtocolMessageType('EventRecord', (_message.Message,), {
'DESCRIPTOR' : _EVENTRECORD,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.EventRecord)
})
_sym_db.RegisterMessage(EventRecord)
ResolvedIndexedEvent = _reflection.GeneratedProtocolMessageType('ResolvedIndexedEvent', (_message.Message,), {
'DESCRIPTOR' : _RESOLVEDINDEXEDEVENT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ResolvedIndexedEvent)
})
_sym_db.RegisterMessage(ResolvedIndexedEvent)
ResolvedEvent = _reflection.GeneratedProtocolMessageType('ResolvedEvent', (_message.Message,), {
'DESCRIPTOR' : _RESOLVEDEVENT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ResolvedEvent)
})
_sym_db.RegisterMessage(ResolvedEvent)
WriteEvents = _reflection.GeneratedProtocolMessageType('WriteEvents', (_message.Message,), {
'DESCRIPTOR' : _WRITEEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.WriteEvents)
})
_sym_db.RegisterMessage(WriteEvents)
WriteEventsCompleted = _reflection.GeneratedProtocolMessageType('WriteEventsCompleted', (_message.Message,), {
'DESCRIPTOR' : _WRITEEVENTSCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.WriteEventsCompleted)
})
_sym_db.RegisterMessage(WriteEventsCompleted)
DeleteStream = _reflection.GeneratedProtocolMessageType('DeleteStream', (_message.Message,), {
'DESCRIPTOR' : _DELETESTREAM,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeleteStream)
})
_sym_db.RegisterMessage(DeleteStream)
DeleteStreamCompleted = _reflection.GeneratedProtocolMessageType('DeleteStreamCompleted', (_message.Message,), {
'DESCRIPTOR' : _DELETESTREAMCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeleteStreamCompleted)
})
_sym_db.RegisterMessage(DeleteStreamCompleted)
TransactionStart = _reflection.GeneratedProtocolMessageType('TransactionStart', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONSTART,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionStart)
})
_sym_db.RegisterMessage(TransactionStart)
TransactionStartCompleted = _reflection.GeneratedProtocolMessageType('TransactionStartCompleted', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONSTARTCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionStartCompleted)
})
_sym_db.RegisterMessage(TransactionStartCompleted)
TransactionWrite = _reflection.GeneratedProtocolMessageType('TransactionWrite', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONWRITE,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionWrite)
})
_sym_db.RegisterMessage(TransactionWrite)
TransactionWriteCompleted = _reflection.GeneratedProtocolMessageType('TransactionWriteCompleted', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONWRITECOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionWriteCompleted)
})
_sym_db.RegisterMessage(TransactionWriteCompleted)
TransactionCommit = _reflection.GeneratedProtocolMessageType('TransactionCommit', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONCOMMIT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionCommit)
})
_sym_db.RegisterMessage(TransactionCommit)
TransactionCommitCompleted = _reflection.GeneratedProtocolMessageType('TransactionCommitCompleted', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONCOMMITCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionCommitCompleted)
})
_sym_db.RegisterMessage(TransactionCommitCompleted)
ReadEvent = _reflection.GeneratedProtocolMessageType('ReadEvent', (_message.Message,), {
'DESCRIPTOR' : _READEVENT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadEvent)
})
_sym_db.RegisterMessage(ReadEvent)
ReadEventCompleted = _reflection.GeneratedProtocolMessageType('ReadEventCompleted', (_message.Message,), {
'DESCRIPTOR' : _READEVENTCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadEventCompleted)
})
_sym_db.RegisterMessage(ReadEventCompleted)
ReadStreamEvents = _reflection.GeneratedProtocolMessageType('ReadStreamEvents', (_message.Message,), {
'DESCRIPTOR' : _READSTREAMEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadStreamEvents)
})
_sym_db.RegisterMessage(ReadStreamEvents)
ReadStreamEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadStreamEventsCompleted', (_message.Message,), {
'DESCRIPTOR' : _READSTREAMEVENTSCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadStreamEventsCompleted)
})
_sym_db.RegisterMessage(ReadStreamEventsCompleted)
ReadAllEvents = _reflection.GeneratedProtocolMessageType('ReadAllEvents', (_message.Message,), {
'DESCRIPTOR' : _READALLEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadAllEvents)
})
_sym_db.RegisterMessage(ReadAllEvents)
ReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadAllEventsCompleted', (_message.Message,), {
'DESCRIPTOR' : _READALLEVENTSCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadAllEventsCompleted)
})
_sym_db.RegisterMessage(ReadAllEventsCompleted)
Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), {
'DESCRIPTOR' : _FILTER,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.Filter)
})
_sym_db.RegisterMessage(Filter)
FilteredReadAllEvents = _reflection.GeneratedProtocolMessageType('FilteredReadAllEvents', (_message.Message,), {
'DESCRIPTOR' : _FILTEREDREADALLEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredReadAllEvents)
})
_sym_db.RegisterMessage(FilteredReadAllEvents)
FilteredReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('FilteredReadAllEventsCompleted', (_message.Message,), {
'DESCRIPTOR' : _FILTEREDREADALLEVENTSCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredReadAllEventsCompleted)
})
_sym_db.RegisterMessage(FilteredReadAllEventsCompleted)
CreatePersistentSubscription = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscription', (_message.Message,), {
'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CreatePersistentSubscription)
})
_sym_db.RegisterMessage(CreatePersistentSubscription)
DeletePersistentSubscription = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscription', (_message.Message,), {
'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeletePersistentSubscription)
})
_sym_db.RegisterMessage(DeletePersistentSubscription)
UpdatePersistentSubscription = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscription', (_message.Message,), {
'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UpdatePersistentSubscription)
})
_sym_db.RegisterMessage(UpdatePersistentSubscription)
UpdatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscriptionCompleted', (_message.Message,), {
'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted)
})
_sym_db.RegisterMessage(UpdatePersistentSubscriptionCompleted)
CreatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscriptionCompleted', (_message.Message,), {
'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CreatePersistentSubscriptionCompleted)
})
_sym_db.RegisterMessage(CreatePersistentSubscriptionCompleted)
DeletePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscriptionCompleted', (_message.Message,), {
'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeletePersistentSubscriptionCompleted)
})
_sym_db.RegisterMessage(DeletePersistentSubscriptionCompleted)
ConnectToPersistentSubscription = _reflection.GeneratedProtocolMessageType('ConnectToPersistentSubscription', (_message.Message,), {
'DESCRIPTOR' : _CONNECTTOPERSISTENTSUBSCRIPTION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ConnectToPersistentSubscription)
})
_sym_db.RegisterMessage(ConnectToPersistentSubscription)
PersistentSubscriptionAckEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionAckEvents', (_message.Message,), {
'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONACKEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionAckEvents)
})
_sym_db.RegisterMessage(PersistentSubscriptionAckEvents)
PersistentSubscriptionNakEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionNakEvents', (_message.Message,), {
'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONNAKEVENTS,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionNakEvents)
})
_sym_db.RegisterMessage(PersistentSubscriptionNakEvents)
PersistentSubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionConfirmation', (_message.Message,), {
'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONCONFIRMATION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionConfirmation)
})
_sym_db.RegisterMessage(PersistentSubscriptionConfirmation)
PersistentSubscriptionStreamEventAppeared = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionStreamEventAppeared', (_message.Message,), {
'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared)
})
_sym_db.RegisterMessage(PersistentSubscriptionStreamEventAppeared)
SubscribeToStream = _reflection.GeneratedProtocolMessageType('SubscribeToStream', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIBETOSTREAM,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscribeToStream)
})
_sym_db.RegisterMessage(SubscribeToStream)
FilteredSubscribeToStream = _reflection.GeneratedProtocolMessageType('FilteredSubscribeToStream', (_message.Message,), {
'DESCRIPTOR' : _FILTEREDSUBSCRIBETOSTREAM,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredSubscribeToStream)
})
_sym_db.RegisterMessage(FilteredSubscribeToStream)
CheckpointReached = _reflection.GeneratedProtocolMessageType('CheckpointReached', (_message.Message,), {
'DESCRIPTOR' : _CHECKPOINTREACHED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CheckpointReached)
})
_sym_db.RegisterMessage(CheckpointReached)
SubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('SubscriptionConfirmation', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIPTIONCONFIRMATION,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscriptionConfirmation)
})
_sym_db.RegisterMessage(SubscriptionConfirmation)
StreamEventAppeared = _reflection.GeneratedProtocolMessageType('StreamEventAppeared', (_message.Message,), {
'DESCRIPTOR' : _STREAMEVENTAPPEARED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.StreamEventAppeared)
})
_sym_db.RegisterMessage(StreamEventAppeared)
UnsubscribeFromStream = _reflection.GeneratedProtocolMessageType('UnsubscribeFromStream', (_message.Message,), {
'DESCRIPTOR' : _UNSUBSCRIBEFROMSTREAM,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UnsubscribeFromStream)
})
_sym_db.RegisterMessage(UnsubscribeFromStream)
SubscriptionDropped = _reflection.GeneratedProtocolMessageType('SubscriptionDropped', (_message.Message,), {
'DESCRIPTOR' : _SUBSCRIPTIONDROPPED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscriptionDropped)
})
_sym_db.RegisterMessage(SubscriptionDropped)
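# NotHandled carries a nested LeaderInfo message, so its class dict embeds a
# second GeneratedProtocolMessageType call and both classes are registered.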
NotHandled = _reflection.GeneratedProtocolMessageType('NotHandled', (_message.Message,), {
'LeaderInfo' : _reflection.GeneratedProtocolMessageType('LeaderInfo', (_message.Message,), {
'DESCRIPTOR' : _NOTHANDLED_LEADERINFO,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NotHandled.LeaderInfo)
})
,
'DESCRIPTOR' : _NOTHANDLED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NotHandled)
})
_sym_db.RegisterMessage(NotHandled)
_sym_db.RegisterMessage(NotHandled.LeaderInfo)
ScavengeDatabase = _reflection.GeneratedProtocolMessageType('ScavengeDatabase', (_message.Message,), {
'DESCRIPTOR' : _SCAVENGEDATABASE,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ScavengeDatabase)
})
_sym_db.RegisterMessage(ScavengeDatabase)
ScavengeDatabaseResponse = _reflection.GeneratedProtocolMessageType('ScavengeDatabaseResponse', (_message.Message,), {
'DESCRIPTOR' : _SCAVENGEDATABASERESPONSE,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ScavengeDatabaseResponse)
})
_sym_db.RegisterMessage(ScavengeDatabaseResponse)
IdentifyClient = _reflection.GeneratedProtocolMessageType('IdentifyClient', (_message.Message,), {
'DESCRIPTOR' : _IDENTIFYCLIENT,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.IdentifyClient)
})
_sym_db.RegisterMessage(IdentifyClient)
ClientIdentified = _reflection.GeneratedProtocolMessageType('ClientIdentified', (_message.Message,), {
'DESCRIPTOR' : _CLIENTIDENTIFIED,
'__module__' : 'ClientMessageDtos_pb2'
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ClientIdentified)
})
_sym_db.RegisterMessage(ClientIdentified)
# @@protoc_insertion_point(module_scope)
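# Minimal round-trip sketch, assuming the standard protobuf runtime API
# (SerializeToString / ParseFromString); it exercises only fields whose
# descriptors appear above (SubscribeToStream: event_stream_id,
# resolve_link_tos). Guarded so importing this module stays side-effect free.
if __name__ == '__main__':
    request = SubscribeToStream()
    request.event_stream_id = 'example-stream'  # required string field, number=1
    request.resolve_link_tos = True             # required bool field, number=2
    payload = request.SerializeToString()       # proto2 wire encoding
    parsed = SubscribeToStream()
    parsed.ParseFromString(payload)
    assert parsed == request                    # generated messages support ==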
],
[
135031,
135042
],
[
135453,
135464
],
[
135492,
135503
],
[
135890,
135901
],
[
135929,
135940
],
[
136381,
136392
],
[
136420,
136431
],
[
136848,
136859
],
[
137120,
137131
],
[
137291,
137302
],
[
137340,
137351
],
[
137722,
137733
],
[
137761,
137772
],
[
138163,
138174
],
[
138500,
138511
],
[
138683,
138694
],
[
138980,
138991
],
[
139179,
139190
],
[
139228,
139239
],
[
139624,
139635
],
[
139663,
139674
],
[
140084,
140095
],
[
140407,
140418
],
[
140586,
140597
],
[
140635,
140646
],
[
141022,
141033
],
[
141061,
141072
],
[
141482,
141493
],
[
141761,
141772
],
[
141944,
141955
]
],
[
[
292,
311
],
[
150323,
150331
],
[
150603,
150611
],
[
150910,
150918
],
[
151230,
151238
],
[
151525,
151533
],
[
151832,
151840
],
[
152150,
152158
],
[
152462,
152470
],
[
152791,
152799
],
[
153123,
153131
],
[
153464,
153472
],
[
153796,
153804
],
[
154139,
154147
],
[
154476,
154484
],
[
154806,
154814
],
[
155103,
155111
],
[
155423,
155431
],
[
155755,
155763
],
[
156090,
156098
],
[
156407,
156415
],
[
156719,
156727
],
[
157013,
157021
],
[
157370,
157378
],
[
157750,
157758
],
[
158124,
158132
],
[
158498,
158506
],
[
158890,
158898
],
[
159309,
159317
],
[
159728,
159736
],
[
160135,
160143
],
[
160524,
160532
],
[
160913,
160921
],
[
161308,
161316
],
[
161726,
161734
],
[
162117,
162125
],
[
162452,
162460
],
[
162795,
162803
],
[
163128,
163136
],
[
163472,
163480
],
[
163805,
163813
],
[
164140,
164148
],
[
164451,
164459
],
[
164547,
164555
],
[
165080,
165088
],
[
165410,
165418
],
[
165744,
165752
],
[
166052,
166060
]
],
[
[
340,
365
],
[
150269,
150280
],
[
150546,
150557
],
[
150844,
150855
],
[
151171,
151182
],
[
151468,
151479
],
[
151766,
151777
],
[
152092,
152103
],
[
152395,
152406
],
[
152729,
152740
],
[
153052,
153063
],
[
153402,
153413
],
[
153725,
153736
],
[
154076,
154087
],
[
154404,
154415
],
[
154751,
154762
],
[
155039,
155050
],
[
155361,
155372
],
[
155684,
155695
],
[
156031,
156042
],
[
156339,
156350
],
[
156667,
156678
],
[
156946,
156957
],
[
157294,
157305
],
[
157676,
157687
],
[
158050,
158061
],
[
158424,
158435
],
[
158807,
158818
],
[
159226,
159237
],
[
159645,
159656
],
[
160058,
160069
],
[
160447,
160458
],
[
160836,
160847
],
[
161228,
161239
],
[
161639,
161650
],
[
162054,
162065
],
[
162381,
162392
],
[
162732,
162743
],
[
163058,
163069
],
[
163407,
163418
],
[
163738,
163749
],
[
164075,
164086
],
[
164395,
164406
],
[
164491,
164502
],
[
165018,
165029
],
[
165340,
165351
],
[
165684,
165695
],
[
165990,
166001
]
],
[
[
394,
429
],
[
477,
493
]
],
[
[
467,
474
],
[
17328,
17335
],
[
19049,
19056
],
[
20586,
20593
],
[
21753,
21760
],
[
22522,
22529
],
[
23260,
23267
],
[
24442,
24449
],
[
25712,
25719
],
[
27003,
27010
],
[
28293,
28300
],
[
29656,
29663
],
[
31065,
31072
],
[
32216,
32223
],
[
33198,
33205
],
[
150214,
150221
],
[
150497,
150504
],
[
150783,
150790
],
[
151108,
151115
],
[
151414,
151421
],
[
151705,
151712
],
[
152030,
152037
],
[
152332,
152339
],
[
152662,
152669
],
[
152981,
152988
],
[
153331,
153338
],
[
153654,
153661
],
[
154004,
154011
],
[
154331,
154338
],
[
154686,
154693
],
[
154982,
154989
],
[
155297,
155304
],
[
155613,
155620
],
[
155963,
155970
],
[
156274,
156281
],
[
156609,
156616
],
[
156889,
156896
],
[
157213,
157220
],
[
157588,
157595
],
[
157964,
157971
],
[
158338,
158345
],
[
158712,
158719
],
[
159122,
159129
],
[
159541,
159548
],
[
159960,
159967
],
[
160355,
160362
],
[
160744,
160751
],
[
161133,
161140
],
[
161534,
161541
],
[
161966,
161973
],
[
162309,
162316
],
[
162660,
162667
],
[
162987,
162994
],
[
163334,
163341
],
[
163668,
163675
],
[
164005,
164012
],
[
164336,
164343
],
[
164915,
164922
],
[
164951,
164958
],
[
165270,
165277
],
[
165616,
165623
],
[
165930,
165937
],
[
166242,
166249
]
],
[
[
508,
518
],
[
15673,
15683
],
[
17794,
17804
],
[
19328,
19338
],
[
20858,
20868
],
[
21987,
21997
],
[
22731,
22741
],
[
23547,
23557
],
[
24817,
24827
],
[
26107,
26117
],
[
27398,
27408
],
[
28601,
28611
],
[
29948,
29958
],
[
31322,
31332
],
[
32480,
32490
],
[
33403,
33413
],
[
33863,
33873
],
[
34307,
34317
],
[
34747,
34757
],
[
35195,
35205
],
[
35612,
35622
],
[
36037,
36047
],
[
36456,
36466
],
[
36948,
36958
],
[
37381,
37391
],
[
37809,
37819
],
[
38256,
38266
],
[
38699,
38709
],
[
39150,
39160
],
[
39570,
39580
],
[
39998,
40008
],
[
40421,
40431
],
[
40857,
40867
],
[
41304,
41314
],
[
41772,
41782
],
[
42203,
42213
],
[
42629,
42639
],
[
43090,
43100
],
[
43514,
43524
],
[
43955,
43965
],
[
44398,
44408
],
[
44818,
44828
],
[
45310,
45320
],
[
45751,
45761
],
[
46175,
46185
],
[
46616,
46626
],
[
47063,
47073
],
[
47529,
47539
],
[
47979,
47989
],
[
48433,
48443
],
[
48885,
48895
],
[
49335,
49345
],
[
49783,
49793
],
[
50231,
50241
],
[
50655,
50665
],
[
51148,
51158
],
[
51590,
51600
],
[
52032,
52042
],
[
52468,
52478
],
[
52920,
52930
],
[
53387,
53397
],
[
53838,
53848
],
[
54289,
54299
],
[
54738,
54748
],
[
55175,
55185
],
[
55672,
55682
],
[
56118,
56128
],
[
56564,
56574
],
[
57028,
57038
],
[
57514,
57524
],
[
57950,
57960
],
[
58405,
58415
],
[
58842,
58852
],
[
59319,
59329
],
[
59748,
59758
],
[
60194,
60204
],
[
60658,
60668
],
[
61144,
61154
],
[
61580,
61590
],
[
62035,
62045
],
[
62475,
62485
],
[
62953,
62963
],
[
63400,
63410
],
[
63867,
63877
],
[
64354,
64364
],
[
64791,
64801
],
[
65247,
65257
],
[
65707,
65717
],
[
66165,
66175
],
[
66621,
66631
],
[
67075,
67085
],
[
67491,
67501
],
[
67981,
67991
],
[
68412,
68422
],
[
68855,
68865
],
[
69294,
69304
],
[
69737,
69747
],
[
70201,
70211
],
[
70632,
70642
],
[
71076,
71086
],
[
71554,
71564
],
[
72051,
72061
],
[
72499,
72509
],
[
72931,
72941
],
[
73381,
73391
],
[
73827,
73837
],
[
74291,
74301
],
[
74764,
74774
],
[
75200,
75210
],
[
75657,
75667
],
[
76114,
76124
],
[
76573,
76583
],
[
77036,
77046
],
[
77487,
77497
],
[
77964,
77974
],
[
78440,
78450
],
[
78883,
78893
],
[
79312,
79322
],
[
79759,
79769
],
[
80202,
80212
],
[
80657,
80667
],
[
81142,
81152
],
[
81594,
81604
],
[
82029,
82039
],
[
82489,
82499
],
[
82951,
82961
],
[
83383,
83393
],
[
83831,
83841
],
[
84281,
84291
],
[
84735,
84745
],
[
85148,
85158
],
[
85561,
85571
],
[
86064,
86074
],
[
86548,
86558
],
[
86999,
87009
],
[
87436,
87446
],
[
87889,
87899
],
[
88344,
88354
],
[
88795,
88805
],
[
89231,
89241
],
[
89710,
89720
],
[
90203,
90213
],
[
90663,
90673
],
[
91106,
91116
],
[
91574,
91584
],
[
92044,
92054
],
[
92508,
92518
],
[
92948,
92958
],
[
93404,
93414
],
[
93936,
93946
],
[
94461,
94471
],
[
94935,
94945
],
[
95397,
95407
],
[
95843,
95853
],
[
96325,
96335
],
[
96789,
96799
],
[
97247,
97257
],
[
97703,
97713
],
[
98151,
98161
],
[
98608,
98618
],
[
99076,
99086
],
[
99546,
99556
],
[
100014,
100024
],
[
100482,
100492
],
[
100950,
100960
],
[
101442,
101452
],
[
101915,
101925
],
[
102440,
102450
],
[
102914,
102924
],
[
103387,
103397
],
[
103912,
103922
],
[
104386,
104396
],
[
104848,
104858
],
[
105294,
105304
],
[
105776,
105786
],
[
106240,
106250
],
[
106698,
106708
],
[
107154,
107164
],
[
107602,
107612
],
[
108059,
108069
],
[
108527,
108537
],
[
108997,
109007
],
[
109465,
109475
],
[
109933,
109943
],
[
110401,
110411
],
[
110893,
110903
],
[
111393,
111403
],
[
111875,
111885
],
[
112340,
112350
],
[
112919,
112929
],
[
113401,
113411
],
[
113866,
113876
],
[
114445,
114455
],
[
114927,
114937
],
[
115392,
115402
],
[
115953,
115963
],
[
116465,
116475
],
[
116942,
116952
],
[
117423,
117433
],
[
117905,
117915
],
[
118417,
118427
],
[
118886,
118896
],
[
119368,
119378
],
[
119880,
119890
],
[
120349,
120359
],
[
120810,
120820
],
[
121251,
121261
],
[
121790,
121800
],
[
122297,
122307
],
[
122777,
122787
],
[
123243,
123253
],
[
123755,
123765
],
[
124244,
124254
],
[
124703,
124713
],
[
125143,
125153
],
[
125641,
125651
],
[
126092,
126102
],
[
126556,
126566
],
[
127062,
127072
],
[
127521,
127531
],
[
127961,
127971
],
[
128422,
128432
],
[
128862,
128872
],
[
129342,
129352
],
[
129789,
129799
],
[
130250,
130260
],
[
130747,
130757
],
[
131203,
131213
],
[
131649,
131659
],
[
132116,
132126
],
[
132568,
132578
],
[
133049,
133059
],
[
133513,
133523
],
[
134003,
134013
],
[
134515,
134525
],
[
134968,
134978
],
[
135429,
135439
],
[
135866,
135876
],
[
136357,
136367
],
[
136824,
136834
],
[
137242,
137252
],
[
137698,
137708
],
[
138139,
138149
],
[
138634,
138644
],
[
139130,
139140
],
[
139600,
139610
],
[
140060,
140070
],
[
140537,
140547
],
[
140998,
141008
],
[
141458,
141468
],
[
141895,
141905
],
[
146288,
146298
],
[
146345,
146355
],
[
146408,
146418
],
[
146489,
146499
],
[
146556,
146566
],
[
146619,
146629
],
[
146700,
146710
],
[
146765,
146775
],
[
146848,
146858
],
[
146921,
146931
],
[
147012,
147022
],
[
147085,
147095
],
[
147176,
147186
],
[
147251,
147261
],
[
147344,
147354
],
[
147403,
147413
],
[
147480,
147490
],
[
147553,
147563
],
[
147644,
147654
],
[
147711,
147721
],
[
147796,
147806
],
[
147849,
147859
],
[
147932,
147942
],
[
148033,
148043
],
[
148130,
148140
],
[
148227,
148237
],
[
148324,
148334
],
[
148439,
148449
],
[
148554,
148564
],
[
148669,
148679
],
[
148772,
148782
],
[
148875,
148885
],
[
148978,
148988
],
[
149087,
149097
],
[
149210,
149220
],
[
149285,
149295
],
[
149376,
149386
],
[
149451,
149461
],
[
149540,
149550
],
[
149619,
149629
],
[
149702,
149712
],
[
149781,
149791
],
[
149842,
149852
],
[
149915,
149925
],
[
150004,
150014
],
[
150073,
150083
],
[
150146,
150156
],
[
150245,
150255
]
],
[
[
15518,
15534
],
[
17359,
17375
],
[
17430,
17446
],
[
142614,
142630
],
[
142691,
142707
],
[
142772,
142788
],
[
142921,
142937
],
[
143003,
143019
],
[
150197,
150213
]
],
[
[
17378,
17393
]
],
[
[
17448,
17455
]
],
[
[
17460,
17474
]
],
[
[
17479,
17492
]
],
[
[
17497,
17511
]
],
[
[
17516,
17536
]
],
[
[
17541,
17554
]
],
[
[
17559,
17577
]
],
[
[
17582,
17594
]
],
[
[
17601,
17636
],
[
19080,
19115
],
[
71198,
71233
],
[
143077,
143112
],
[
143194,
143229
]
],
[
[
19118,
19161
],
[
20617,
20660
],
[
77609,
77652
],
[
143421,
143464
],
[
143465,
143508
]
],
[
[
20663,
20700
],
[
21784,
21821
],
[
83953,
83990
],
[
143694,
143731
],
[
143732,
143769
]
],
[
[
21824,
21845
],
[
22553,
22574
],
[
85683,
85704
],
[
143858,
143879
],
[
143942,
143963
]
],
[
[
22577,
22595
],
[
23291,
23309
],
[
85710,
85728
],
[
143923,
143941
],
[
143990,
144008
]
],
[
[
23312,
23365
],
[
24473,
24526
],
[
93526,
93579
],
[
144262,
144315
],
[
144316,
144369
]
],
[
[
24529,
24602
],
[
25743,
25816
],
[
112462,
112535
],
[
144496,
144569
],
[
144570,
144643
]
],
[
[
25819,
25892
],
[
27034,
27107
],
[
113988,
114061
],
[
144777,
144850
],
[
144851,
144924
]
],
[
[
27110,
27183
],
[
28324,
28397
],
[
115514,
115587
],
[
145058,
145131
],
[
145132,
145205
]
],
[
[
28400,
28442
],
[
29687,
29729
],
[
121373,
121415
],
[
145333,
145375
],
[
145376,
145418
]
],
[
[
29732,
29775
],
[
31096,
31139
],
[
133635,
133678
],
[
145782,
145825
],
[
145826,
145869
]
],
[
[
31142,
31170
],
[
32247,
32275
],
[
138285,
138313
],
[
146011,
146039
],
[
146040,
146068
]
],
[
[
32278,
32318
],
[
33229,
33269
],
[
140182,
140222
],
[
146162,
146202
],
[
146203,
146243
]
],
[
[
33273,
33282
],
[
142545,
142554
],
[
142847,
142856
],
[
146335,
146344
],
[
150362,
150371
]
],
[
[
36317,
36329
],
[
142273,
142285
],
[
142346,
142358
],
[
142413,
142425
],
[
142479,
142491
],
[
146395,
146407
],
[
150642,
150654
]
],
[
[
41138,
41159
],
[
142212,
142233
],
[
142286,
142307
],
[
143172,
143193
],
[
143335,
143356
],
[
145552,
145573
],
[
146467,
146488
],
[
150949,
150970
]
],
[
[
42484,
42498
],
[
142359,
142373
],
[
142426,
142440
],
[
143618,
143632
],
[
144178,
144192
],
[
145709,
145723
],
[
146541,
146555
],
[
151269,
151283
]
],
[
[
44679,
44691
],
[
142492,
142504
],
[
146606,
146618
],
[
151564,
151576
]
],
[
[
46897,
46918
],
[
142555,
142576
],
[
146678,
146699
],
[
151871,
151892
]
],
[
[
50513,
50526
],
[
146751,
146764
],
[
152189,
152202
]
],
[
[
52751,
52773
],
[
142631,
142653
],
[
146825,
146847
],
[
152501,
152523
]
],
[
[
55021,
55038
],
[
146903,
146920
],
[
152830,
152847
]
],
[
[
56847,
56873
],
[
142708,
142734
],
[
146985,
147011
],
[
153162,
153188
]
],
[
[
58688,
58705
],
[
142789,
142806
],
[
147067,
147084
],
[
153503,
153520
]
],
[
[
60477,
60503
],
[
142857,
142883
],
[
147149,
147175
],
[
153835,
153861
]
],
[
[
62318,
62336
],
[
147232,
147250
],
[
154178,
154196
]
],
[
[
63683,
63710
],
[
142938,
142965
],
[
147316,
147343
],
[
154515,
154542
]
],
[
[
67358,
67368
],
[
147392,
147402
],
[
154845,
154855
]
],
[
[
69577,
69596
],
[
143020,
143039
],
[
143113,
143132
],
[
143248,
143267
],
[
147460,
147479
],
[
155142,
155161
]
],
[
[
71400,
71417
],
[
147535,
147552
],
[
155462,
155479
]
],
[
[
74110,
74136
],
[
143268,
143294
],
[
143357,
143383
],
[
143527,
143553
],
[
147617,
147643
],
[
155794,
155820
]
],
[
[
77819,
77833
],
[
147696,
147710
],
[
156129,
156143
]
],
[
[
80485,
80508
],
[
143554,
143577
],
[
143633,
143656
],
[
143788,
143811
],
[
147772,
147795
],
[
156446,
156469
]
],
[
[
84157,
84164
],
[
143812,
143819
],
[
143880,
143887
],
[
143982,
143989
],
[
144027,
144034
],
[
144098,
144105
],
[
145641,
145648
],
[
147841,
147848
],
[
156758,
156765
]
],
[
[
85895,
85917
],
[
144035,
144057
],
[
147909,
147931
],
[
157052,
157074
]
],
[
[
89514,
89545
],
[
144106,
144137
],
[
144193,
144224
],
[
144388,
144419
],
[
148001,
148032
],
[
157409,
157440
]
],
[
[
93746,
93775
],
[
148100,
148129
],
[
157789,
157818
]
],
[
[
101725,
101754
],
[
148197,
148226
],
[
158163,
158192
]
],
[
[
103197,
103226
],
[
148294,
148323
],
[
158537,
158566
]
],
[
[
111176,
111214
],
[
144420,
144458
],
[
144662,
144700
],
[
148400,
148438
],
[
158929,
158967
]
],
[
[
112702,
112740
],
[
144701,
144739
],
[
144943,
144981
],
[
148515,
148553
],
[
159348,
159386
]
],
[
[
114228,
114266
],
[
144982,
145020
],
[
145224,
145262
],
[
148630,
148668
],
[
159767,
159805
]
],
[
[
115754,
115786
],
[
148739,
148771
],
[
160174,
160206
]
],
[
[
117706,
117738
],
[
148842,
148874
],
[
160563,
160595
]
],
[
[
119169,
119201
],
[
145263,
145295
],
[
145437,
145469
],
[
148945,
148977
],
[
160952,
160984
]
],
[
[
121582,
121617
],
[
149051,
149086
],
[
161347,
161382
]
],
[
[
123526,
123568
],
[
145470,
145512
],
[
149167,
149209
],
[
161765,
161807
]
],
[
[
124986,
125004
],
[
149266,
149284
],
[
162156,
162174
]
],
[
[
126375,
126401
],
[
145574,
145600
],
[
149349,
149375
],
[
162491,
162517
]
],
[
[
128705,
128723
],
[
149432,
149450
],
[
162834,
162852
]
],
[
[
130072,
130097
],
[
149514,
149539
],
[
163167,
163192
]
],
[
[
131486,
131506
],
[
145649,
145669
],
[
149598,
149618
],
[
163511,
163531
]
],
[
[
132399,
132421
],
[
149679,
149701
],
[
163844,
163866
]
],
[
[
132886,
132906
],
[
145724,
145744
],
[
145888,
145908
],
[
149760,
149780
],
[
164179,
164199
]
],
[
[
133845,
133867
],
[
138239,
138261
],
[
145909,
145931
],
[
164588,
164610
]
],
[
[
137106,
137117
],
[
145950,
145961
],
[
145962,
145973
],
[
146087,
146098
],
[
149830,
149841
],
[
164776,
164787
]
],
[
[
138480,
138497
],
[
149897,
149914
],
[
165119,
165136
]
],
[
[
138952,
138977
],
[
146099,
146124
],
[
146262,
146287
],
[
149978,
150003
],
[
165449,
165474
]
],
[
[
140389,
140404
],
[
150057,
150072
],
[
165783,
165798
]
],
[
[
141741,
141758
],
[
150128,
150145
],
[
166091,
166108
]
],
[
[
150258,
150266
],
[
150521,
150529
]
],
[
[
150532,
150543
],
[
150807,
150818
]
],
[
[
150821,
150841
],
[
151132,
151152
]
],
[
[
151155,
151168
],
[
151438,
151451
]
],
[
[
151454,
151465
],
[
151729,
151740
]
],
[
[
151743,
151763
],
[
152054,
152074
]
],
[
[
152077,
152089
],
[
152356,
152368
]
],
[
[
152371,
152392
],
[
152686,
152707
]
],
[
[
152710,
152726
],
[
153005,
153021
]
],
[
[
153024,
153049
],
[
153355,
153380
]
],
[
[
153383,
153399
],
[
153678,
153694
]
],
[
[
153697,
153722
],
[
154028,
154053
]
],
[
[
154056,
154073
],
[
154355,
154372
]
],
[
[
154375,
154401
],
[
154710,
154736
]
],
[
[
154739,
154748
],
[
155006,
155015
]
],
[
[
155018,
155036
],
[
155321,
155339
]
],
[
[
155342,
155358
],
[
155637,
155653
]
],
[
[
155656,
155681
],
[
155987,
156012
]
],
[
[
156015,
156028
],
[
156298,
156311
]
],
[
[
156314,
156336
],
[
156633,
156655
]
],
[
[
156658,
156664
],
[
156913,
156919
]
],
[
[
156922,
156943
],
[
157237,
157258
]
],
[
[
157261,
157291
],
[
157612,
157642
]
],
[
[
157645,
157673
],
[
157988,
158016
]
],
[
[
158019,
158047
],
[
158362,
158390
]
],
[
[
158393,
158421
],
[
158736,
158764
]
],
[
[
158767,
158804
],
[
159146,
159183
]
],
[
[
159186,
159223
],
[
159565,
159602
]
],
[
[
159605,
159642
],
[
159984,
160021
]
],
[
[
160024,
160055
],
[
160379,
160410
]
],
[
[
160413,
160444
],
[
160768,
160799
]
],
[
[
160802,
160833
],
[
161157,
161188
]
],
[
[
161191,
161225
],
[
161558,
161592
]
],
[
[
161595,
161636
],
[
161990,
162031
]
],
[
[
162034,
162051
],
[
162333,
162350
]
],
[
[
162353,
162378
],
[
162684,
162709
]
],
[
[
162712,
162729
],
[
163011,
163028
]
],
[
[
163031,
163055
],
[
163358,
163382
]
],
[
[
163385,
163404
],
[
163692,
163711
]
],
[
[
163714,
163735
],
[
164029,
164050
]
],
[
[
164053,
164072
],
[
164360,
164379
]
],
[
[
164382,
164392
],
[
164939,
164949
],
[
164975,
164985
]
],
[
[
164999,
165015
],
[
165294,
165310
]
],
[
[
165313,
165337
],
[
165640,
165664
]
],
[
[
165667,
165681
],
[
165954,
165968
]
],
[
[
165971,
165987
],
[
166266,
166282
]
]
] |
from mmdet.datasets.pipelines import Compose
from .dbsampler import DataBaseSampler
from .formating import Collect3D, DefaultFormatBundle, DefaultFormatBundle3D
from .loading import (LoadAnnotations3D, LoadImageFromFileMono3D,
LoadMultiViewImageFromFiles, LoadPointsFromFile,
LoadPointsFromMultiSweeps, NormalizePointsColor,
PointSegClassMapping)
from .test_time_aug import MultiScaleFlipAug3D
from .transforms_3d import (BackgroundPointsFilter, GlobalAlignment,
GlobalRotScaleTrans, IndoorPatchPointSample,
IndoorPointSample, ObjectNameFilter, ObjectNoise,
ObjectRangeFilter, ObjectSample, PointSample,
PointShuffle, PointsRangeFilter,
RandomDropPointsColor, RandomFlip3D,
RandomJitterPoints, VoxelBasedPointSampler)
__all__ = [
'ObjectSample', 'RandomFlip3D', 'ObjectNoise', 'GlobalRotScaleTrans',
'PointShuffle', 'ObjectRangeFilter', 'PointsRangeFilter', 'Collect3D',
'Compose', 'LoadMultiViewImageFromFiles', 'LoadPointsFromFile',
'DefaultFormatBundle', 'DefaultFormatBundle3D', 'DataBaseSampler',
'NormalizePointsColor', 'LoadAnnotations3D', 'IndoorPointSample',
'PointSample', 'PointSegClassMapping', 'MultiScaleFlipAug3D',
'LoadPointsFromMultiSweeps', 'BackgroundPointsFilter',
'VoxelBasedPointSampler', 'GlobalAlignment', 'IndoorPatchPointSample',
'LoadImageFromFileMono3D', 'ObjectNameFilter', 'RandomDropPointsColor',
'RandomJitterPoints'
]
| [
[
[
37,
44
]
],
[
[
68,
83
]
],
[
[
107,
116
]
],
[
[
118,
137
]
],
[
[
139,
160
]
],
[
[
183,
200
]
],
[
[
202,
225
]
],
[
[
249,
276
]
],
[
[
278,
296
]
],
[
[
320,
345
]
],
[
[
347,
367
]
],
[
[
391,
411
]
],
[
[
440,
459
]
],
[
[
488,
510
]
],
[
[
512,
527
]
],
[
[
557,
576
]
],
[
[
578,
600
]
],
[
[
630,
647
]
],
[
[
649,
665
]
],
[
[
667,
678
]
],
[
[
708,
725
]
],
[
[
727,
739
]
],
[
[
741,
752
]
],
[
[
782,
794
]
],
[
[
796,
813
]
],
[
[
843,
864
]
],
[
[
866,
878
]
],
[
[
908,
926
]
],
[
[
928,
950
]
],
[
[
953,
960
]
]
] |
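The row above is mmdet3d's pipeline package initializer: importing each transform module triggers its registry decorator, and __all__ re-exports the names used in configs. A minimal sketch of how such a pipeline is typically assembled, assuming mmdet3d is installed (the transform parameters here are illustrative, not from this row):

from mmdet.datasets.pipelines import Compose

# Each dict names a registered transform; Compose builds them in order
# from the pipeline registry and chains their __call__ on a results dict.
train_pipeline = [
    dict(type='LoadPointsFromFile', coord_type='LIDAR', load_dim=4, use_dim=4),
    dict(type='PointShuffle'),
    dict(type='DefaultFormatBundle3D', class_names=['Car']),
    dict(type='Collect3D', keys=['points']),
]
pipeline = Compose(train_pipeline)  # call pipeline(results) per sample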
import pytest
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from wagtail.core.models import Page
from wagtail_personalisation.models import Segment
from wagtail_personalisation.rules import VisitCountRule
from wagtail_personalisation.views import (
SegmentModelAdmin, SegmentModelDeleteView)
@pytest.mark.django_db
def test_segment_user_data_view_requires_admin_access(site, client, django_user_model):
user = django_user_model.objects.create(username='first')
segment = Segment(type=Segment.TYPE_STATIC, count=1)
segment.save()
client.force_login(user)
url = reverse('segment:segment_user_data', args=(segment.id,))
response = client.get(url)
assert response.status_code == 302
assert response.url == '/admin/login/?next=%s' % url
@pytest.mark.django_db
def test_segment_user_data_view(site, client, mocker, django_user_model):
user1 = django_user_model.objects.create(username='first')
user2 = django_user_model.objects.create(username='second')
admin_user = django_user_model.objects.create(
username='admin', is_superuser=True)
segment = Segment(type=Segment.TYPE_STATIC, count=1)
segment.save()
segment.static_users.add(user1)
segment.static_users.add(user2)
rule1 = VisitCountRule(counted_page=site.root_page, segment=segment)
rule2 = VisitCountRule(counted_page=site.root_page.get_last_child(),
segment=segment)
rule1.save()
rule2.save()
mocker.patch('wagtail_personalisation.rules.VisitCountRule.get_user_info_string',
side_effect=[3, 9, 0, 1])
client.force_login(admin_user)
response = client.get(
reverse('segment:segment_user_data', args=(segment.id,)))
assert response.status_code == 200
data_lines = response.content.decode().split("\n")
assert data_lines[0] == 'Username,Visit count - Test page,Visit count - Regular page\r'
assert data_lines[1] == 'first,3,9\r'
assert data_lines[2] == 'second,0,1\r'
@pytest.mark.django_db
def test_segment_delete_view_delete_instance(rf, segmented_page, user):
user.is_superuser = True
user.save()
segment = segmented_page.personalisation_metadata.segment
canonical_page = segmented_page.personalisation_metadata.canonical_page
variants_metadata = segment.get_used_pages()
page_variants = Page.objects.filter(pk__in=(
variants_metadata.values_list('variant_id', flat=True)
))
# Make sure all canonical page, variants and variants metadata exist
assert canonical_page
assert page_variants
assert variants_metadata
# Delete the segment via the method on the view.
request = rf.get('/{}'.format(segment.pk))
request.user = user
view = SegmentModelDeleteView(
instance_pk=str(segment.pk),
model_admin=SegmentModelAdmin()
)
view.request = request
view.delete_instance()
# Segment has been deleted.
with pytest.raises(segment.DoesNotExist):
segment.refresh_from_db()
# Canonical page stayed intact.
canonical_page.refresh_from_db()
# Variant pages and their metadata have been deleted.
assert not page_variants.all()
assert not variants_metadata.all()
@pytest.mark.django_db
def test_segment_delete_view_raises_permission_denied(rf, segmented_page, user):
segment = segmented_page.personalisation_metadata.segment
request = rf.get('/{}'.format(segment.pk))
request.user = user
view = SegmentModelDeleteView(
instance_pk=str(segment.pk),
model_admin=SegmentModelAdmin()
)
view.request = request
message = 'User has no permission to delete variant page objects.'
with pytest.raises(PermissionDenied):
view.delete_instance()
| [
[
[
7,
13
],
[
338,
344
],
[
815,
821
],
[
2043,
2049
],
[
3260,
3266
],
[
2979,
2985
],
[
3720,
3726
]
],
[
[
49,
65
],
[
3734,
3750
]
],
[
[
90,
97
],
[
627,
634
],
[
1709,
1716
]
],
[
[
130,
134
],
[
2389,
2393
]
],
[
[
179,
186
],
[
525,
532
],
[
538,
545
],
[
1149,
1156
],
[
1162,
1169
]
],
[
[
229,
243
],
[
1296,
1310
],
[
1369,
1383
]
],
[
[
292,
309
],
[
2857,
2874
],
[
3586,
3603
]
],
[
[
311,
333
],
[
2776,
2798
],
[
3505,
3527
]
],
[
[
364,
413
]
],
[
[
841,
868
]
],
[
[
2069,
2109
]
],
[
[
3286,
3335
]
]
] |
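An aside on the last test in the row above: `message` is assigned but never asserted. pytest.raises takes a `match` regex for exactly that purpose; a self-contained sketch with a toy function (not the wagtail view):

import pytest

def delete_variants(allowed):
    if not allowed:
        raise PermissionError('User has no permission to delete variant page objects.')

def test_message_is_checked():
    with pytest.raises(PermissionError, match='no permission to delete'):
        delete_variants(False)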
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# HP.1910.get_chassis_id
# ---------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetchassisid import IGetChassisID
class Script(BaseScript):
name = "HP.1910.get_chassis_id"
interface = IGetChassisID
cache = True
rx_mac = re.compile(r"^MAC_ADDRESS\s+:\s+(?P<mac>\S+)$", re.MULTILINE)
def execute_cli(self):
v = self.cli("display device manuinfo", cached=True)
match = self.rx_mac.search(v)
mac = match.group("mac")
return {"first_chassis_mac": mac, "last_chassis_mac": mac}
| [
[
[
358,
360
],
[
603,
605
],
[
651,
653
]
],
[
[
409,
419
],
[
493,
503
]
],
[
[
464,
477
],
[
558,
571
]
],
[
[
486,
492
]
]
] |
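The chassis-ID script above hinges on rx_mac pulling the MAC out of the "display device manuinfo" output. A quick standalone check against invented sample output:

import re

rx_mac = re.compile(r"^MAC_ADDRESS\s+:\s+(?P<mac>\S+)$", re.MULTILINE)
sample = "DEVICE_NAME   : HP 1910-24G\nMAC_ADDRESS   : 3c52-82aa-bb01\n"
assert rx_mac.search(sample).group("mac") == "3c52-82aa-bb01"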
import collections
import functools
from typing import Dict, List, Tuple, Counter
from tool.runners.python import SubmissionPy
def parse(s: str) -> Tuple[List[str], Dict[Tuple[str, str], str]]:
lines = s.splitlines()
initial = list(lines[0].strip())
mapping = {}
for line in lines[2:]:
if stripped_line := line.strip():
left, right = stripped_line.split(" -> ", 1)
mapping[left[0], left[1]] = right
return initial, mapping
DEPTH = 40
class SkaschSubmission(SubmissionPy):
@functools.lru_cache(None)
def dfs(self, left: str, right: str, depth: int) -> Counter[str]:
if depth == DEPTH:
return collections.Counter()
mid = self.mapping[left, right]
cnt = collections.Counter(mid)
return cnt + self.dfs(left, mid, depth + 1) + self.dfs(mid, right, depth + 1)
def run(self, s: str) -> int:
"""
:param s: input in string format
:return: solution flag
"""
# Your code goes here
self.dfs.cache_clear()
initial, self.mapping = parse(s)
cnt = collections.Counter(initial)
for left, right in zip(initial, initial[1:]):
cnt += self.dfs(left, right, 0)
return max(cnt.values()) - min(cnt.values())
def test_skasch() -> None:
"""
Run `python -m pytest ./day-14/part-2/skasch.py` to test the submission.
"""
assert (
SkaschSubmission().run(
"""
NNCB
CH -> B
HH -> N
CB -> H
NH -> C
HB -> C
HC -> B
HN -> C
NN -> C
BH -> H
NC -> B
NB -> B
BN -> B
BB -> N
BC -> B
CC -> N
CN -> C
""".strip()
)
== 2188189693529
)
| [
[
[
7,
18
],
[
677,
688
],
[
753,
764
],
[
1111,
1122
]
],
[
[
26,
35
],
[
535,
544
]
],
[
[
55,
59
],
[
167,
171
]
],
[
[
61,
65
],
[
156,
160
]
],
[
[
67,
72
],
[
150,
155
],
[
172,
177
]
],
[
[
74,
81
],
[
617,
624
]
],
[
[
114,
126
],
[
515,
527
]
],
[
[
133,
138
],
[
1088,
1093
]
],
[
[
479,
484
],
[
651,
656
]
],
[
[
498,
514
],
[
1434,
1450
]
],
[
[
1297,
1308
]
]
] |
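The submission above never builds the 40-step polymer: dfs(left, right, depth) returns a Counter of only the characters inserted between one pair, and lru_cache collapses the exponential recursion into roughly O(pairs x depth) distinct calls. The same recurrence on a toy rule set (hypothetical names; the unknown-pair guard is an addition so the tiny rule set stays total):

import collections
import functools

RULES = {('N', 'N'): 'C', ('N', 'C'): 'B'}  # tiny illustrative rule set
LIMIT = 2

@functools.lru_cache(maxsize=None)
def inserted(left, right, depth):
    # Characters inserted between (left, right): the mid element plus
    # whatever the two new pairs insert, one level deeper.
    if depth == LIMIT or (left, right) not in RULES:
        return collections.Counter()
    mid = RULES[left, right]
    return (collections.Counter(mid)
            + inserted(left, mid, depth + 1)
            + inserted(mid, right, depth + 1))

print(inserted('N', 'N', 0))  # Counter({'C': 1, 'B': 1})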
"""add username to LoadTbl
Revision ID: 7fb7364b821a
Revises: 090128c02529
Create Date: 2018-10-24 17:10:03.781293
"""
# revision identifiers, used by Alembic.
revision = '7fb7364b821a'
down_revision = '090128c02529'
import sqlalchemy as sa
from alembic import op
def upgrade():
op.add_column('LoadTbl', sa.Column('username', sa.String(45)))
op.create_foreign_key('LoadTbl_ibfk_2', 'LoadTbl', 'UserTbl', ['username'], ['name'])
def downgrade():
op.drop_constraint('LoadTbl_ibfk_2', 'LoadTbl', type_='foreignkey')
op.drop_column('LoadTbl', 'username')
| [
[
[
163,
171
]
],
[
[
189,
202
]
],
[
[
228,
244
],
[
314,
316
],
[
336,
338
]
],
[
[
265,
267
],
[
289,
291
],
[
356,
358
],
[
465,
467
],
[
537,
539
]
],
[
[
274,
281
]
],
[
[
448,
457
]
]
] |
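A hedged sketch of previewing a step like the upgrade() above as SQL without a live database, using Alembic's standalone Operations object in offline (as_sql) mode; the MySQL dialect is an assumption, and the rendered DDL may differ slightly by Alembic version:

import sqlalchemy as sa
from alembic.migration import MigrationContext
from alembic.operations import Operations

# Offline context: operations are rendered as SQL text instead of executed.
ctx = MigrationContext.configure(dialect_name='mysql', opts={'as_sql': True})
op = Operations(ctx)
op.add_column('LoadTbl', sa.Column('username', sa.String(45)))
# prints roughly: ALTER TABLE `LoadTbl` ADD COLUMN username VARCHAR(45)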
expected_output = {
'slot': {
'1': {
'lc': {
'card_type': 'CEF720 48 port 10/100/1000mb Ethernet',
'fw_ver': '12.2(14r)S',
'hw_ver': '2.7',
'mac_address_from': '001e.4aff.ee89',
'mac_address_to': '001e.4aff.eeb8',
'model': 'WS-X6748-GE-TX',
'online_diag_status': 'Pass',
'ports': 48,
'serial_number': 'SAL1209HMW3',
'status': 'Ok',
'subslot': {
'WS-F6700-CFC': {
'hw_ver': '4.0',
'model': 'WS-F6700-CFC',
'serial_number': 'SAL1207G5V1',
'status': 'Ok',
},
},
'sw_ver': '15.4(0.10)',
},
},
'2': {
'lc': {
'card_type': '2 port adapter Enhanced FlexWAN',
'fw_ver': '15.4(0.10)S',
'hw_ver': '2.1',
'mac_address_from': '0015.2bff.e884',
'mac_address_to': '0015.2bff.e8c3',
'model': 'WS-X6582-2PA',
'online_diag_status': 'Pass',
'ports': 0,
'serial_number': 'JAE0939LYNQ',
'status': 'Ok',
'sw_ver': '15.4(0.10)S',
},
},
'5': {
'rp': {
'card_type': 'Supervisor Engine 720 (Hot)',
'fw_ver': '8.1(3',
'hw_ver': '4.1',
'mac_address_from': '0011.21ff.441a',
'mac_address_to': '0011.21ff.441d',
'model': 'WS-SUP720-3BXL',
'online_diag_status': 'Pass',
'ports': 2,
'serial_number': 'SAD09020BF8',
'status': 'Ok',
'subslot': {
'WS-F6K-PFC3BXL': {
'hw_ver': '1.4',
'model': 'WS-F6K-PFC3BXL',
'serial_number': 'SAD090301K6',
'status': 'Ok',
},
'WS-SUP720': {
'hw_ver': '2.2',
'model': 'WS-SUP720',
'serial_number': 'SAD090105M6',
'status': 'Ok',
},
},
'sw_ver': '15.4(0.10)',
},
},
'6': {
'rp': {
'card_type': 'Supervisor Engine 720 (Active)',
'fw_ver': '8.5(4',
'hw_ver': '5.12',
'mac_address_from': '0022.55ff.039b',
'mac_address_to': '0022.55ff.039e',
'model': 'WS-SUP720-3BXL',
'online_diag_status': 'Pass',
'ports': 2,
'serial_number': 'SAL15129MRC',
'status': 'Ok',
'subslot': {
'WS-F6K-PFC3BXL': {
'hw_ver': '1.11',
'model': 'WS-F6K-PFC3BXL',
'serial_number': 'SAL15129KW4',
'status': 'Ok',
},
'WS-SUP720': {
'hw_ver': '5.1',
'model': 'WS-SUP720',
'serial_number': 'SAL15045PYS',
'status': 'Ok',
},
},
'sw_ver': '15.4(0.10)',
},
},
},
}
| [
[
[
0,
15
]
]
] |
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import io
import logging
import time
import unittest
import requests
import azure.mgmt.batch
import azure.mgmt.keyvault  # used below for KeyVaultManagementClient
from azure.mgmt.batch import models
from azure.common.exceptions import CloudError
from mgmt_batch_preparers import KeyVaultPreparer, SimpleBatchPreparer
from devtools_testutils import (
AzureMgmtTestCase,
ResourceGroupPreparer,
StorageAccountPreparer
)
AZURE_LOCATION = 'westcentralus'
EXISTING_BATCH_ACCOUNT = {'name': 'sdktest2', 'location': 'westcentralus'}
class MgmtBatchTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtBatchTest, self).setUp()
self.mgmt_batch_client = self.create_mgmt_client(
azure.mgmt.batch.BatchManagementClient)
self.mgmt_keyvault_client = self.create_mgmt_client(
azure.mgmt.keyvault.KeyVaultManagementClient)
def _get_account_name(self):
return self.get_resource_name('batch')[-24:]
def test_mgmt_batch_list_operations(self):
operations = self.mgmt_batch_client.operations.list()
all_ops = list(operations)
self.assertEqual(len(all_ops), 35)
self.assertEqual(all_ops[0].name, 'Microsoft.Batch/batchAccounts/providers/Microsoft.Insights/diagnosticSettings/read')
self.assertEqual(all_ops[0].origin, 'system')
self.assertEqual(all_ops[0].display.provider, 'Microsoft Batch')
self.assertEqual(all_ops[0].display.operation, 'Read diagnostic setting')
def test_mgmt_batch_subscription_quota(self):
quotas = self.mgmt_batch_client.location.get_quotas(AZURE_LOCATION)
self.assertIsInstance(quotas, models.BatchLocationQuota)
self.assertEqual(quotas.account_quota, 3)
def test_mgmt_batch_account_name(self):
# Test Invalid Account Name
availability = self.mgmt_batch_client.location.check_name_availability(
AZURE_LOCATION, "randombatchaccount@5^$g9873495873")
self.assertIsInstance(availability, models.CheckNameAvailabilityResult)
self.assertFalse(availability.name_available)
self.assertEqual(availability.reason, models.NameAvailabilityReason.invalid)
# Test Unavailable Account Name
availability = self.mgmt_batch_client.location.check_name_availability(
EXISTING_BATCH_ACCOUNT['location'], EXISTING_BATCH_ACCOUNT['name'])
self.assertIsInstance(availability, models.CheckNameAvailabilityResult)
self.assertFalse(availability.name_available)
self.assertEqual(availability.reason, models.NameAvailabilityReason.already_exists)
# Test Available Account Name
availability = self.mgmt_batch_client.location.check_name_availability(
AZURE_LOCATION, self._get_account_name())
self.assertIsInstance(availability, models.CheckNameAvailabilityResult)
self.assertTrue(availability.name_available)
@ResourceGroupPreparer(location=AZURE_LOCATION)
@KeyVaultPreparer(location=AZURE_LOCATION)
def test_mgmt_batch_byos_account(self, resource_group, location, keyvault):
if self.is_live:
keyvault = keyvault.result()
batch_account = models.BatchAccountCreateParameters(
location=location,
pool_allocation_mode=models.PoolAllocationMode.user_subscription)
with self.assertRaises(Exception): # TODO: What exception
creating = self.mgmt_batch_client.batch_account.create(
resource_group.name,
self._get_account_name(),
batch_account)
creating.result()
keyvault_id = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.KeyVault/vaults/{}".format(
self.settings.SUBSCRIPTION_ID, resource_group.name, keyvault.name)
keyvault_url = "https://{}.vault.azure.net/".format(keyvault.name)
batch_account = models.BatchAccountCreateParameters(
location=location,
pool_allocation_mode=models.PoolAllocationMode.user_subscription,
key_vault_reference={'id': keyvault_id, 'url': keyvault_url})
creating = self.mgmt_batch_client.batch_account.create(
resource_group.name,
self._get_account_name(),
batch_account)
creating.result()
@ResourceGroupPreparer(location=AZURE_LOCATION)
def test_mgmt_batch_account(self, resource_group, location):
batch_account = models.BatchAccountCreateParameters(
location=location,
)
account_name = self._get_account_name()
account_setup = self.mgmt_batch_client.batch_account.create(
resource_group.name,
account_name,
batch_account)
account_setup.result()
# Test Get Account
account = self.mgmt_batch_client.batch_account.get(resource_group.name, account_name)
self.assertEqual(account.dedicated_core_quota, 20)
self.assertEqual(account.low_priority_core_quota, 100)
self.assertEqual(account.pool_quota, 100)
self.assertEqual(account.pool_allocation_mode.value, 'BatchService')
# Test List Accounts by Resource Group
accounts = self.mgmt_batch_client.batch_account.list_by_resource_group(resource_group.name)
self.assertEqual(len(list(accounts)), 1)
# Test List Account Keys
keys = self.mgmt_batch_client.batch_account.get_keys(resource_group.name, account_name)
self.assertIsInstance(keys, models.BatchAccountKeys)
self.assertEqual(keys.account_name, account_name)
secondary = keys.secondary
# Test Regenerate Account Key
keys = self.mgmt_batch_client.batch_account.regenerate_key(
resource_group.name, account_name, 'Secondary')
self.assertIsInstance(keys, models.BatchAccountKeys)
self.assertFalse(keys.secondary == secondary)
# Test Update Account
update_tags = {'Name': 'tagName', 'Value': 'tagValue'}
updated = self.mgmt_batch_client.batch_account.update(resource_group.name, account_name, update_tags)
self.assertIsInstance(updated, models.BatchAccount)
self.assertEqual(updated.tags['Name'], 'tagName')
self.assertEqual(updated.tags['Value'], 'tagValue')
# Test Delete Account
response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name)
self.assertIsNone(response.result())
@ResourceGroupPreparer(location=AZURE_LOCATION)
@StorageAccountPreparer(name_prefix='batch', location=AZURE_LOCATION)
def test_mgmt_batch_applications(self, resource_group, location, storage_account, storage_account_key):
# Test Create Account with Auto-Storage
storage_resource = '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}'.format(
self.settings.SUBSCRIPTION_ID,
resource_group.name,
storage_account.name
)
batch_account = models.BatchAccountCreateParameters(
location=location,
auto_storage=models.AutoStorageBaseProperties(storage_resource)
)
account_name = self._get_account_name()
account_setup = self.mgmt_batch_client.batch_account.create(
resource_group.name,
account_name,
batch_account)
account_setup.result()
# Test Sync AutoStorage Keys
response = self.mgmt_batch_client.batch_account.synchronize_auto_storage_keys(
resource_group.name, account_name)
self.assertIsNone(response)
# Test Add Application
application_id = 'my_application_id'
application_name = 'my_application_name'
application_ver = 'v1.0'
application_properties = models.Application(display_name=application_name, allow_updates=True)
application = self.mgmt_batch_client.application.create(
resource_group.name, account_name, application_id, parameters=application_properties)
self.assertIsInstance(application, models.Application)
self.assertTrue(application_id in application.id)
self.assertTrue(application_name in application.display_name)
self.assertTrue(application.allow_updates)
# Test Mgmt Get Application
application = self.mgmt_batch_client.application.get(resource_group.name, account_name, application_id)
self.assertIsInstance(application, models.Application)
self.assertTrue(application_id in application.id)
self.assertTrue(application_name in application.display_name)
self.assertTrue(application.allow_updates)
# Test Mgmt List Applications
applications = self.mgmt_batch_client.application.list(resource_group.name, account_name)
self.assertTrue(len(list(applications)) > 0)
# Test Add Application Package
package_ref = self.mgmt_batch_client.application_package.create(
resource_group.name, account_name, application_id, application_ver)
self.assertIsInstance(package_ref, models.ApplicationPackage)
with io.BytesIO(b'Hello World') as f:
headers = {'x-ms-blob-type': 'BlockBlob'}
upload = requests.put(package_ref.storage_url, headers=headers, data=f.read())
if not upload:
raise ValueError('Upload failed: {!r}'.format(upload))
# Test Activate Application Package
response = self.mgmt_batch_client.application_package.activate(
resource_group.name, account_name, application_id, application_ver, 'zip')
self.assertTrue(response.state == models.PackageState.active)
# Test Update Application
params = models.Application(
allow_updates=False,
display_name='my_updated_name',
default_version=application_ver
)
response = self.mgmt_batch_client.application.update(
resource_group.name, account_name, application_id, params)
self.assertTrue(application_ver in response.default_version)
self.assertTrue('my_updated_name' in response.display_name)
self.assertFalse(response.allow_updates)
# Test Get Application Package
package_ref = self.mgmt_batch_client.application_package.get(
resource_group.name, account_name, application_id, application_ver)
self.assertIsInstance(package_ref, models.ApplicationPackage)
self.assertTrue(application_id in package_ref.id)
self.assertEqual(package_ref.format, 'zip')
self.assertEqual(package_ref.state, models.PackageState.active)
# Test Delete Application Package
response = self.mgmt_batch_client.application_package.delete(
resource_group.name, account_name, application_id, application_ver)
self.assertIsNone(response)
# Test Delete Application
response = self.mgmt_batch_client.application.delete(
resource_group.name, account_name, application_id)
self.assertIsNone(response)
# Test Delete Account
response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name)
self.assertIsNone(response.result())
@ResourceGroupPreparer(location=AZURE_LOCATION)
@SimpleBatchPreparer(location=AZURE_LOCATION)
def test_mgmt_batch_certificates(self, resource_group, location, batch_account):
# Test Add Certificate
parameters = models.CertificateCreateOrUpdateParameters(
thumbprint='cff2ab63c8c955aaf71989efa641b906558d9fb7',
thumbprint_algorithm='sha1',
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=',
format=models.CertificateFormat.pfx,
password='nodesdk')
certificate = 'SHA1-cff2ab63c8c955aaf71989efa641b906558d9fb7'
response = self.mgmt_batch_client.certificate.create(resource_group.name, batch_account.name, certificate, parameters)
self.assertIsInstance(response.result(), models.Certificate)
# Test List Certificates
certs = self.mgmt_batch_client.certificate.list_by_batch_account(resource_group.name, batch_account.name)
self.assertEqual(len(list(certs)), 1)
# Test Get Certificate
cert = self.mgmt_batch_client.certificate.get(resource_group.name, batch_account.name, certificate)
self.assertIsInstance(cert, models.Certificate)
self.assertEqual(cert.thumbprint.lower(), 'cff2ab63c8c955aaf71989efa641b906558d9fb7')
self.assertEqual(cert.thumbprint_algorithm, 'SHA1')
self.assertIsNone(cert.delete_certificate_error)
# Test Update Certificate
parameters = models.CertificateCreateOrUpdateParameters(
password='nodesdk',
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=',)
response = self.mgmt_batch_client.certificate.update(resource_group.name, batch_account.name, certificate, parameters)
self.assertIsInstance(response, models.Certificate)
# Test Cancel Certificate Delete
#with self.assertRaises(models.DeleteCertificateError):
self.mgmt_batch_client.certificate.cancel_deletion(
resource_group.name, batch_account.name, certificate)
# Test Delete Certificate
response = self.mgmt_batch_client.certificate.delete(resource_group.name, batch_account.name, certificate)
self.assertIsNone(response.result())
@ResourceGroupPreparer(location=AZURE_LOCATION)
@SimpleBatchPreparer(location=AZURE_LOCATION)
def test_mgmt_batch_pools(self, resource_group, location, batch_account):
# Test create PAAS pool
paas_pool = "test_paas_pool"
parameters = models.Pool(
display_name="test_pool",
vm_size='small',
deployment_configuration=models.DeploymentConfiguration(
cloud_service_configuration=models.CloudServiceConfiguration(os_family='5')
),
start_task=models.StartTask(
command_line="cmd.exe /c \"echo hello world\"",
resource_files=[models.ResourceFile(http_url='https://blobsource.com', file_path='filename.txt')],
environment_settings=[models.EnvironmentSetting('ENV_VAR', 'env_value')],
user_identity=models.UserIdentity(
auto_user=models.AutoUserSpecification(
elevation_level=models.ElevationLevel.admin
)
)
),
user_accounts=[models.UserAccount('UserName', 'p@55wOrd')],
scale_settings=models.ScaleSettings(
fixed_scale=models.FixedScaleSettings(
target_dedicated_nodes=0,
target_low_priority_nodes=0
)
)
)
response = self.mgmt_batch_client.pool.create(
resource_group.name, batch_account.name, paas_pool, parameters)
self.assertIsInstance(response.result(), models.Pool)
# Test create IAAS pool
iaas_pool = "test_iaas_pool"
parameters = models.Pool(
display_name="test_pool",
vm_size='Standard_A1',
deployment_configuration=models.DeploymentConfiguration(
virtual_machine_configuration=models.VirtualMachineConfiguration(
image_reference=models.ImageReference(
publisher='MicrosoftWindowsServer',
offer='WindowsServer',
sku='2016-Datacenter-smalldisk'
),
node_agent_sku_id='batch.node.windows amd64',
windows_configuration=models.WindowsConfiguration(True)
)
),
scale_settings=models.ScaleSettings(
fixed_scale=models.FixedScaleSettings(
target_dedicated_nodes=0,
target_low_priority_nodes=0
)
)
)
response = self.mgmt_batch_client.pool.create(
resource_group.name, batch_account.name, iaas_pool, parameters)
self.assertIsInstance(response.result(), models.Pool)
# Test list pools
pools = self.mgmt_batch_client.pool.list_by_batch_account(resource_group.name, batch_account.name)
self.assertEqual(len(list(pools)), 2)
# Test Update pool
parameters = models.Pool(
scale_settings=models.ScaleSettings(
auto_scale=models.AutoScaleSettings(
formula='$TargetDedicatedNodes=1'
)
)
)
response = self.mgmt_batch_client.pool.update(
resource_group.name, batch_account.name, iaas_pool, parameters)
self.assertIsInstance(response, models.Pool)
# Test Get pool
pool = self.mgmt_batch_client.pool.get(
resource_group.name, batch_account.name, iaas_pool)
self.assertIsInstance(pool, models.Pool)
self.assertEqual(pool.vm_size, 'STANDARD_A1')
self.assertIsNone(pool.display_name)
self.assertEqual(pool.allocation_state, models.AllocationState.resizing)
self.assertEqual(
pool.deployment_configuration.virtual_machine_configuration.node_agent_sku_id,
'batch.node.windows amd64')
# Test stop resizing
with self.assertRaises(CloudError):
self.mgmt_batch_client.pool.stop_resize(resource_group.name, batch_account.name, iaas_pool)
if self.is_live:
time.sleep(300)
# Test disable auto-scale
response = self.mgmt_batch_client.pool.disable_auto_scale(
resource_group.name, batch_account.name, iaas_pool)
self.assertIsInstance(response, models.Pool)
# Test delete pool
response = self.mgmt_batch_client.pool.delete(
resource_group.name, batch_account.name, iaas_pool)
self.assertIsNone(response.result()) | [
[
[
332,
334
],
[
9415,
9417
]
],
[
[
342,
349
]
],
[
[
357,
361
],
[
22019,
22023
]
],
[
[
369,
377
]
],
[
[
386,
394
],
[
9523,
9531
]
],
[
[
403,
419
],
[
974,
979
],
[
1087,
1092
]
],
[
[
449,
455
],
[
1910,
1916
],
[
2257,
2263
],
[
2393,
2399
],
[
2676,
2682
],
[
2812,
2818
],
[
3075,
3081
],
[
3434,
3440
],
[
3543,
3549
],
[
4149,
4155
],
[
4258,
4264
],
[
4723,
4729
],
[
5769,
5775
],
[
6090,
6096
],
[
6412,
6418
],
[
7272,
7278
],
[
7365,
7371
],
[
8083,
8089
],
[
8359,
8365
],
[
8750,
8756
],
[
9375,
9381
],
[
9937,
9943
],
[
10017,
10023
],
[
10720,
10726
],
[
10901,
10907
],
[
11768,
11774
],
[
14080,
14086
],
[
14389,
14395
],
[
14779,
14785
],
[
15065,
15071
],
[
17450,
17456
],
[
18168,
18174
],
[
18285,
18291
],
[
18361,
18367
],
[
18447,
18453
],
[
18561,
18567
],
[
18682,
18688
],
[
18764,
18770
],
[
18815,
18821
],
[
18885,
18891
],
[
18995,
19001
],
[
19067,
19073
],
[
19117,
19123
],
[
19460,
19466
],
[
19564,
19570
],
[
19687,
19693
],
[
19765,
19771
],
[
19837,
19843
],
[
20154,
20160
],
[
20248,
20254
],
[
20298,
20304
],
[
20642,
20648
],
[
20884,
20890
],
[
20924,
20930
],
[
20973,
20979
],
[
21266,
21272
],
[
21452,
21458
],
[
21614,
21620
],
[
22240,
22246
]
],
[
[
492,
502
],
[
21865,
21875
]
],
[
[
536,
552
],
[
3222,
3238
]
],
[
[
554,
573
],
[
11586,
11605
],
[
17955,
17974
]
],
[
[
612,
629
],
[
819,
836
]
],
[
[
635,
656
],
[
3170,
3191
],
[
4587,
4608
],
[
6731,
6752
],
[
11534,
11555
],
[
17903,
17924
]
],
[
[
662,
684
],
[
6783,
6805
]
],
[
[
689,
703
],
[
3201,
3215
],
[
3248,
3262
],
[
4618,
4632
],
[
6762,
6776
],
[
6836,
6850
],
[
11565,
11579
],
[
11615,
11629
],
[
17934,
17948
],
[
17984,
17998
],
[
1856,
1870
],
[
2160,
2174
],
[
2989,
3003
]
],
[
[
722,
744
],
[
2564,
2586
],
[
2600,
2622
]
],
[
[
805,
818
],
[
875,
888
]
]
] |
"""Test timestamp."""
# --- import -------------------------------------------------------------------------------------
import WrightTools as wt
# --- test ---------------------------------------------------------------------------------------
def test_now():
wt.kit.TimeStamp() # exception will be raised upon failure
def test_utc():
wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17Z") # exception will be raised upon failure
def test_date():
ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17-6")
assert len(ts.date) == 10
def test_hms():
ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:33:44-6")
assert len(ts.hms) == 8
def test_human():
ts = wt.kit.TimeStamp()
assert len(ts.human) == 19
def test_RFC3339():
ts = wt.kit.TimeStamp()
assert ts.RFC3339
assert wt.kit.timestamp_from_RFC3339(ts.RFC3339) == ts
def test_RFC5322():
ts = wt.kit.TimeStamp()
assert ts.RFC5322
def test_path():
ts = wt.kit.TimeStamp()
assert ts.path
| [
[
[
132,
149
],
[
273,
275
],
[
355,
357
],
[
478,
480
],
[
590,
592
],
[
702,
704
],
[
783,
785
],
[
835,
837
],
[
914,
916
],
[
983,
985
]
],
[
[
257,
265
]
],
[
[
339,
347
]
],
[
[
456,
465
]
],
[
[
569,
577
]
],
[
[
679,
689
]
],
[
[
758,
770
]
],
[
[
889,
901
]
],
[
[
961,
970
]
]
] |
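For reference alongside the tests above: an RFC 3339 round-trip can be approximated with the stdlib alone. Note datetime.fromisoformat wants the offset spelled '-06:00', unlike the '-6' shorthand wt.kit.timestamp_from_RFC3339 accepts:

from datetime import datetime, timezone, timedelta

ts = datetime(2017, 11, 13, 16, 9, 17, tzinfo=timezone(timedelta(hours=-6)))
text = ts.isoformat()  # '2017-11-13T16:09:17-06:00'
assert datetime.fromisoformat(text) == ts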
#!/usr/bin/env python3
"""
USAGE:
yb_sysprocs_column_stats.py [options]
PURPOSE:
Table column metadata including estimates from statistics.
OPTIONS:
See the command line help message for all options.
(yb_sysprocs_column_stats.py --help)
Output:
The report as a formatted table, pipe-separated value rows, or inserted into a database table.
"""
from yb_sp_report_util import SPReportUtil
class report_column_stats(SPReportUtil):
"""Issue the ybsql commands used to create the column distribution report."""
config = {
'description': 'Table column metadata including estimates from statistics.'
, 'report_sp_location': 'sysviews'
, 'report_default_order': 'table_schema|table_name'
, 'required_args_single': ['database']
, 'optional_args_multi': ['schema', 'table']
, 'db_filter_args': {'database':'db_name', 'schema':'table_schema', 'table':'table_name'}
, 'usage_example_extra': {'cmd_line_args': "--database acme --schema_in dev --table_like 'cust%'" } }
def execute(self):
return self.build({
'_db_name': self.args_handler.args.database
, '_yb_util_filter' : self.db_filter_sql() })
def main():
print(report_column_stats().execute())
exit(0)
if __name__ == "__main__":
main() | [
[
[
406,
418
],
[
446,
458
]
],
[
[
426,
445
],
[
1241,
1260
]
],
[
[
1223,
1227
],
[
1318,
1322
]
]
] |
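A hypothetical illustration of the db_filter_args mapping above (not yb_sp_report_util's actual mechanics): CLI filter names map onto catalog columns, from which a WHERE fragment can be assembled:

db_filter_args = {'database': 'db_name', 'schema': 'table_schema', 'table': 'table_name'}
cli_args = {'database': 'acme', 'schema': 'dev'}  # e.g. --database acme --schema_in dev

where = ' AND '.join(
    "{} = '{}'".format(db_filter_args[k], v) for k, v in cli_args.items())
print(where)  # db_name = 'acme' AND table_schema = 'dev'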
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['VirtualNetwork']
class VirtualNetwork(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
address_space: Optional[pulumi.Input[pulumi.InputType['AddressSpaceArgs']]] = None,
dhcp_options: Optional[pulumi.Input[pulumi.InputType['DhcpOptionsArgs']]] = None,
enable_ddos_protection: Optional[pulumi.Input[bool]] = None,
enable_vm_protection: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_network_name: Optional[pulumi.Input[str]] = None,
virtual_network_peerings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkPeeringArgs']]]]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Virtual Network resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['AddressSpaceArgs']] address_space: The AddressSpace that contains an array of IP address ranges that can be used by subnets.
:param pulumi.Input[pulumi.InputType['DhcpOptionsArgs']] dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs deployed in the virtual network.
:param pulumi.Input[bool] enable_ddos_protection: Indicates if DDoS protection is enabled for all the protected resources in a Virtual Network.
:param pulumi.Input[bool] enable_vm_protection: Indicates if Vm protection is enabled for all the subnets in a Virtual Network.
:param pulumi.Input[str] etag: Gets a unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] provisioning_state: The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_guid: The resourceGuid property of the Virtual Network resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]] subnets: A list of subnets in a Virtual Network.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] virtual_network_name: The name of the virtual network.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkPeeringArgs']]]] virtual_network_peerings: A list of peerings in a Virtual Network.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['address_space'] = address_space
__props__['dhcp_options'] = dhcp_options
__props__['enable_ddos_protection'] = enable_ddos_protection
__props__['enable_vm_protection'] = enable_vm_protection
__props__['etag'] = etag
__props__['id'] = id
__props__['location'] = location
__props__['provisioning_state'] = provisioning_state
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['resource_guid'] = resource_guid
__props__['subnets'] = subnets
__props__['tags'] = tags
if virtual_network_name is None:
raise TypeError("Missing required property 'virtual_network_name'")
__props__['virtual_network_name'] = virtual_network_name
__props__['virtual_network_peerings'] = virtual_network_peerings
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150615:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160330:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20161201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170301:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20171001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20191101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VirtualNetwork")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VirtualNetwork, __self__).__init__(
'azure-nextgen:network/v20171101:VirtualNetwork',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetwork':
"""
Get an existing VirtualNetwork resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return VirtualNetwork(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="addressSpace")
def address_space(self) -> pulumi.Output[Optional['outputs.AddressSpaceResponse']]:
"""
The AddressSpace that contains an array of IP address ranges that can be used by subnets.
"""
return pulumi.get(self, "address_space")
@property
@pulumi.getter(name="dhcpOptions")
def dhcp_options(self) -> pulumi.Output[Optional['outputs.DhcpOptionsResponse']]:
"""
The dhcpOptions that contains an array of DNS servers available to VMs deployed in the virtual network.
"""
return pulumi.get(self, "dhcp_options")
@property
@pulumi.getter(name="enableDdosProtection")
def enable_ddos_protection(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates if DDoS protection is enabled for all the protected resources in a Virtual Network.
"""
return pulumi.get(self, "enable_ddos_protection")
@property
@pulumi.getter(name="enableVmProtection")
def enable_vm_protection(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates if Vm protection is enabled for all the subnets in a Virtual Network.
"""
return pulumi.get(self, "enable_vm_protection")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
Gets a unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> pulumi.Output[Optional[str]]:
"""
The resourceGuid property of the Virtual Network resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def subnets(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetResponse']]]:
"""
A list of subnets in a Virtual Network.
"""
return pulumi.get(self, "subnets")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="virtualNetworkPeerings")
def virtual_network_peerings(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualNetworkPeeringResponse']]]:
"""
A list of peerings in a Virtual Network.
"""
return pulumi.get(self, "virtual_network_peerings")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
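# Minimal construction sketch (hedged: values are illustrative; only the two
# properties validated in __init__ above are strictly required):
#
#   vnet = VirtualNetwork('vnet',
#                         resource_group_name='my-rg',
#                         virtual_network_name='my-vnet')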
| [
[
[
176,
184
],
[
3760,
3768
],
[
3918,
3926
]
],
[
[
192,
198
]
],
[
[
206,
220
],
[
414,
420
],
[
9143,
9149
],
[
9457,
9463
],
[
9781,
9787
],
[
10099,
10105
],
[
10397,
10403
],
[
10633,
10639
],
[
10818,
10824
],
[
10981,
10987
],
[
11301,
11307
],
[
11557,
11563
],
[
11792,
11798
],
[
11979,
11985
],
[
12142,
12148
],
[
534,
540
],
[
607,
613
],
[
620,
626
],
[
707,
713
],
[
720,
726
],
[
816,
822
],
[
892,
898
],
[
952,
958
],
[
1009,
1015
],
[
1072,
1078
],
[
1145,
1151
],
[
1219,
1225
],
[
1287,
1293
],
[
1349,
1355
],
[
1371,
1377
],
[
1384,
1390
],
[
1459,
1465
],
[
1485,
1491
],
[
1562,
1568
],
[
1641,
1647
],
[
1663,
1669
],
[
1676,
1682
],
[
4086,
4092
],
[
4143,
4149
],
[
5705,
5711
],
[
5737,
5743
],
[
5804,
5810
],
[
5881,
5887
],
[
5951,
5957
],
[
6021,
6027
],
[
6091,
6097
],
[
6161,
6167
],
[
6231,
6237
],
[
6301,
6307
],
[
6371,
6377
],
[
6441,
6447
],
[
6511,
6517
],
[
6581,
6587
],
[
6651,
6657
],
[
6721,
6727
],
[
6791,
6797
],
[
6861,
6867
],
[
6931,
6937
],
[
7001,
7007
],
[
7071,
7077
],
[
7141,
7147
],
[
7211,
7217
],
[
7281,
7287
],
[
7351,
7357
],
[
7421,
7427
],
[
7491,
7497
],
[
7561,
7567
],
[
7631,
7637
],
[
7701,
7707
],
[
7771,
7777
],
[
7841,
7847
],
[
7911,
7917
],
[
7981,
7987
],
[
8051,
8057
],
[
8137,
8143
],
[
8431,
8437
],
[
8477,
8483
],
[
8951,
8957
],
[
8986,
8992
],
[
9209,
9215
],
[
9403,
9409
],
[
9521,
9527
],
[
9728,
9734
],
[
9864,
9870
],
[
10036,
10042
],
[
10178,
10184
],
[
10336,
10342
],
[
10433,
10439
],
[
10588,
10594
],
[
10673,
10679
],
[
10769,
10775
],
[
10854,
10860
],
[
10936,
10942
],
[
11057,
11063
],
[
11242,
11248
],
[
11367,
11373
],
[
11503,
11509
],
[
11596,
11602
],
[
11744,
11750
],
[
11828,
11834
],
[
11934,
11940
],
[
12015,
12021
],
[
12097,
12103
],
[
12229,
12235
],
[
12393,
12399
]
],
[
[
240,
243
]
],
[
[
245,
252
],
[
1472,
1479
],
[
11851,
11858
]
],
[
[
254,
262
],
[
525,
533
],
[
598,
606
],
[
698,
706
],
[
807,
815
],
[
883,
891
],
[
943,
951
],
[
1000,
1008
],
[
1063,
1071
],
[
1136,
1144
],
[
1210,
1218
],
[
1278,
1286
],
[
1340,
1348
],
[
1450,
1458
],
[
1553,
1561
],
[
1632,
1640
],
[
8468,
8476
],
[
9223,
9231
],
[
9535,
9543
],
[
9878,
9886
],
[
10192,
10200
],
[
10447,
10455
],
[
10687,
10695
],
[
11071,
11079
],
[
11381,
11389
],
[
11610,
11618
],
[
11842,
11850
],
[
12243,
12251
]
],
[
[
264,
272
],
[
1362,
1370
],
[
1654,
1662
],
[
11619,
11627
],
[
12252,
12260
]
],
[
[
274,
279
]
],
[
[
296,
306
],
[
4318,
4328
]
],
[
[
308,
315
],
[
12501,
12508
],
[
12615,
12622
]
],
[
[
330,
337
]
],
[
[
359,
360
]
],
[
[
362,
369
]
],
[
[
399,
413
],
[
8198,
8212
],
[
9061,
9075
]
]
] |
import os
import re
import pipelinewise.cli as cli
import pytest
VIRTUALENVS_DIR = './virtualenvs-dummy'
# pylint: disable=no-self-use,fixme
class TestUtils:
"""
Unit Tests for PipelineWise CLI utility functions
"""
def test_json_detectors(self):
"""Testing JSON detector functions"""
assert cli.utils.is_json('{Invalid JSON}') is False
assert cli.utils.is_json('[]') is True
assert cli.utils.is_json('{}') is True
assert cli.utils.is_json('{"prop": 123}') is True
assert cli.utils.is_json('{"prop-str":"dummy-string","prop-int":123,"prop-bool":true}') is True
assert cli.utils.is_json_file('./dummy-json') is False
assert cli.utils.is_json_file('{}/resources/example.json'.format(os.path.dirname(__file__))) is True
assert cli.utils.is_json_file('{}/resources/invalid.json'.format(os.path.dirname(__file__))) is False
assert cli.utils.is_json_file('{}/resources'.format(os.path.dirname(__file__))) is False
def test_json_loader(self):
"""Testing JSON loader functions"""
        # Loading a JSON file that does not exist should return None
assert cli.utils.load_json('/invalid/location/to/json') is None
# Loading JSON file with invalid JSON syntax should raise exception
with pytest.raises(Exception):
cli.utils.load_json('{}/resources/invalid.json'.format(os.path.dirname(__file__)))
# Loading JSON should return python dict
assert \
cli.utils.load_json('{}/resources/example.json'.format(os.path.dirname(__file__))) == \
{
'glossary': {
'title': 'example glossary',
'GlossDiv': {
'title': 'S',
'GlossList': {
'GlossEntry': {
'ID': 'SGML',
'SortAs': 'SGML',
'GlossTerm': 'Standard Generalized Markup Language',
'Acronym': 'SGML',
'Abbrev': 'ISO 8879:1986',
'GlossDef': {
'para': 'A meta-markup language, used to create markup languages such as DocBook.',
'GlossSeeAlso': ['GML', 'XML']
},
'GlossSee': 'markup'
}
}
}
}
}
def test_json_saver(self):
"""Testing JSON save functions"""
obj = {'foo': 'bar'}
# Saving to invalid path should raise exception
with pytest.raises(Exception):
cli.utils.save_json(obj, '/invalid/path')
# Saving and reloading should match
cli.utils.save_json(obj, 'test-json.json')
assert cli.utils.load_json('test-json.json') == obj
# Delete output file, it's not required
os.remove('test-json.json')
def test_yaml_detectors(self):
"""Testing YAML detector functions"""
assert cli.utils.is_yaml("""
foo:
-bar""") is False
assert cli.utils.is_yaml('id: 123') is True
assert cli.utils.is_yaml("""
id: 123
details:
- prop1: 123
- prop2: 456
""") is True
assert cli.utils.is_yaml_file('./dummy-yaml') is False
assert cli.utils.is_yaml_file('{}/resources/example.yml'.format(os.path.dirname(__file__))) is True
assert cli.utils.is_yaml_file('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) is False
assert cli.utils.is_yaml_file('{}/resources'.format(os.path.dirname(__file__))) is False
def test_yaml_loader(self):
"""Testing YAML loader functions"""
        # Loading a YAML file that does not exist should return None
assert cli.utils.load_yaml('/invalid/location/to/yaml') is None
# Loading YAML file with invalid YAML syntax should raise exception
with pytest.raises(Exception):
cli.utils.load_yaml('{}/resources/invalid.yml'.format(os.path.dirname(__file__)))
# Loading YAML file with valid YAML syntax but invalid vault secret file should raise exception
with pytest.raises(Exception):
cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__)),
'invalid-secret-file-path')
# Loading valid YAML file with no vault encryption
assert \
cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__))) == \
['Apple', 'Orange', 'Strawberry', 'Mango']
# Loading valid YAML file with vault encrypted properties
assert \
cli.utils.load_yaml(
'{}/resources/example-with-vault.yml'.format(os.path.dirname(__file__)),
'{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))) == \
['Apple', 'Orange', 'Strawberry', 'Mango', 'Vault Encrypted Secret Fruit']
def test_sample_file_path(self):
"""Sample files must be global config, tap, target YAML or README file"""
for sample in cli.utils.get_sample_file_paths():
assert os.path.isfile(sample) is True
assert \
re.match('.*config.yml$', sample) or \
re.match('.*(tap|target)_.*.yml.sample$', sample) or \
re.match('.*README.md$', sample)
def test_extract_log_attributes(self):
"""Log files must match to certain pattern with embedded attributes in the file name"""
assert \
cli.utils.extract_log_attributes('snowflake-fx-20190508_000038.singer.log.success') == \
{
'filename': 'snowflake-fx-20190508_000038.singer.log.success',
'target_id': 'snowflake',
'tap_id': 'fx',
'timestamp': '2019-05-08T00:00:38',
'sync_engine': 'singer',
'status': 'success'
}
assert \
cli.utils.extract_log_attributes('snowflake-fx-20190508_231238.fastsync.log.running') == \
{
'filename': 'snowflake-fx-20190508_231238.fastsync.log.running',
'target_id': 'snowflake',
'tap_id': 'fx',
'timestamp': '2019-05-08T23:12:38',
'sync_engine': 'fastsync',
'status': 'running'
}
assert \
cli.utils.extract_log_attributes('dummy-log-file.log') == \
{
'filename': 'dummy-log-file.log',
'target_id': 'unknown',
'tap_id': 'unknown',
'timestamp': '1970-01-01T00:00:00',
'sync_engine': 'unknown',
'status': 'unknown'
}
def test_fastsync_bin(self):
"""Fastsync binary paths must point to pipelinewise virtual environments"""
# Giving tap and target types should be enough to generate full path to fastsync binaries
assert \
cli.utils.get_fastsync_bin(VIRTUALENVS_DIR, 'mysql', 'snowflake') == \
'{}/pipelinewise/bin/mysql-to-snowflake'.format(VIRTUALENVS_DIR)
def test_vault(self):
"""Test vault encrypt and decrypt functionalities"""
        # Encrypting with a non-existing secret file should exit
with pytest.raises(SystemExit) as pytest_wrapped_e:
cli.utils.vault_encrypt('plain_test', 'not-existing-secret-file')
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 1
# Encrypted string should start with $ANSIBLE_VAULT; identifier
encrypted_str = str(
cli.utils.vault_encrypt('plain_text', '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))))
assert encrypted_str.startswith("b'$ANSIBLE_VAULT;") is True
# Formatted encrypted string should start with special token and should keep the original vault encrypted value
formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str)
assert formatted_encrypted_str.startswith('!vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str
# Optional name argument should add the name to the output string as a key
formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str, name='encrypted_plain_text')
assert formatted_encrypted_str.startswith(
'encrypted_plain_text: !vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str
def test_schema_loader(self):
"""Test JSON Schema loader functions"""
        # Loading a JSON schema file that does not exist should exit
with pytest.raises(SystemExit) as pytest_wrapped_e:
assert cli.utils.load_schema('/invalid/location/to/schema') is None
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 1
        # An existing JSON schema should be loaded correctly
tap_schema = cli.utils.load_json('{}/../../../pipelinewise/cli/schemas/tap.json'.format(
os.path.dirname(__file__)))
assert cli.utils.load_schema('tap') == tap_schema
def test_json_validate(self):
"""Test JSON schema validator functions"""
schema = cli.utils.load_schema('tap')
# Valid instance should return None
valid_tap = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__)))
assert cli.utils.validate(valid_tap, schema) is None
# Invalid instance should exit
invalid_tap = cli.utils.load_yaml('{}/resources/tap-invalid.yml'.format(os.path.dirname(__file__)))
with pytest.raises(SystemExit) as pytest_wrapped_e:
cli.utils.validate(invalid_tap, schema)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 1
def test_delete_keys(self):
"""Test dictionary functions"""
# Delete single key with empty value
assert cli.utils.delete_empty_keys({'foo': 'bar', 'foo2': None}) == {'foo': 'bar'}
# Delete multiple keys with empty value
assert cli.utils.delete_empty_keys({
'foo': 'bar',
'foo2': None,
'foo3': None,
'foo4': 'bar4'
}) == {
'foo': 'bar',
'foo4': 'bar4'
}
# Delete single key by name
assert cli.utils.delete_keys_from_dict({'foo': 'bar', 'foo2': 'bar2'}, ['foo2']) == {'foo': 'bar'}
        # Delete multiple keys by name
assert cli.utils.delete_keys_from_dict({
'foo': 'bar',
'foo2': 'bar2',
'foo3': None,
'foo4': 'bar4'
}, ['foo2', 'foo4']) == {
'foo': 'bar',
'foo3': None
}
# Delete multiple keys from list of nested dictionaries
assert cli.utils.delete_keys_from_dict(
[{'foo': 'bar', 'foo2': 'bar2'},
{'foo3': {'nested_foo': 'nested_bar', 'nested_foo2': 'nested_bar2'}}], ['foo2', 'nested_foo']) == \
[{'foo': 'bar'},
{'foo3': {'nested_foo2': 'nested_bar2'}}]
def test_silentremove(self):
"""Test removing functions"""
# Deleting non existing file should not raise exception
assert cli.utils.silentremove('this-file-not-exists.json') is None
def test_tap_properties(self):
"""Test tap property getter functions"""
tap_mysql = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__)))
# Every tap should have catalog argument --properties or --catalog
tap_catalog_argument = cli.utils.get_tap_property(tap_mysql, 'tap_catalog_argument')
assert tap_catalog_argument in ['--catalog', '--properties']
# Every tap should have extra_config_keys defined in dict
assert isinstance(cli.utils.get_tap_extra_config_keys(tap_mysql), dict) is True
# MySQL stream_id should be formatted as {{schema_name}}-{{table_name}}
assert cli.utils.get_tap_stream_id(tap_mysql, 'dummy_db', 'dummy_schema', 'dummy_table') == \
'dummy_schema-dummy_table'
# MySQL stream_name should be formatted as {{schema_name}}-{{table_name}}
assert cli.utils.get_tap_stream_name(tap_mysql, 'dummy_db', 'dummy_schema',
'dummy_table') == 'dummy_schema-dummy_table'
        # MySQL default replication method should be LOG_BASED
assert cli.utils.get_tap_default_replication_method(tap_mysql) == 'LOG_BASED'
# Get property value by tap type
assert cli.utils.get_tap_property_by_tap_type('tap-mysql', 'default_replication_method') == 'LOG_BASED'
# Kafka encoding and parameterised local_store_dir should be added as default extra config keys
tap_kafka = cli.utils.load_yaml('{}/resources/tap-valid-kafka.yml'.format(os.path.dirname(__file__)))
assert cli.utils.get_tap_extra_config_keys(tap_kafka, temp_dir='/my/temp/dir') == {
'local_store_dir': '/my/temp/dir',
'encoding': 'utf-8'
}
        # Snowflake tables list should be added to tap_config_extras
tap_snowflake = cli.utils.load_yaml('{}/resources/tap-valid-snowflake.yml'.format(os.path.dirname(__file__)))
assert cli.utils.get_tap_extra_config_keys(tap_snowflake) == {
'tables': 'SCHEMA_1.TABLE_ONE,SCHEMA_1.TABLE_TWO'
}
def test_get_tap_target_names(self):
"""Test get tap and target yamls"""
expected_tap_names = {'tap_test.yml', 'tap_2test.yml', 'tap_valid.yaml'}
expected_target_names = {'target_test.yml'}
tap_names, target_names = cli.utils.get_tap_target_names(f'{os.path.dirname(__file__)}'
f'/resources/test_tap_target_names')
assert tap_names == expected_tap_names
assert target_names == expected_target_names
def test_create_temp_file(self):
"""Test temp files created at the right location"""
# By default temp files should be created in system temp directory
temp_file = cli.utils.create_temp_file()[1]
assert os.path.isfile(temp_file)
os.remove(temp_file)
        # Providing extra dir argument should create the target directory even if it does not exist
temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir')[1]
assert os.path.isfile(temp_file)
os.remove(temp_file)
        # Providing dir, suffix and prefix arguments should create the target directory with a custom prefix and suffix
temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir',
suffix='.json',
prefix='pipelinewise_test_temp_file_')[1]
assert os.path.isfile(temp_file)
os.remove(temp_file)
def test_find_errors_in_log_file(self):
"""Test reading the last n lines of a file"""
# Should return an empty list if no error in the file
log_file = '{}/resources/sample_log_files/tap-run-no-errors.log'.format(os.path.dirname(__file__))
assert cli.utils.find_errors_in_log_file(log_file) == []
# Should return the line with errors
log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__))
assert cli.utils.find_errors_in_log_file(log_file) == \
['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=EXCEPTION This is an exception\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR This is an error\n',
'pymysql.err.OperationalError: (2013, '
"'Lost connection to MySQL server during query ([Errno 104] Connection reset by peer)')\n",
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR '
'message=error with status PGRES_COPY_BOTH and no message from the libpq\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL '
'message=error with status PGRES_COPY_BOTH and no message from the libpq\n',
'snowflake.connector.errors.ProgrammingError: 091003 (22000): '
'Failure using stage area. Cause: [Access Denied (Status Code: 403; Error Code: AccessDenied)]\n']
# Should return the default max number of errors
log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__))
assert cli.utils.find_errors_in_log_file(log_file) == \
['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 3\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 4\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 5\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 6\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 7\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 8\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 9\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 10\n']
# Should return the custom max number of errors
log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__))
assert cli.utils.find_errors_in_log_file(log_file, max_errors=2) == \
['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n',
'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n']
        # Should return only the errors matching a custom error pattern
log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__))
assert cli.utils.find_errors_in_log_file(log_file, error_pattern=re.compile('CUSTOM-ERR-PATTERN')) == \
['CUSTOM-ERR-PATTERN: This is a custom pattern error message\n']
| [
[
[
7,
9
],
[
768,
770
],
[
877,
879
],
[
974,
976
],
[
1405,
1407
],
[
1567,
1569
],
[
3035,
3037
],
[
3579,
3581
],
[
3687,
3689
],
[
3784,
3786
],
[
4214,
4216
],
[
4452,
4454
],
[
4683,
4685
],
[
4949,
4951
],
[
5032,
5034
],
[
5348,
5350
],
[
7930,
7932
],
[
9259,
9261
],
[
9604,
9606
],
[
9813,
9815
],
[
11706,
11708
],
[
13119,
13121
],
[
13488,
13490
],
[
13946,
13948
],
[
14417,
14419
],
[
14451,
14453
],
[
14691,
14693
],
[
14725,
14727
],
[
15135,
15137
],
[
15169,
15171
],
[
15431,
15433
],
[
15646,
15648
],
[
16928,
16930
],
[
18301,
18303
],
[
18768,
18770
]
],
[
[
17,
19
],
[
5416,
5418
],
[
5471,
5473
],
[
5542,
5544
],
[
18868,
18870
]
],
[
[
28,
51
],
[
329,
332
],
[
390,
393
],
[
437,
440
],
[
484,
487
],
[
542,
545
],
[
647,
650
],
[
710,
713
],
[
819,
822
],
[
929,
932
],
[
1165,
1168
],
[
1350,
1353
],
[
1512,
1515
],
[
2780,
2783
],
[
2875,
2878
],
[
2933,
2936
],
[
3160,
3163
],
[
3245,
3248
],
[
3297,
3300
],
[
3459,
3462
],
[
3522,
3525
],
[
3630,
3633
],
[
3739,
3742
],
[
3975,
3978
],
[
4160,
4163
],
[
4398,
4401
],
[
4629,
4632
],
[
4867,
4870
],
[
5294,
5297
],
[
5744,
5747
],
[
6173,
6176
],
[
6608,
6611
],
[
7198,
7201
],
[
7574,
7577
],
[
7853,
7856
],
[
8183,
8186
],
[
8468,
8471
],
[
8923,
8926
],
[
9171,
9174
],
[
9302,
9305
],
[
9448,
9451
],
[
9542,
9545
],
[
9647,
9650
],
[
9755,
9758
],
[
9913,
9916
],
[
10185,
10188
],
[
10325,
10328
],
[
10591,
10594
],
[
10735,
10738
],
[
11051,
11054
],
[
11479,
11482
],
[
11644,
11647
],
[
11841,
11844
],
[
12065,
12068
],
[
12223,
12226
],
[
12450,
12453
],
[
12707,
12710
],
[
12835,
12838
],
[
13057,
13060
],
[
13162,
13165
],
[
13422,
13425
],
[
13531,
13534
],
[
13912,
13915
],
[
14370,
14373
],
[
14590,
14593
],
[
14885,
14888
],
[
15473,
15476
],
[
15688,
15691
],
[
16970,
16973
],
[
18343,
18346
],
[
18810,
18813
]
],
[
[
59,
65
],
[
1312,
1318
],
[
2742,
2748
],
[
4122,
4128
],
[
4360,
4366
],
[
7515,
7521
],
[
8857,
8863
],
[
9854,
9860
]
],
[
[
67,
82
],
[
7225,
7240
],
[
7329,
7344
]
],
[
[
151,
160
]
]
] |
# -*- coding: utf-8 -*-
'''
Manage Kinesis Streams
======================
.. versionadded:: Nitrogen
Create and destroy Kinesis streams. Be aware that this interacts with Amazon's
services, and so may incur charges.
This module uses ``boto3``, which can be installed via package, or pip.
This module accepts explicit Kinesis credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available `here
<http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_.
If IAM roles are not used you need to specify them either in a pillar file or
in the minion's config file:
.. code-block:: yaml
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
It's also possible to specify ``key``, ``keyid`` and ``region`` via a
profile, either passed in as a dict, or as a string to pull from
pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
.. code-block:: yaml
Ensure Kinesis stream does not exist:
boto_kinesis.absent:
- name: new_stream
- keyid: GKTADJGHEIQSXMKKRBJ08H
- key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
- region: us-east-1
Ensure Kinesis stream exists:
boto_kinesis.present:
- name: new_stream
- retention_hours: 168
- enhanced_monitoring: ['ALL']
- num_shards: 2
- keyid: GKTADJGHEIQSXMKKRBJ08H
- key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
- region: us-east-1
'''
# Keep pylint from choking on ret
# pylint: disable=undefined-variable
# Import Python libs
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def __virtual__():
'''
Only load if boto_kinesis is available.
'''
ret = 'boto_kinesis' if 'boto_kinesis.exists' in __salt__ else False
return ret
def present(name,
retention_hours=None,
enhanced_monitoring=None,
num_shards=None,
do_reshard=True,
region=None,
key=None,
keyid=None,
profile=None):
'''
Ensure the kinesis stream is properly configured and scaled.
name (string)
Stream name
retention_hours (int)
Retain data for this many hours.
AWS allows minimum 24 hours, maximum 168 hours.
enhanced_monitoring (list of string)
Turn on enhanced monitoring for the specified shard-level metrics.
Pass in ['ALL'] or True for all metrics, [] or False for no metrics.
Turn on individual metrics by passing in a list: ['IncomingBytes', 'OutgoingBytes']
Note that if only some metrics are supplied, the remaining metrics will be turned off.
num_shards (int)
Reshard stream (if necessary) to this number of shards
!!!!! Resharding is expensive! Each split or merge can take up to 30 seconds,
and the reshard method balances the partition space evenly.
Resharding from N to N+1 can require 2N operations.
Resharding is much faster with powers of 2 (e.g. 2^N to 2^N+1) !!!!!
do_reshard (boolean)
If set to False, this script will NEVER reshard the stream,
regardless of other input. Useful for testing.
region (string)
Region to connect to.
key (string)
Secret key to be used.
keyid (string)
Access key to be used.
profile (dict)
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
'''
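    # Hedged worked example of the resharding note above: going from 4 to 5 shards
    # can require up to 2 * 4 = 8 split/merge operations, while a power-of-two
    # doubling such as 4 -> 8 is roughly one split per open shard.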
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
comments = []
changes_old = {}
changes_new = {}
# Ensure stream exists
exists = __salt__['boto_kinesis.exists'](
name,
region,
key,
keyid,
profile
)
if exists['result'] is False:
if __opts__['test']:
ret['result'] = None
comments.append('Kinesis stream {0} would be created'.format(name))
_add_changes(ret, changes_old, changes_new, comments)
return ret
else:
is_created = __salt__['boto_kinesis.create_stream'](
name,
num_shards,
region,
key,
keyid,
profile
)
if 'error' in is_created:
ret['result'] = False
comments.append('Failed to create stream {0}: {1}'.format(name, is_created['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
comments.append('Kinesis stream {0} successfully created'.format(name))
changes_new['name'] = name
changes_new['num_shards'] = num_shards
else:
comments.append('Kinesis stream {0} already exists'.format(name))
stream_response = __salt__['boto_kinesis.get_stream_when_active'](
name,
region,
key,
keyid,
profile
)
if 'error' in stream_response:
ret['result'] = False
comments.append('Kinesis stream {0}: error getting description: {1}'
.format(name, stream_response['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
stream_details = stream_response['result']["StreamDescription"]
# Configure retention hours
if retention_hours is not None:
old_retention_hours = stream_details["RetentionPeriodHours"]
retention_matches = (old_retention_hours == retention_hours)
if not retention_matches:
if __opts__['test']:
ret['result'] = None
comments.append('Kinesis stream {0}: retention hours would be updated to {1}'
.format(name, retention_hours))
else:
if old_retention_hours > retention_hours:
retention_updated = __salt__['boto_kinesis.decrease_stream_retention_period'](
name,
retention_hours,
region,
key,
keyid,
profile
)
else:
retention_updated = __salt__['boto_kinesis.increase_stream_retention_period'](
name,
retention_hours,
region,
key,
keyid,
profile
)
if 'error' in retention_updated:
ret['result'] = False
comments.append('Kinesis stream {0}: failed to update retention hours: {1}'
.format(name, retention_updated['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
comments.append('Kinesis stream {0}: retention hours was successfully updated'.format(name))
changes_old['retention_hours'] = old_retention_hours
changes_new['retention_hours'] = retention_hours
# wait until active again, otherwise it will log a lot of ResourceInUseExceptions
# note that this isn't required below; reshard() will itself handle waiting
stream_response = __salt__['boto_kinesis.get_stream_when_active'](
name,
region,
key,
keyid,
profile
)
if 'error' in stream_response:
ret['result'] = False
comments.append('Kinesis stream {0}: error getting description: {1}'
.format(name, stream_response['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
stream_details = stream_response['result']["StreamDescription"]
else:
comments.append('Kinesis stream {0}: retention hours did not require change, already set at {1}'
.format(name, old_retention_hours))
else:
comments.append('Kinesis stream {0}: did not configure retention hours'.format(name))
# Configure enhanced monitoring
if enhanced_monitoring is not None:
if enhanced_monitoring is True or enhanced_monitoring == ['ALL']:
# for ease of comparison; describe_stream will always return the full list of metrics, never 'ALL'
enhanced_monitoring = [
"IncomingBytes",
"OutgoingRecords",
"IteratorAgeMilliseconds",
"IncomingRecords",
"ReadProvisionedThroughputExceeded",
"WriteProvisionedThroughputExceeded",
"OutgoingBytes"
]
elif enhanced_monitoring is False or enhanced_monitoring == "None":
enhanced_monitoring = []
old_enhanced_monitoring = stream_details.get("EnhancedMonitoring")[0]["ShardLevelMetrics"]
new_monitoring_set = set(enhanced_monitoring)
old_monitoring_set = set(old_enhanced_monitoring)
matching_metrics = new_monitoring_set.intersection(old_monitoring_set)
enable_metrics = list(new_monitoring_set.difference(matching_metrics))
disable_metrics = list(old_monitoring_set.difference(matching_metrics))
if len(enable_metrics) != 0:
if __opts__['test']:
ret['result'] = None
comments.append('Kinesis stream {0}: would enable enhanced monitoring for {1}'
.format(name, enable_metrics))
else:
metrics_enabled = __salt__['boto_kinesis.enable_enhanced_monitoring'](
name,
enable_metrics,
region,
key,
keyid,
profile
)
if 'error' in metrics_enabled:
ret['result'] = False
comments.append('Kinesis stream {0}: failed to enable enhanced monitoring: {1}'
.format(name, metrics_enabled['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
comments.append('Kinesis stream {0}: enhanced monitoring was enabled for shard-level metrics {1}'
.format(name, enable_metrics))
if len(disable_metrics) != 0:
if __opts__['test']:
ret['result'] = None
comments.append('Kinesis stream {0}: would disable enhanced monitoring for {1}'
.format(name, disable_metrics))
else:
metrics_disabled = __salt__['boto_kinesis.disable_enhanced_monitoring'](
name,
disable_metrics,
region,
key,
keyid,
profile
)
if 'error' in metrics_disabled:
ret['result'] = False
comments.append('Kinesis stream {0}: failed to disable enhanced monitoring: {1}'
.format(name, metrics_disabled['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
comments.append('Kinesis stream {0}: enhanced monitoring was disabled for shard-level metrics {1}'
.format(name, disable_metrics))
if len(disable_metrics) == 0 and len(enable_metrics) == 0:
comments.append('Kinesis stream {0}: enhanced monitoring did not require change, already set at {1}'
.format(name, (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0 else "None")))
elif not __opts__['test']:
changes_old['enhanced_monitoring'] = (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0
else "None")
changes_new['enhanced_monitoring'] = (enhanced_monitoring if len(enhanced_monitoring) > 0
else "None")
else:
comments.append('Kinesis stream {0}: did not configure enhanced monitoring'.format(name))
# Reshard stream if necessary
min_hash_key, max_hash_key, full_stream_details = __salt__['boto_kinesis.get_info_for_reshard'](
stream_details
)
old_num_shards = len(full_stream_details["OpenShards"])
if num_shards is not None and do_reshard:
num_shards_matches = (old_num_shards == num_shards)
if not num_shards_matches:
if __opts__['test']:
ret['result'] = None
comments.append('Kinesis stream {0}: would be resharded from {1} to {2} shards'
.format(name, old_num_shards, num_shards))
else:
log.info("Resharding stream from {0} to {1} shards, this could take a while"
.format(old_num_shards, num_shards))
# reshard returns True when a split/merge action is taken,
# or False when no more actions are required
continue_reshard = True
while continue_reshard:
reshard_response = __salt__['boto_kinesis.reshard'](
name,
num_shards,
do_reshard,
region,
key,
keyid,
profile)
if 'error' in reshard_response:
ret['result'] = False
comments.append('Encountered error while resharding {0}: {1}'
.format(name, reshard_response['error']))
_add_changes(ret, changes_old, changes_new, comments)
return ret
continue_reshard = reshard_response['result']
comments.append('Kinesis stream {0}: successfully resharded to {1} shards'.format(name, num_shards))
changes_old['num_shards'] = old_num_shards
changes_new['num_shards'] = num_shards
else:
comments.append('Kinesis stream {0}: did not require resharding, remains at {1} shards'
.format(name, old_num_shards))
else:
comments.append('Kinesis stream {0}: did not reshard, remains at {1} shards'.format(name, old_num_shards))
_add_changes(ret, changes_old, changes_new, comments)
return ret
def absent(name,
region=None,
key=None,
keyid=None,
profile=None):
'''
Delete the kinesis stream, if it exists.
name (string)
Stream name
region (string)
Region to connect to.
key (string)
Secret key to be used.
keyid (string)
Access key to be used.
profile (dict)
A dict with region, key and keyid, or a pillar key (string)
that contains a dict with region, key and keyid.
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
exists = __salt__['boto_kinesis.exists'](
name,
region,
key,
keyid,
profile
)
if exists['result'] is False:
ret['comment'] = 'Kinesis stream {0} does not exist'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'Kinesis stream {0} would be deleted'.format(name)
ret['result'] = None
return ret
is_deleted = __salt__['boto_kinesis.delete_stream'](
name,
region,
key,
keyid,
profile
)
if 'error' in is_deleted:
ret['comment'] = 'Failed to delete stream {0}: {1}'.format(name, is_deleted['error'])
ret['result'] = False
else:
ret['comment'] = 'Deleted stream {0}'.format(name)
ret['changes'].setdefault('old', 'Stream {0} exists'.format(name))
ret['changes'].setdefault('new', 'Stream {0} deleted'.format(name))
return ret
def _add_changes(ret, changes_old, changes_new, comments):
ret['comment'] = ',\n'.join(comments)
if changes_old:
ret['changes']['old'] = changes_old
if changes_new:
ret['changes']['new'] = changes_new
| [
[
[
1885,
1900
]
],
[
[
1908,
1915
],
[
1923,
1930
]
],
[
[
1917,
1920
],
[
13462,
13465
]
],
[
[
1957,
1968
]
],
[
[
2126,
2133
]
],
[
[
15174,
15180
]
],
[
[
16686,
16698
],
[
4289,
4301
],
[
4795,
4807
],
[
5502,
5514
],
[
7130,
7142
],
[
8160,
8172
],
[
10657,
10669
],
[
11775,
11787
],
[
14408,
14420
],
[
15099,
15111
]
]
] |
# Copyright 2019, The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the `tensor_encoding` package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import assert_compatible
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import merge_dicts
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import OrderedEnum
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import split_dict_py_tf
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import static_or_dynamic_shape
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import fast_walsh_hadamard_transform
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats_cmwc
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs_cmwc
| [
[
[
685,
700
]
],
[
[
724,
732
]
],
[
[
756,
770
]
],
[
[
866,
883
]
],
[
[
978,
989
]
],
[
[
1084,
1095
]
],
[
[
1190,
1206
]
],
[
[
1301,
1324
]
],
[
[
1420,
1449
]
],
[
[
1544,
1557
]
],
[
[
1652,
1670
]
],
[
[
1765,
1777
]
],
[
[
1872,
1889
]
]
] |
import os, sys
import time
import warnings
import argparse
import configparser
import ast
import numpy as np
from math import log
from rdkit import Chem
from rdkit import rdBase
rdBase.DisableLog('rdApp.*')
from rdkit.Chem import Draw
from keras.models import load_model
sys.path.append('../src/')
from python import helper as hp
from python import fixed_parameters as FP
parser = argparse.ArgumentParser(description='SMILES generation')
parser.add_argument('-fn','--filename', type=str, help='Path to the fine-tuning txt file', required=True)
parser.add_argument('-m','--model_path', type=str, help='Path to a pretrained model', required=True)
parser.add_argument('-v','--verbose', type=bool, help='Verbose', required=True)
def int_to_smile(array, indices_token, pad_char):
"""
    From an array of ints, return a list of
    molecules as SMILES strings.
    Note: removes the padding char
"""
all_mols = []
for seq in array:
new_mol = [indices_token[str(int(x))] for x in seq]
all_mols.append(''.join(new_mol).replace(pad_char, ''))
return all_mols
def one_hot_encode(token_lists, n_chars):
output = np.zeros((len(token_lists), len(token_lists[0]), n_chars))
for i, token_list in enumerate(token_lists):
for j, token in enumerate(token_list):
output[i, j, int(token)] = 1
return output
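# Usage sketch (hypothetical values): one_hot_encode([[0, 2]], n_chars=3) returns an
# array of shape (1, 2, 3) with ones at positions [0, 0, 0] and [0, 1, 2].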
def sample(model, temp, start_char, end_char, max_len, indices_token, token_indices):
n_chars = len(indices_token)
seed_token = [token_indices[start_char]]
generated = indices_token[str(seed_token[0])]
while generated[-1] != end_char and len(generated) < max_len:
x_seed = one_hot_encode([seed_token], n_chars)
full_preds = model.predict(x_seed, verbose=0)[0]
logits = full_preds[-1]
probas, next_char_ind = get_token_proba(logits, temp)
next_char = indices_token[str(next_char_ind)]
generated += next_char
seed_token += [next_char_ind]
return generated
def get_token_proba(preds, temp):
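    # Descriptive note: this implements temperature sampling -- rescale the
    # log-probabilities by 1/temp, renormalize with a softmax, then draw one
    # token index from the resulting multinomial distribution.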
preds = np.asarray(preds).astype('float64')
preds = np.log(preds) / temp
exp_preds = np.exp(preds)
probas = exp_preds / np.sum(exp_preds)
char_ind = np.argmax(np.random.multinomial(1, probas, 1))
return probas, char_ind
def softmax(preds):
return np.exp(preds)/np.sum(np.exp(preds))
if __name__ == '__main__':
start = time.time()
####################################
# get back parameters
args = vars(parser.parse_args())
verbose = args['verbose']
filename = args['filename']
model_path = args['model_path']
name_data = filename.split('/')[-1].replace('.txt','')
config = configparser.ConfigParser()
config.read('parameters.ini')
if verbose: print('\nSTART SAMPLING')
####################################
####################################
# path to save data
save_path = f'results/{name_data}/generated_data/'
os.makedirs(save_path, exist_ok=True)
# path to checkpoints
dir_ckpts = f'results/{name_data}/models/'
####################################
####################################
# Parameters to sample de novo SMILES
temp = float(config['EXPERIMENTS']['temp'])
n_sample = int(config['EXPERIMENTS']['n_sample'])
if n_sample>5000:
warnings.warn('You will sample more than 5000 SMILES; this will take a while')
max_len = int(config['PROCESSING']['max_len'])
pad_char = FP.PROCESSING_FIXED['pad_char']
start_char = FP.PROCESSING_FIXED['start_char']
end_char = FP.PROCESSING_FIXED['end_char']
indices_token = FP.INDICES_TOKEN
token_indices = FP.TOKEN_INDICES
####################################
####################################
# start the sampling of new SMILES
epoch = model_path.split('/')[-1].replace('.h5', '')
if verbose: print(f'Sampling from model saved at epoch {epoch}')
model = load_model(model_path)
generated_smi = []
for n in range(n_sample):
generated_smi.append(sample(model, temp,
start_char, end_char, max_len+1,
indices_token, token_indices))
hp.save_obj(generated_smi, f'{save_path}{epoch}_{temp}')
end = time.time()
if verbose: print(f'SAMPLING DONE for model from epoch {epoch} in {end-start:.2f} seconds')
####################################
| [
[
[
7,
9
],
[
3059,
3061
]
],
[
[
11,
14
],
[
273,
276
]
],
[
[
22,
26
],
[
2470,
2474
],
[
4425,
4429
]
],
[
[
34,
42
],
[
3448,
3456
]
],
[
[
50,
58
],
[
384,
392
]
],
[
[
66,
78
],
[
2770,
2782
]
],
[
[
86,
89
]
],
[
[
97,
108
],
[
1158,
1160
],
[
2114,
2116
],
[
2162,
2164
],
[
2199,
2201
],
[
2243,
2245
],
[
2276,
2278
],
[
2286,
2288
],
[
2388,
2390
],
[
2402,
2404
],
[
2409,
2411
]
],
[
[
126,
129
]
],
[
[
148,
152
]
],
[
[
171,
177
],
[
178,
184
]
],
[
[
230,
234
]
],
[
[
261,
271
],
[
4081,
4091
]
],
[
[
319,
331
],
[
4353,
4355
]
],
[
[
351,
373
],
[
3598,
3600
],
[
3647,
3649
],
[
3696,
3698
],
[
3748,
3750
],
[
3785,
3787
]
],
[
[
375,
381
],
[
441,
447
],
[
547,
553
],
[
648,
654
],
[
2570,
2576
]
],
[
[
734,
746
]
],
[
[
1102,
1116
],
[
1690,
1704
]
],
[
[
1386,
1392
],
[
4191,
4197
]
],
[
[
2067,
2082
],
[
1858,
1873
]
],
[
[
2361,
2368
]
],
[
[
2462,
2467
],
[
4512,
4517
]
],
[
[
2558,
2562
],
[
2614,
2618
],
[
2645,
2649
],
[
2679,
2683
]
],
[
[
2604,
2611
],
[
2844,
2851
],
[
4002,
4009
],
[
4444,
4451
]
],
[
[
2634,
2642
],
[
2714,
2722
]
],
[
[
2666,
2676
],
[
3950,
3960
],
[
4092,
4102
]
],
[
[
2702,
2711
],
[
3027,
3036
],
[
3155,
3164
]
],
[
[
2761,
2767
],
[
2802,
2808
],
[
3333,
3339
],
[
3383,
3389
],
[
3550,
3556
]
],
[
[
3004,
3013
],
[
3071,
3080
],
[
4383,
4392
]
],
[
[
3132,
3141
]
],
[
[
3320,
3324
],
[
4205,
4209
],
[
4402,
4406
]
],
[
[
3368,
3376
],
[
3425,
3433
],
[
4151,
4159
]
],
[
[
3536,
3543
],
[
4270,
4277
]
],
[
[
3587,
3595
]
],
[
[
3634,
3644
],
[
4248,
4258
]
],
[
[
3685,
3693
],
[
4260,
4268
]
],
[
[
3732,
3745
],
[
4318,
4331
]
],
[
[
3769,
3782
],
[
4333,
4346
]
],
[
[
3942,
3947
],
[
4055,
4060
],
[
4394,
4399
],
[
4497,
4502
]
],
[
[
4073,
4078
],
[
4198,
4203
]
],
[
[
4113,
4126
],
[
4170,
4183
],
[
4365,
4378
]
],
[
[
4140,
4141
]
],
[
[
4419,
4422
],
[
4508,
4511
]
]
] |
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class VpcSecurityGroupCreateTask(object):
def __init__(self, task=None):
"""
:param task: (Optional)
"""
self.task = task
| [
[
[
676,
702
]
]
] |
import torch, torchvision
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models.resnet import Bottleneck
import numpy as np
from itertools import product
from math import sqrt
from typing import List
from collections import defaultdict
from data.config import cfg, mask_type
from layers import Detect
from layers.interpolate import InterpolateModule
from backbone import construct_backbone
import torch.backends.cudnn as cudnn
from utils import timer
from utils.functions import MovingAverage, make_net
# This is required for Pytorch 1.0.1 on Windows to initialize Cuda on some driver versions.
# See the bug report here: https://github.com/pytorch/pytorch/issues/17108
torch.cuda.current_device()
# As of March 10, 2019, Pytorch DataParallel still doesn't support JIT Script Modules
use_jit = torch.cuda.device_count() <= 1
if not use_jit:
print('Multiple GPUs detected! Turning off JIT.')
ScriptModuleWrapper = torch.jit.ScriptModule if use_jit else nn.Module
script_method_wrapper = torch.jit.script_method if use_jit else lambda fn, _rcn=None: fn
class Concat(nn.Module):
def __init__(self, nets, extra_params):
super().__init__()
self.nets = nn.ModuleList(nets)
self.extra_params = extra_params
def forward(self, x):
# Concat each along the channel dimension
return torch.cat([net(x) for net in self.nets], dim=1, **self.extra_params)
prior_cache = defaultdict(lambda: None)
class PredictionModule(nn.Module):
"""
The (c) prediction module adapted from DSSD:
https://arxiv.org/pdf/1701.06659.pdf
Note that this is slightly different to the module in the paper
because the Bottleneck block actually has a 3x3 convolution in
the middle instead of a 1x1 convolution. Though, I really can't
be arsed to implement it myself, and, who knows, this might be
better.
Args:
- in_channels: The input feature size.
- out_channels: The output feature size (must be a multiple of 4).
- aspect_ratios: A list of lists of priorbox aspect ratios (one list per scale).
- scales: A list of priorbox scales relative to this layer's convsize.
For instance: If this layer has convouts of size 30x30 for
an image of size 600x600, the 'default' (scale
of 1) for this layer would produce bounding
boxes with an area of 20x20px. If the scale is
.5 on the other hand, this layer would consider
bounding boxes with area 10x10px, etc.
- parent: If parent is a PredictionModule, this module will use all the layers
from parent instead of from this module.
"""
def __init__(self, in_channels, out_channels=1024, aspect_ratios=[[1]], scales=[1], parent=None, index=0):
super().__init__()
self.num_classes = cfg.num_classes
self.mask_dim = cfg.mask_dim # Defined by Yolact
self.num_priors = sum(len(x)*len(scales) for x in aspect_ratios)
self.parent = [parent] # Don't include this in the state dict
self.index = index
self.num_heads = cfg.num_heads # Defined by Yolact
if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb:
self.mask_dim = self.mask_dim // self.num_heads
if cfg.mask_proto_prototypes_as_features:
in_channels += self.mask_dim
if parent is None:
if cfg.extra_head_net is None:
out_channels = in_channels
else:
self.upfeature, out_channels = make_net(in_channels, cfg.extra_head_net)
if cfg.use_prediction_module:
self.block = Bottleneck(out_channels, out_channels // 4)
self.conv = nn.Conv2d(out_channels, out_channels, kernel_size=1, bias=True)
self.bn = nn.BatchNorm2d(out_channels)
self.bbox_layer = nn.Conv2d(out_channels, self.num_priors * 4, **cfg.head_layer_params)
self.conf_layer = nn.Conv2d(out_channels, self.num_priors * self.num_classes, **cfg.head_layer_params)
self.mask_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, **cfg.head_layer_params)
if cfg.use_mask_scoring:
self.score_layer = nn.Conv2d(out_channels, self.num_priors, **cfg.head_layer_params)
if cfg.use_instance_coeff:
self.inst_layer = nn.Conv2d(out_channels, self.num_priors * cfg.num_instance_coeffs, **cfg.head_layer_params)
# What is this ugly lambda doing in the middle of all this clean prediction module code?
def make_extra(num_layers):
if num_layers == 0:
return lambda x: x
else:
# Looks more complicated than it is. This just creates an array of num_layers alternating conv-relu
return nn.Sequential(*sum([[
nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True)
] for _ in range(num_layers)], []))
self.bbox_extra, self.conf_extra, self.mask_extra = [make_extra(x) for x in cfg.extra_layers]
if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_coeff_gate:
self.gate_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, kernel_size=3, padding=1)
self.aspect_ratios = aspect_ratios
self.scales = scales
self.priors = None
self.last_conv_size = None
self.last_img_size = None
def forward(self, x):
"""
Args:
- x: The convOut from a layer in the backbone network
Size: [batch_size, in_channels, conv_h, conv_w])
Returns a tuple (bbox_coords, class_confs, mask_output, prior_boxes) with sizes
- bbox_coords: [batch_size, conv_h*conv_w*num_priors, 4]
- class_confs: [batch_size, conv_h*conv_w*num_priors, num_classes]
- mask_output: [batch_size, conv_h*conv_w*num_priors, mask_dim]
- prior_boxes: [conv_h*conv_w*num_priors, 4]
"""
# In case we want to use another module's layers
src = self if self.parent[0] is None else self.parent[0]
conv_h = x.size(2)
conv_w = x.size(3)
if cfg.extra_head_net is not None:
x = src.upfeature(x)
if cfg.use_prediction_module:
# The two branches of PM design (c)
a = src.block(x)
b = src.conv(x)
b = src.bn(b)
b = F.relu(b)
# TODO: Possibly switch this out for a product
x = a + b
bbox_x = src.bbox_extra(x)
conf_x = src.conf_extra(x)
mask_x = src.mask_extra(x)
bbox = src.bbox_layer(bbox_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 4)
conf = src.conf_layer(conf_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.num_classes)
if cfg.eval_mask_branch:
mask = src.mask_layer(mask_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim)
else:
mask = torch.zeros(x.size(0), bbox.size(1), self.mask_dim, device=bbox.device)
if cfg.use_mask_scoring:
score = src.score_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 1)
if cfg.use_instance_coeff:
inst = src.inst_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, cfg.num_instance_coeffs)
# See box_utils.decode for an explanation of this
if cfg.use_yolo_regressors:
bbox[:, :, :2] = torch.sigmoid(bbox[:, :, :2]) - 0.5
bbox[:, :, 0] /= conv_w
bbox[:, :, 1] /= conv_h
if cfg.eval_mask_branch:
if cfg.mask_type == mask_type.direct:
mask = torch.sigmoid(mask)
elif cfg.mask_type == mask_type.lincomb:
mask = cfg.mask_proto_coeff_activation(mask)
if cfg.mask_proto_coeff_gate:
gate = src.gate_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim)
mask = mask * torch.sigmoid(gate)
if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb:
mask = F.pad(mask, (self.index * self.mask_dim, (self.num_heads - self.index - 1) * self.mask_dim), mode='constant', value=0)
priors = self.make_priors(conv_h, conv_w, x.device)
preds = { 'loc': bbox, 'conf': conf, 'mask': mask, 'priors': priors }
if cfg.use_mask_scoring:
preds['score'] = score
if cfg.use_instance_coeff:
preds['inst'] = inst
return preds
def make_priors(self, conv_h, conv_w, device):
""" Note that priors are [x,y,width,height] where (x,y) is the center of the box. """
global prior_cache
size = (conv_h, conv_w)
with timer.env('makepriors'):
if self.last_img_size != (cfg._tmp_img_w, cfg._tmp_img_h):
prior_data = []
# Iteration order is important (it has to sync up with the convout)
for j, i in product(range(conv_h), range(conv_w)):
# +0.5 because priors are in center-size notation
x = (i + 0.5) / conv_w
y = (j + 0.5) / conv_h
for ars in self.aspect_ratios:
for scale in self.scales:
for ar in ars:
if not cfg.backbone.preapply_sqrt:
ar = sqrt(ar)
if cfg.backbone.use_pixel_scales:
w = scale * ar / cfg.max_size
h = scale / ar / cfg.max_size
else:
w = scale * ar / conv_w
h = scale / ar / conv_h
# This is for backward compatibility with a bug where I made everything square by accident
if cfg.backbone.use_square_anchors:
h = w
prior_data += [x, y, w, h]
self.priors = torch.tensor(prior_data, device=device).view(-1, 4).detach()  # torch.tensor (unlike the legacy Tensor constructor) accepts a device argument
self.priors.requires_grad = False
self.last_img_size = (cfg._tmp_img_w, cfg._tmp_img_h)
self.last_conv_size = (conv_w, conv_h)
prior_cache[size] = None
elif self.priors.device != device:
# This whole weird situation is so that DataParallel doesn't copy the priors each iteration
if prior_cache[size] is None:
prior_cache[size] = {}
if device not in prior_cache[size]:
prior_cache[size][device] = self.priors.to(device)
self.priors = prior_cache[size][device]
return self.priors
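# Editorial sketch (not part of the original file): the anchor generation above,
# reduced to a standalone function. It assumes the pixel-scale branch
# (cfg.backbone.use_pixel_scales with preapply_sqrt False) and flattens the
# nested per-branch aspect-ratio lists into one list; all names are illustrative.
def _demo_make_priors(conv_h, conv_w, scales=(24,), aspect_ratios=(1, 0.5, 2), max_size=550):
    from itertools import product
    from math import sqrt
    priors = []  # flat [x, y, w, h] in center-size notation, normalized to [0, 1]
    for j, i in product(range(conv_h), range(conv_w)):
        x = (i + 0.5) / conv_w  # +0.5 places the anchor at the cell center
        y = (j + 0.5) / conv_h
        for scale in scales:
            for ar in aspect_ratios:
                ar = sqrt(ar)  # so w*h == (scale/max_size)**2 for every ratio
                priors.append([x, y, scale * ar / max_size, scale / ar / max_size])
    return priors
# e.g. len(_demo_make_priors(2, 2)) == 2*2*1*3 == 12 anchors, each of area (24/550)**2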
class FPN(ScriptModuleWrapper):
"""
Implements a general version of the FPN introduced in
https://arxiv.org/pdf/1612.03144.pdf
Parameters (in cfg.fpn):
- num_features (int): The number of output features in the fpn layers.
- interpolation_mode (str): The mode to pass to F.interpolate.
- num_downsample (int): The number of downsampled layers to add onto the selected layers.
These extra layers are downsampled from the last selected layer.
Args:
- in_channels (list): For each conv layer you supply in the forward pass,
how many features will it have?
"""
__constants__ = ['interpolation_mode', 'num_downsample', 'use_conv_downsample', 'relu_pred_layers',
'lat_layers', 'pred_layers', 'downsample_layers', 'relu_downsample_layers']
def __init__(self, in_channels):
super().__init__()
self.lat_layers = nn.ModuleList([
nn.Conv2d(x, cfg.fpn.num_features, kernel_size=1)
for x in reversed(in_channels)
])
# This is here for backwards compatibility
padding = 1 if cfg.fpn.pad else 0
self.pred_layers = nn.ModuleList([
nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=padding)
for _ in in_channels
])
if cfg.fpn.use_conv_downsample:
self.downsample_layers = nn.ModuleList([
nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=1, stride=2)
for _ in range(cfg.fpn.num_downsample)
])
self.interpolation_mode = cfg.fpn.interpolation_mode
self.num_downsample = cfg.fpn.num_downsample
self.use_conv_downsample = cfg.fpn.use_conv_downsample
self.relu_downsample_layers = cfg.fpn.relu_downsample_layers
self.relu_pred_layers = cfg.fpn.relu_pred_layers
@script_method_wrapper
def forward(self, convouts:List[torch.Tensor]):
"""
Args:
- convouts (list): A list of convouts for the corresponding layers in in_channels.
Returns:
- A list of FPN convouts in the same order as x with extra downsample layers if requested.
"""
out = []
x = torch.zeros(1, device=convouts[0].device)
for i in range(len(convouts)):
out.append(x)
# For backward compatibility, the conv layers are stored in reverse but the input and output are
# given in the correct order. Thus, use j=-i-1 for the input and output and i for the conv layers.
j = len(convouts)
for lat_layer in self.lat_layers:
j -= 1
if j < len(convouts) - 1:
_, _, h, w = convouts[j].size()
x = F.interpolate(x, size=(h, w), mode=self.interpolation_mode, align_corners=False)
x = x + lat_layer(convouts[j])
out[j] = x
# This janky second loop is here because TorchScript.
j = len(convouts)
for pred_layer in self.pred_layers:
j -= 1
out[j] = pred_layer(out[j])
if self.relu_pred_layers:
F.relu(out[j], inplace=True)
cur_idx = len(out)
# In the original paper, this takes care of P6
if self.use_conv_downsample:
for downsample_layer in self.downsample_layers:
out.append(downsample_layer(out[-1]))
else:
for idx in range(self.num_downsample):
# Note: this is an untested alternative to out.append(out[-1][:, :, ::2, ::2]). Thanks TorchScript.
out.append(nn.functional.max_pool2d(out[-1], 1, stride=2))
if self.relu_downsample_layers:
for idx in range(len(out) - cur_idx):
out[idx] = F.relu(out[idx + cur_idx], inplace=False)
return out
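# Editorial sketch (not part of the original file): the top-down pass above with
# the cfg/TorchScript plumbing stripped away. Untrained convs on dummy inputs,
# purely to show the lateral-1x1 + upsample + add + 3x3-smooth wiring.
def _demo_fpn_topdown(convouts, num_features=256):
    import torch
    import torch.nn.functional as F
    from torch import nn
    lat = [nn.Conv2d(c.size(1), num_features, kernel_size=1) for c in convouts]
    smooth = [nn.Conv2d(num_features, num_features, kernel_size=3, padding=1) for _ in convouts]
    x = torch.zeros(convouts[0].size(0), num_features, 1, 1)
    out = [None] * len(convouts)
    for j in reversed(range(len(convouts))):  # deepest (coarsest) level first
        _, _, h, w = convouts[j].size()
        x = F.interpolate(x, size=(h, w), mode='bilinear', align_corners=False)
        x = x + lat[j](convouts[j])  # upsampled deeper features + lateral 1x1
        out[j] = smooth[j](x)        # per-level 3x3 smoothing conv
    return out
# e.g. feats = [torch.randn(1, c, s, s) for c, s in [(512, 69), (1024, 35), (2048, 18)]]
#      every output of _demo_fpn_topdown(feats) has num_features channels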
class FastMaskIoUNet(ScriptModuleWrapper):
def __init__(self):
super().__init__()
input_channels = 1
last_layer = [(cfg.num_classes-1, 1, {})]
self.maskiou_net, _ = make_net(input_channels, cfg.maskiou_net + last_layer, include_last_relu=True)
def forward(self, x):
x = self.maskiou_net(x)
maskiou_p = F.max_pool2d(x, kernel_size=x.size()[2:]).squeeze(-1).squeeze(-1)
return maskiou_p
class Yolact(nn.Module):
"""
██╗ ██╗ ██████╗ ██╗ █████╗ ██████╗████████╗
╚██╗ ██╔╝██╔═══██╗██║ ██╔══██╗██╔════╝╚══██╔══╝
╚████╔╝ ██║ ██║██║ ███████║██║ ██║
╚██╔╝ ██║ ██║██║ ██╔══██║██║ ██║
██║ ╚██████╔╝███████╗██║ ██║╚██████╗ ██║
╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝
You can set the arguments by changing them in the backbone config object in config.py.
Parameters (in cfg.backbone):
- selected_layers: The indices of the conv layers to use for prediction.
- pred_scales: A list with len(selected_layers) containing tuples of scales (see PredictionModule)
- pred_aspect_ratios: A list of lists of aspect ratios with len(selected_layers) (see PredictionModule)
"""
def __init__(self, only_last_layer=False):
super().__init__()
self.only_last_layer = only_last_layer
self.backbone = construct_backbone(cfg.backbone)
if cfg.freeze_bn:
self.freeze_bn()
# Compute mask_dim here and add it back to the config. Make sure Yolact's constructor is called early!
if cfg.mask_type == mask_type.direct:
cfg.mask_dim = cfg.mask_size**2
elif cfg.mask_type == mask_type.lincomb:
if cfg.mask_proto_use_grid:
self.grid = torch.Tensor(np.load(cfg.mask_proto_grid_file))
self.num_grids = self.grid.size(0)
else:
self.num_grids = 0
self.proto_src = cfg.mask_proto_src
if self.proto_src is None: in_channels = 3
elif cfg.fpn is not None: in_channels = cfg.fpn.num_features
else: in_channels = self.backbone.channels[self.proto_src]
in_channels += self.num_grids
# The include_last_relu=false here is because we might want to change it to another function
self.proto_net, cfg.mask_dim = make_net(in_channels, cfg.mask_proto_net, include_last_relu=False)
if cfg.mask_proto_bias:
cfg.mask_dim += 1
self.selected_layers = cfg.backbone.selected_layers
src_channels = self.backbone.channels
if cfg.use_maskiou:
self.maskiou_net = FastMaskIoUNet()
if cfg.fpn is not None:
# Some hacky rewiring to accommodate the FPN
self.fpn = FPN([src_channels[i] for i in self.selected_layers])
self.selected_layers = list(range(len(self.selected_layers) + cfg.fpn.num_downsample))
src_channels = [cfg.fpn.num_features] * len(self.selected_layers)
self.prediction_layers = nn.ModuleList()
cfg.num_heads = len(self.selected_layers)
for idx, layer_idx in enumerate(self.selected_layers):
# If we're sharing prediction module weights, have every module's parent be the first one
parent = None
if cfg.share_prediction_module and idx > 0:
parent = self.prediction_layers[0]
pred = PredictionModule(src_channels[layer_idx], src_channels[layer_idx],
aspect_ratios = cfg.backbone.pred_aspect_ratios[idx],
scales = cfg.backbone.pred_scales[idx],
parent = parent,
index = idx)
self.prediction_layers.append(pred)
# Extra parameters for the extra losses
if cfg.use_class_existence_loss:
# This comes from the smallest layer selected
# Also note that cfg.num_classes includes background
self.class_existence_fc = nn.Linear(src_channels[-1], cfg.num_classes - 1)
if cfg.use_semantic_segmentation_loss:
self.semantic_seg_conv = nn.Conv2d(src_channels[0], cfg.num_classes-1, kernel_size=1)
# For use in evaluation
self.detect = Detect(cfg.num_classes, bkg_label=0, top_k=cfg.nms_top_k,
conf_thresh=cfg.nms_conf_thresh, nms_thresh=cfg.nms_thresh)
def save_weights(self, path):
""" Saves the model's weights using compression because the file sizes were getting too big. """
torch.save(self.state_dict(), path)
def load_weights(self, path):
""" Loads weights from a compressed save file. """
state_dict = torch.load(path)
# For backward compatibility, remove these (the new variable is called layers)
for key in list(state_dict.keys()):
if key.startswith('backbone.layer') and not key.startswith('backbone.layers'):
del state_dict[key]
# Also for backward compatibility with v1.0 weights, do this check
if key.startswith('fpn.downsample_layers.'):
if cfg.fpn is not None and int(key.split('.')[2]) >= cfg.fpn.num_downsample:
del state_dict[key]
# Uncomment this in normal conditions
# self.load_state_dict(state_dict)
# The try/except below was added for fine-tuning; comment it out (and restore the line above) in normal conditions.
try:
self.load_state_dict(state_dict)
except RuntimeError as e:
print('Ignoring "' + str(e) + '"')
def init_weights(self, backbone_path):
""" Initialize weights for training. """
# Initialize the backbone with the pretrained weights.
self.backbone.init_backbone(backbone_path)
conv_constants = getattr(nn.Conv2d(1, 1, 1), '__constants__')
# Quick lambda to test if one list contains the other
def all_in(x, y):
for _x in x:
if _x not in y:
return False
return True
# Initialize the rest of the conv layers with xavier
for name, module in self.named_modules():
# See issue #127 for why we need such a complicated condition if the module is a WeakScriptModuleProxy
# Broke in 1.3 (see issue #175), WeakScriptModuleProxy was turned into just ScriptModule.
# Broke in 1.4 (see issue #292), where RecursiveScriptModule is the new star of the show.
# Note that this might break with future pytorch updates, so let me know if it does
is_script_conv = False
if 'Script' in type(module).__name__:
# 1.4 workaround: now there's an original_name member so just use that
if hasattr(module, 'original_name'):
is_script_conv = 'Conv' in module.original_name
# 1.3 workaround: check if this has the same constants as a conv module
else:
is_script_conv = (
all_in(module.__dict__['_constants_set'], conv_constants)
and all_in(conv_constants, module.__dict__['_constants_set']))
is_conv_layer = isinstance(module, nn.Conv2d) or is_script_conv
if is_conv_layer and module not in self.backbone.backbone_modules:
nn.init.xavier_uniform_(module.weight.data)
if module.bias is not None:
if cfg.use_focal_loss and 'conf_layer' in name:
if not cfg.use_sigmoid_focal_loss:
# Initialize the last layer as in the focal loss paper.
# Because we use softmax and not sigmoid, I had to derive an alternate expression
# on a notecard. Define pi to be the probability of outputting a foreground detection.
# Then let z = sum(exp(x)) - exp(x_0). Finally let c be the number of foreground classes.
# Chugging through the math, this gives us
# x_0 = log(z * (1 - pi) / pi) where 0 is the background class
# x_i = log(z / c) for all i > 0
# For simplicity (and because we have a degree of freedom here), set z = 1. Then we have
# x_0 = log((1 - pi) / pi) note: don't split up the log for numerical stability
# x_i = -log(c) for all i > 0
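# Quick numeric instance (editorial note): with pi = 0.01 and c = 80 foreground
# classes, x_0 = log(0.99/0.01) ~ 4.60 and x_i = -log(80) ~ -4.38, so softmax
# gives exp(4.60)/(exp(4.60) + 80*exp(-4.38)) = 99/(99 + 1) = 0.99 background
# probability, as intended.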
module.bias.data[0] = np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi)
module.bias.data[1:] = -np.log(module.bias.size(0) - 1)
else:
module.bias.data[0] = -np.log(cfg.focal_loss_init_pi / (1 - cfg.focal_loss_init_pi))
module.bias.data[1:] = -np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi)
else:
module.bias.data.zero_()
def train(self, mode=True):
super().train(mode)
if cfg.freeze_bn:
self.freeze_bn()
def freeze_bn(self, enable=False):
""" Adapted from https://discuss.pytorch.org/t/how-to-train-with-frozen-batchnorm/12106/8 """
for module in self.modules():
if isinstance(module, nn.BatchNorm2d):
module.train() if enable else module.eval()
module.weight.requires_grad = enable
module.bias.requires_grad = enable
def forward(self, x):
""" The input should be of size [batch_size, 3, img_h, img_w] """
_, _, img_h, img_w = x.size()
cfg._tmp_img_h = img_h
cfg._tmp_img_w = img_w
with timer.env('backbone'):
outs = self.backbone(x)
if cfg.fpn is not None:
with timer.env('fpn'):
# Use backbone.selected_layers because we overwrote self.selected_layers
outs = [outs[i] for i in cfg.backbone.selected_layers]
outs = self.fpn(outs)
proto_out = None
if cfg.mask_type == mask_type.lincomb and cfg.eval_mask_branch:
with timer.env('proto'):
proto_x = x if self.proto_src is None else outs[self.proto_src]
if self.num_grids > 0:
grids = self.grid.repeat(proto_x.size(0), 1, 1, 1)
proto_x = torch.cat([proto_x, grids], dim=1)
proto_out = self.proto_net(proto_x)
proto_out = cfg.mask_proto_prototype_activation(proto_out)
if cfg.mask_proto_prototypes_as_features:
# Clone here because we don't want to permute this, though it's unclear whether contiguous() makes that unnecessary
proto_downsampled = proto_out.clone()
if cfg.mask_proto_prototypes_as_features_no_grad:
proto_downsampled = proto_out.detach()
# Move the features last so the multiplication is easy
proto_out = proto_out.permute(0, 2, 3, 1).contiguous()
if cfg.mask_proto_bias:
bias_shape = [x for x in proto_out.size()]
bias_shape[-1] = 1
proto_out = torch.cat([proto_out, torch.ones(*bias_shape)], -1)
with timer.env('pred_heads'):
pred_outs = { 'loc': [], 'conf': [], 'mask': [], 'priors': [] }
if cfg.use_mask_scoring:
pred_outs['score'] = []
if cfg.use_instance_coeff:
pred_outs['inst'] = []
for idx, pred_layer in zip(self.selected_layers, self.prediction_layers):
pred_x = outs[idx]
if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_prototypes_as_features:
# Scale the prototypes down to the current prediction layer's size and add it as inputs
proto_downsampled = F.interpolate(proto_downsampled, size=outs[idx].size()[2:], mode='bilinear', align_corners=False)
pred_x = torch.cat([pred_x, proto_downsampled], dim=1)
# A hack for the way dataparallel works
if cfg.share_prediction_module and pred_layer is not self.prediction_layers[0]:
pred_layer.parent = [self.prediction_layers[0]]
if self.only_last_layer:
p = pred_layer(pred_x.detach())
else:
p = pred_layer(pred_x)
for k, v in p.items():
pred_outs[k].append(v)
for k, v in pred_outs.items():
pred_outs[k] = torch.cat(v, -2)
if proto_out is not None:
pred_outs['proto'] = proto_out
if self.training:
# For the extra loss functions
if cfg.use_class_existence_loss:
pred_outs['classes'] = self.class_existence_fc(outs[-1].mean(dim=(2, 3)))
if cfg.use_semantic_segmentation_loss:
pred_outs['segm'] = self.semantic_seg_conv(outs[0])
return pred_outs
else:
if cfg.use_mask_scoring:
pred_outs['score'] = torch.sigmoid(pred_outs['score'])
if cfg.use_focal_loss:
if cfg.use_sigmoid_focal_loss:
# Note: even though conf[0] exists, this mode doesn't train it so don't use it
pred_outs['conf'] = torch.sigmoid(pred_outs['conf'])
if cfg.use_mask_scoring:
pred_outs['conf'] *= pred_outs['score']
elif cfg.use_objectness_score:
# See focal_loss_sigmoid in multibox_loss.py for details
objectness = torch.sigmoid(pred_outs['conf'][:, :, 0])
pred_outs['conf'][:, :, 1:] = objectness[:, :, None] * F.softmax(pred_outs['conf'][:, :, 1:], -1)
pred_outs['conf'][:, :, 0 ] = 1 - objectness
else:
pred_outs['conf'] = F.softmax(pred_outs['conf'], -1)
else:
if cfg.use_objectness_score:
objectness = torch.sigmoid(pred_outs['conf'][:, :, 0])
pred_outs['conf'][:, :, 1:] = (objectness > 0.10)[..., None] \
* F.softmax(pred_outs['conf'][:, :, 1:], dim=-1)
else:
pred_outs['conf'] = F.softmax(pred_outs['conf'], -1)
return self.detect(pred_outs, self)
# Some testing code
if __name__ == '__main__':
from utils.functions import init_console
init_console()
# Use the first argument to set the config if you want
import sys
if len(sys.argv) > 1:
from data.config import set_cfg
set_cfg(sys.argv[1])
net = Yolact()
net.train()
net.init_weights(backbone_path='weights/' + cfg.backbone.path)
# GPU
net = net.cuda()
torch.set_default_tensor_type('torch.cuda.FloatTensor')
x = torch.zeros((1, 3, cfg.max_size, cfg.max_size))
y = net(x)
for p in net.prediction_layers:
print(p.last_conv_size)
print()
for k, a in y.items():
print(k + ': ', a.size(), torch.sum(a))
exit()
net(x)
# timer.disable('pass2')
avg = MovingAverage()
try:
while True:
timer.reset()
with timer.env('everything else'):
net(x)
avg.add(timer.total_time())
print('\033[2J') # ANSI escape: clears the console screen
timer.print_stats()
print('Avg fps: %.2f\tAvg ms: %.2f ' % (1/avg.get_avg(), avg.get_avg()*1000))
except KeyboardInterrupt:
pass
import json
from jupyterlab.labapp import LabApp
from notebook.base.handlers import APIHandler
from notebook.utils import url_path_join
import tornado
from .api import group_info, submit_job, get_env, check_function_set
from .utils import get_group_volume_path
import os.path
from shutil import copyfile
from datetime import datetime
import importlib.util
import sys
ENV_API_ENDPOINT = 'JUPYTERLAB_DEV_API_ENDPOINT'
NAMESPACE = "jupyterlab-primehub"
api_endpoint = 'http://primehub-graphql/api/graphql'
NOTEBOOK_DIR = None
class CheckFunctionSetHandler(APIHandler):
@tornado.web.authenticated
def post(self):
params = self.get_json_body()
api_token = params.get('api_token', None)
function_set = check_function_set(api_endpoint, api_token)
self.log.info(function_set)
self.finish(json.dumps(function_set))
class ResourceHandler(APIHandler):
@tornado.web.authenticated
def post(self):
params = self.get_json_body()
api_token = params.get('api_token', None)
group_id = os.environ.get('GROUP_ID')
self.log.info('group_info with group_id: {}'.format(group_id))
self.finish(json.dumps(group_info(api_endpoint, api_token, group_id)))
class SubmitJobHandler(APIHandler):
@tornado.web.authenticated
def post(self):
params = self.get_json_body()
api_token = params.get('api_token', None)
name = params.get('name', 'notebook_job')
group_id = os.environ.get('GROUP_ID')
instance_type = params.get('instance_type', None)
image = params.get('image', os.environ.get('IMAGE_NAME'))
path = params.get('path', None)
notebook_parameters = params.get('notebook_parameters', '')
self.log.info('group_info with group_id: {}'.format(group_id))
fullpath = os.path.join(NOTEBOOK_DIR, path)
self.log.info("relative path: " + path)
self.log.info("notebook path: " + fullpath)
# copy the file
group_name = params.get('group_name', os.environ.get('GROUP_NAME'))
time_string = datetime.now().strftime("%Y%m%d%H%M%S%f")
nb_file_name = path.split('/').pop()
nb_directory_path = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''))
hidden_nb_file_name = '.' + nb_file_name.replace('.ipynb', '') + '-' + time_string + '.ipynb'
hidden_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name)
output_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name[1:].replace('.ipynb', '-output.ipynb'))
copyfile(fullpath, hidden_nb_fullpath)
papermill_parameters = ''
try:
for parameter in notebook_parameters.replace(' ', '').split(';'):
if '=' in parameter:
kv = parameter.split('=')
papermill_parameters = papermill_parameters + ' -p {} {}'.format(kv[0], kv[1])
except Exception as e:
self.finish(json.dumps({
'status': 'failed',
'error': 'failed to parse notebook parameters',
'message': str(e)
}))
return
command_str = 'cd {} && papermill {} {}{} && rm {}'.format(nb_directory_path, hidden_nb_fullpath, output_nb_fullpath, papermill_parameters, hidden_nb_fullpath)
self.finish(json.dumps(submit_job(api_endpoint, api_token, name, group_id, instance_type, image, command_str)))
class EnvironmentHandler(APIHandler):
@tornado.web.authenticated
def post(self):
self.finish(json.dumps(get_env()))
def url_pattern(web_app, endpoint, *pieces):
base_url = web_app.settings["base_url"]
return url_path_join(base_url, NAMESPACE, endpoint, *pieces)
def setup_handlers(lab_app: LabApp):
setup_globals(lab_app)
web_app, logger = lab_app.web_app, lab_app.log
apply_api_endpoint_override(logger)
host_pattern = ".*$"
handlers = [(url_pattern(web_app, 'check-function'), CheckFunctionSetHandler),
(url_pattern(web_app, 'resources'), ResourceHandler),
(url_pattern(web_app, 'submit-job'), SubmitJobHandler),
(url_pattern(web_app, 'get-env'), EnvironmentHandler)]
web_app.add_handlers(host_pattern, handlers)
for h in handlers:
logger.info('handler => {}'.format(h))
def setup_globals(lab_app):
global NOTEBOOK_DIR
NOTEBOOK_DIR = lab_app.notebook_dir
lab_app.log.info('setup globals')
lab_app.log.info('\tNOTEBOOK_DIR: ' + NOTEBOOK_DIR)
def apply_api_endpoint_override(logger):
global api_endpoint
override = os.environ.get(ENV_API_ENDPOINT, None)
if not override:
logger.info('use api-endpoint: {}'.format(api_endpoint))
logger.info('it can be overridden from ENV with the key {}'.format(ENV_API_ENDPOINT))
return
logger.info('update api-endpoint from ENV: {}'.format(override))
api_endpoint = override
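# Editorial sketch (not part of the original file): the notebook_parameters
# handling in SubmitJobHandler.post, isolated so the string-to-flags mapping is
# visible; the function name is illustrative.
def _demo_papermill_flags(notebook_parameters):
    flags = ''
    for parameter in notebook_parameters.replace(' ', '').split(';'):
        if '=' in parameter:
            kv = parameter.split('=')
            flags += ' -p {} {}'.format(kv[0], kv[1])
    return flags
# _demo_papermill_flags('alpha=0.1; epochs=10') -> ' -p alpha 0.1 -p epochs 10'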
import setuptools
with open("README.md", "r") as f:
long_description = f.read()
setuptools.setup(
name="runai",
version="0.1.2",
author="Run:AI",
author_email="[email protected]",
description="Run:AI Python library",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/run-ai/runai",
packages=setuptools.find_packages(),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
],
)
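# Editorial note: with this setup.py, a local install is `pip install .` and a
# source/wheel build is `python -m build` (standard setuptools/build usage,
# assumed rather than anything runai-specific).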
from django.urls import path
from .views import (
UserSelfView,
)
urlpatterns = [
path("users/self/profile/", UserSelfView.as_view(), name="user_self"),
]
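# Editorial note: the named route above can be resolved elsewhere via
# reverse("user_self") -> "/users/self/profile/", assuming these patterns are
# included at the project root.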
if __name__ == "__main__":
import numpy as np
import beluga.Beluga as Beluga
import beluga.bvpsol as bvpsol
import beluga.bvpsol.algorithms as algorithms
import beluga.optim.Problem
from beluga.optim.problem import *
from beluga.continuation import *
import logging
# Import Libraries for Matrix Calculations
from sympy import symbols, Matrix, Transpose, simplify, diff, diag
from sympy import sin
from sympy import cos, acos
from sympy import sqrt
from sympy import exp
from sympy import atan
from numpy import pi
writeEqn = True
simpList = False
if writeEqn:
writeList = []
# Constants
v, u_max = symbols('v, u_max')
xb, yb = symbols('xb, yb')
Dt, sigv, sigw, sigr = symbols('Dt, sigv, sigw, sigr')
# Primary States
x, y, theta = symbols('x, y, theta')
# Control
w = symbols('w')
# Secondary States
# Primary State Rates
x_dot = v * cos(theta)
y_dot = v * sin(theta)
theta_dot = u_max * sin(w)
writeList = [x_dot, y_dot, theta_dot]
# Covariance Calculations
p11, p12, p13,\
p22, p23, \
p33 \
= symbols('p11 p12 p13\
p22 p23 \
p33')
P = Matrix([[p11, p12, p13],
[p12, p22, p23],
[p13, p23, p33]])  # P is symmetric, so the (3, 2) entry is p23
F = Matrix([[diff(x_dot, x), diff(x_dot, y), diff(x_dot, theta)],
[diff(y_dot, x), diff(y_dot, y), diff(y_dot, theta)],
[diff(theta_dot, x), diff(theta_dot, y), diff(theta_dot, theta)],])
G = Matrix([[cos(theta), 0],
[sin(theta), 0],
[0, 1]])
h = sqrt((x - xb)**2 + (y - yb)**2)
H = Matrix([[diff(h, x), diff(h, y), diff(h, theta)]])
Q = Dt*diag(sigv**2, sigw**2)
R = Dt*diag(sigr**2)
P_dot = (F*P + P*F.T - P*H.T*(R**-1)*H*P + G*Q*G.T)
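# Editorial note: P_dot above is the continuous-time EKF covariance (Riccati)
# rate, P_dot = F*P + P*F' - P*H'*inv(R)*H*P + G*Q*G', with F the dynamics
# Jacobian, H the Jacobian of the range measurement to the beacon at (xb, yb),
# G the noise input map, Q the process noise, and R the measurement noise.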
Dim = P_dot.shape
k = symbols('k')
PP = (F*P + P*F.T - k * P*H.T*(R**-1)*H*P + G*Q*G.T)
obj = PP[1, 1]
for i in range(0, Dim[0]):
for j in range(i, Dim[1]):
# print(P_dot[i, j])
writeList.append(P_dot[i, j])
# h_new, theta_new, v_new, gam_new = symbols('h_new, theta_new, v_new, gam_new')
# h_scale, theta_scale, v_scale, gam_scale = symbols('h_scale, theta_scale, v_scale, gam_scale')
states = [x, y, theta,
p11, p12, p13,
p22, p23,
p33]
x_s, y_s, theta_s, \
p11_s, p12_s, p13_s, \
p22_s, p23_s, \
p33_s = \
symbols('x_s, y_s, theta_s, \
p11_s, p12_s, p13_s, \
p22_s, p23_s, \
p33_s')
scales = [x_s, y_s, theta_s,
p11_s, p12_s, p13_s,
p22_s, p23_s,
p33_s]
x_n, y_n, theta_n, \
p11_n, p12_n, p13_n, \
p22_n, p23_n, \
p33_n = \
symbols('x_n, y_n, theta_n, \
p11_n, p12_n, p13_n, \
p22_n, p23_n, \
p33_n')
states_new = [x_n, y_n, theta_n,
p11_n, p12_n, p13_n,
p22_n, p23_n,
p33_n]
# print(writeList)
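# Editorial note: the loop below nondimensionalizes each rate equation, dividing
# by the state's scale and substituting x = x_s * x_n for every state so the
# BVP solver works with O(1) quantities.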
Z1 = zip(writeList, scales)
scaledList = []
for item, Scale in Z1:
# print(item)
item = item/Scale
Z2 = zip(states, states_new, scales)
# print(item)
# for state, new, scale in Z2:
# print(state)
# print(new)
# print(scale)
for state, new, scale in Z2:
# print(new)
item = item.subs(state, scale*new)
# print(item)
scaledList.append(item)
Z2 = zip(states, states_new, scales)
for state, new, scale in Z2:
# print(new)
obj = obj.subs(state, scale * new)
k = 1
with open("eqns.txt", "w") as my_file:
for item in scaledList:
if simpList:
# print('* ' + str(item))
item = simplify(item)
# print('# ' + str(item))
my_file.write(str(item) + "\n")
# print(" Wrote " + str(k) + "/" + str(len(scaledList)))
k += 1
k = 1
with open("eqnsUnscaled.txt", "w") as my_file:
for item in writeList:
my_file.write(str(item) + "\n")
# print(" Wrote " + str(k) + "/" + str(len(writeList)))
k += 1
''' Start Optimal Control Calculations '''
# Read Covariance State Rates from File
with open("eqns.txt", "r") as f:
eqnsList = list(f)
# for item in P_dot_eqns:
# print(item)
# Rename this and/or move to optim package?
problem = beluga.optim.Problem('carts0')
# Define independent variables
problem.independent('t', 's')
# Define equations of motion
problem\
.state('x_n', eqnsList[0] + '+ ep*u_max*cos(w)', '1') \
.state('y_n', eqnsList[1], '1') \
.state('theta_n', eqnsList[2], '1') \
.state('p11_n', eqnsList[3], '1') \
.state('p12_n', eqnsList[4], '1') \
.state('p13_n', eqnsList[5], '1') \
.state('p22_n', eqnsList[6], '1') \
.state('p23_n', eqnsList[7], '1') \
.state('p33_n', eqnsList[8], '1') \
# Define controls
problem.control('w', '1') \
# Define costs
# problem.cost['path'] = Expression('p11', 'm^2/s^2')
# problem.cost['path'] = Expression('sin(w)**2', 's')
# problem.cost['terminal'] = Expression('p22_n', '1')
problem.cost['path'] = Expression(str(obj), 's')
# Define constraints
problem.constraints() \
.initial('x_n-x_n_0', '1') \
.initial('y_n-y_n_0', '1') \
.initial('theta_n-theta_n_0', '1') \
\
.initial('p11_n-p11_n_0', '1') \
.initial('p12_n-p12_n_0', '1') \
.initial('p13_n-p13_n_0', '1') \
.initial('p22_n-p22_n_0', '1') \
.initial('p23_n-p23_n_0', '1') \
.initial('p33_n-p33_n_0', '1') \
\
.terminal('x_n-x_n_f', '1') \
.terminal('y_n-y_n_f', '1') \
\
# Define constants
problem.constant('Dt', 0.1, '1')
problem.constant('sigv', 0.1, '1')
problem.constant('sigw', 0.1, '1')
problem.constant('sigr', 0.1, '1')
problem.constant('xb', 5, '1')
problem.constant('yb', 5, '1')
problem.constant('u_max', 0.1, '1')
problem.constant('v', 30, '1')
problem.constant('x_s', 1, '1')
problem.constant('y_s', 1, '1')
problem.constant('theta_s', 1, '1')
problem.constant('p11_s', 1e-3, '1')
problem.constant('p12_s', 1e-3, '1')
problem.constant('p13_s', 1e-3, '1')
problem.constant('p22_s', 1e-1, '1')
problem.constant('p23_s', 1e-2, '1')
problem.constant('p33_s', 1e-3, '1')
problem.constant('ep', 5, '1')
problem.constant('k', 0, '1')
problem.bvp_solver = algorithms.MultipleShooting(derivative_method='fd', tolerance=1e-4, max_iterations=1000, verbose=True, cached=False, number_arcs=64)
# problem.bvp_solver = algorithms.SingleShooting(derivative_method='fd',tolerance=1e-4, max_iterations=1000, verbose=True, cached=False)
problem.scale.unit('m', 1) \
.unit('s', 1) \
.unit('kg', 1) \
.unit('rad', 1)
# Define quantity (not implemented at present)
# Is this actually an Expression rather than a Value?
# problem.quantity = [Value('tanAng','tan(theta)')]
problem.guess.setup('auto', start=[0, 0, 0, 0, 0, 0, 0, 0, 0], time_integrate=1, costate_guess=[0, 0, 0.001, -0.0001, 0.0, 0.0, 0.001, 0.0, 0.])
# problem.guess.setup('auto',start=[80000,3.38575809e-21,5000,7.98617365e-02, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],direction='forward',time_integrate=229.865209,costate_guess =[-1.37514494e+01,3.80852584e+06,-3.26290152e+03,-2.31984720e-14,0.00,0.01,0.01,0.01,0.01,0.01,0.01,0.01,0.01,0.01])
# Figure out nicer way of representing this. Done?
problem.steps.add_step().num_cases(5) \
.terminal('x_n', 20) \
.terminal('y_n', 0)
problem.steps.add_step().num_cases(10) \
.const('xb', 200) \
.const('yb', 600)
problem.steps.add_step().num_cases(80) \
.terminal('x_n', 4000)
problem.steps.add_step().num_cases(20) \
.const('k', 1)
# problem.steps.add_step().num_cases(10) \
# .const('xb', 7) \
# .const('yb', 7) \
# \
# \
# problem.steps.add_step().num_cases(20) \
# .terminal('x_n', 150) \
# .terminal('y_n', 0) \
# problem.steps.add_step().num_cases(15) \
# .terminal('theta', 5)
# problem.steps.add_step().num_cases(21) \
# .terminal('theta', 10*pi/180)
Beluga.run(problem, display_level=logging.DEBUG)
from airbnb_priceforecaster.models import train_model
from airbnb_priceforecaster.models import build_model
from airbnb_priceforecaster.data import AirBnBDataset
import click
@click.group()
def cli():
pass
@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def train(year, month, day):
result = train_model(year, month, day)
click.echo(result)
@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def prod(year, month, day):
dataset = AirBnBDataset(year=year, month=month, day=day)
model = build_model()
model.train_estimator(dataset)
model.save_estimator(prod=True)
if __name__ == '__main__':
cli()
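# Editorial note (assumed usage, file name illustrative): if this module is
# saved as cli.py, the commands above are invoked as
#   python cli.py train -y 2020 -m 5 -d 30
#   python cli.py prod --year 2020 --month 5 --day 30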
from sympy import (symbols, Symbol, nan, oo, zoo, I, sinh, sin, pi, atan,
acos, Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos,
cosh, atan2, exp, log, asinh, acoth, atanh, O, cancel, Matrix, re, im,
Float, Pow, gcd, sec, csc, cot, diff, simplify, Heaviside, arg,
conjugate, series, FiniteSet, asec, acsc, Mul, sinc, jn,
AccumBounds, Interval, ImageSet, Lambda, besselj)
from sympy.core.compatibility import range
from sympy.core.expr import unchanged
from sympy.core.function import ArgumentIndexError
from sympy.core.relational import Ne, Eq
from sympy.functions.elementary.piecewise import Piecewise
from sympy.sets.setexpr import SetExpr
from sympy.utilities.pytest import XFAIL, slow, raises
x, y, z = symbols('x y z')
r = Symbol('r', real=True)
k = Symbol('k', integer=True)
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
np = Symbol('p', nonpositive=True)
nn = Symbol('n', nonnegative=True)
nz = Symbol('nz', nonzero=True)
ep = Symbol('ep', extended_positive=True)
en = Symbol('en', extended_negative=True)
enp = Symbol('ep', extended_nonpositive=True)
enn = Symbol('en', extended_nonnegative=True)
enz = Symbol('enz', extended_nonzero=True)
a = Symbol('a', algebraic=True)
na = Symbol('na', nonzero=True, algebraic=True)
def test_sin():
x, y = symbols('x y')
assert sin.nargs == FiniteSet(1)
assert sin(nan) is nan
assert sin(zoo) is nan
assert sin(oo) == AccumBounds(-1, 1)
assert sin(oo) - sin(oo) == AccumBounds(-2, 2)
assert sin(oo*I) == oo*I
assert sin(-oo*I) == -oo*I
assert 0*sin(oo) is S.Zero
assert 0/sin(oo) is S.Zero
assert 0 + sin(oo) == AccumBounds(-1, 1)
assert 5 + sin(oo) == AccumBounds(4, 6)
assert sin(0) == 0
assert sin(asin(x)) == x
assert sin(atan(x)) == x / sqrt(1 + x**2)
assert sin(acos(x)) == sqrt(1 - x**2)
assert sin(acot(x)) == 1 / (sqrt(1 + 1 / x**2) * x)
assert sin(acsc(x)) == 1 / x
assert sin(asec(x)) == sqrt(1 - 1 / x**2)
assert sin(atan2(y, x)) == y / sqrt(x**2 + y**2)
assert sin(pi*I) == sinh(pi)*I
assert sin(-pi*I) == -sinh(pi)*I
assert sin(-2*I) == -sinh(2)*I
assert sin(pi) == 0
assert sin(-pi) == 0
assert sin(2*pi) == 0
assert sin(-2*pi) == 0
assert sin(-3*10**73*pi) == 0
assert sin(7*10**103*pi) == 0
assert sin(pi/2) == 1
assert sin(-pi/2) == -1
assert sin(pi*Rational(5, 2)) == 1
assert sin(pi*Rational(7, 2)) == -1
ne = symbols('ne', integer=True, even=False)
e = symbols('e', even=True)
assert sin(pi*ne/2) == (-1)**(ne/2 - S.Half)
assert sin(pi*k/2).func == sin
assert sin(pi*e/2) == 0
assert sin(pi*k) == 0
assert sin(pi*k).subs(k, 3) == sin(pi*k/2).subs(k, 6) # issue 8298
assert sin(pi/3) == S.Half*sqrt(3)
assert sin(pi*Rational(-2, 3)) == Rational(-1, 2)*sqrt(3)
assert sin(pi/4) == S.Half*sqrt(2)
assert sin(-pi/4) == Rational(-1, 2)*sqrt(2)
assert sin(pi*Rational(17, 4)) == S.Half*sqrt(2)
assert sin(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2)
assert sin(pi/6) == S.Half
assert sin(-pi/6) == Rational(-1, 2)
assert sin(pi*Rational(7, 6)) == Rational(-1, 2)
assert sin(pi*Rational(-5, 6)) == Rational(-1, 2)
assert sin(pi*Rational(1, 5)) == sqrt((5 - sqrt(5)) / 8)
assert sin(pi*Rational(2, 5)) == sqrt((5 + sqrt(5)) / 8)
assert sin(pi*Rational(3, 5)) == sin(pi*Rational(2, 5))
assert sin(pi*Rational(4, 5)) == sin(pi*Rational(1, 5))
assert sin(pi*Rational(6, 5)) == -sin(pi*Rational(1, 5))
assert sin(pi*Rational(8, 5)) == -sin(pi*Rational(2, 5))
assert sin(pi*Rational(-1273, 5)) == -sin(pi*Rational(2, 5))
assert sin(pi/8) == sqrt((2 - sqrt(2))/4)
assert sin(pi/10) == Rational(-1, 4) + sqrt(5)/4
assert sin(pi/12) == -sqrt(2)/4 + sqrt(6)/4
assert sin(pi*Rational(5, 12)) == sqrt(2)/4 + sqrt(6)/4
assert sin(pi*Rational(-7, 12)) == -sqrt(2)/4 - sqrt(6)/4
assert sin(pi*Rational(-11, 12)) == sqrt(2)/4 - sqrt(6)/4
assert sin(pi*Rational(104, 105)) == sin(pi/105)
assert sin(pi*Rational(106, 105)) == -sin(pi/105)
assert sin(pi*Rational(-104, 105)) == -sin(pi/105)
assert sin(pi*Rational(-106, 105)) == sin(pi/105)
assert sin(x*I) == sinh(x)*I
assert sin(k*pi) == 0
assert sin(17*k*pi) == 0
assert sin(k*pi*I) == sinh(k*pi)*I
assert sin(r).is_real is True
assert sin(0, evaluate=False).is_algebraic
assert sin(a).is_algebraic is None
assert sin(na).is_algebraic is False
q = Symbol('q', rational=True)
assert sin(pi*q).is_algebraic
qn = Symbol('qn', rational=True, nonzero=True)
assert sin(qn).is_rational is False
assert sin(q).is_rational is None # issue 8653
assert isinstance(sin( re(x) - im(y)), sin) is True
assert isinstance(sin(-re(x) + im(y)), sin) is False
assert sin(SetExpr(Interval(0, 1))) == SetExpr(ImageSet(Lambda(x, sin(x)),
Interval(0, 1)))
for d in list(range(1, 22)) + [60, 85]:
for n in range(0, d*2 + 1):
x = n*pi/d
e = abs( float(sin(x)) - sin(float(x)) )
assert e < 1e-12
def test_sin_cos():
for d in [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 24, 30, 40, 60, 120]: # list is not exhaustive...
for n in range(-2*d, d*2):
x = n*pi/d
assert sin(x + pi/2) == cos(x), "fails for %d*pi/%d" % (n, d)
assert sin(x - pi/2) == -cos(x), "fails for %d*pi/%d" % (n, d)
assert sin(x) == cos(x - pi/2), "fails for %d*pi/%d" % (n, d)
assert -sin(x) == cos(x + pi/2), "fails for %d*pi/%d" % (n, d)
def test_sin_series():
assert sin(x).series(x, 0, 9) == \
x - x**3/6 + x**5/120 - x**7/5040 + O(x**9)
def test_sin_rewrite():
assert sin(x).rewrite(exp) == -I*(exp(I*x) - exp(-I*x))/2
assert sin(x).rewrite(tan) == 2*tan(x/2)/(1 + tan(x/2)**2)
assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2)
assert sin(sinh(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sinh(3)).n()
assert sin(cosh(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cosh(3)).n()
assert sin(tanh(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tanh(3)).n()
assert sin(coth(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, coth(3)).n()
assert sin(sin(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sin(3)).n()
assert sin(cos(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cos(3)).n()
assert sin(tan(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tan(3)).n()
assert sin(cot(x)).rewrite(
exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cot(3)).n()
assert sin(log(x)).rewrite(Pow) == I*x**-I / 2 - I*x**I /2
assert sin(x).rewrite(csc) == 1/csc(x)
assert sin(x).rewrite(cos) == cos(x - pi / 2, evaluate=False)
assert sin(x).rewrite(sec) == 1 / sec(x - pi / 2, evaluate=False)
assert sin(cos(x)).rewrite(Pow) == sin(cos(x))
def test_sin_expansion():
# Note: these formulas are not unique. The ones here come from the
# Chebyshev formulas.
assert sin(x + y).expand(trig=True) == sin(x)*cos(y) + cos(x)*sin(y)
assert sin(x - y).expand(trig=True) == sin(x)*cos(y) - cos(x)*sin(y)
assert sin(y - x).expand(trig=True) == cos(x)*sin(y) - sin(x)*cos(y)
assert sin(2*x).expand(trig=True) == 2*sin(x)*cos(x)
assert sin(3*x).expand(trig=True) == -4*sin(x)**3 + 3*sin(x)
assert sin(4*x).expand(trig=True) == -8*sin(x)**3*cos(x) + 4*sin(x)*cos(x)
assert sin(2).expand(trig=True) == 2*sin(1)*cos(1)
assert sin(3).expand(trig=True) == -4*sin(1)**3 + 3*sin(1)
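# Editorial note: the recurrence behind these expansions is
# sin(n*x) = 2*cos(x)*sin((n-1)*x) - sin((n-2)*x); for n = 3,
# sin(3*x) = 2*cos(x)*sin(2*x) - sin(x) = 4*sin(x)*cos(x)**2 - sin(x)
#          = 3*sin(x) - 4*sin(x)**3 once cos(x)**2 = 1 - sin(x)**2.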
def test_sin_AccumBounds():
assert sin(AccumBounds(-oo, oo)) == AccumBounds(-1, 1)
assert sin(AccumBounds(0, oo)) == AccumBounds(-1, 1)
assert sin(AccumBounds(-oo, 0)) == AccumBounds(-1, 1)
assert sin(AccumBounds(0, 2*S.Pi)) == AccumBounds(-1, 1)
assert sin(AccumBounds(0, S.Pi*Rational(3, 4))) == AccumBounds(0, 1)
assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(7, 4))) == AccumBounds(-1, sin(S.Pi*Rational(3, 4)))
assert sin(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(sin(S.Pi/4), sin(S.Pi/3))
assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 6))) == AccumBounds(sin(S.Pi*Rational(5, 6)), sin(S.Pi*Rational(3, 4)))
def test_sin_fdiff():
assert sin(x).fdiff() == cos(x)
raises(ArgumentIndexError, lambda: sin(x).fdiff(2))
def test_trig_symmetry():
assert sin(-x) == -sin(x)
assert cos(-x) == cos(x)
assert tan(-x) == -tan(x)
assert cot(-x) == -cot(x)
assert sin(x + pi) == -sin(x)
assert sin(x + 2*pi) == sin(x)
assert sin(x + 3*pi) == -sin(x)
assert sin(x + 4*pi) == sin(x)
assert sin(x - 5*pi) == -sin(x)
assert cos(x + pi) == -cos(x)
assert cos(x + 2*pi) == cos(x)
assert cos(x + 3*pi) == -cos(x)
assert cos(x + 4*pi) == cos(x)
assert cos(x - 5*pi) == -cos(x)
assert tan(x + pi) == tan(x)
assert tan(x - 3*pi) == tan(x)
assert cot(x + pi) == cot(x)
assert cot(x - 3*pi) == cot(x)
assert sin(pi/2 - x) == cos(x)
assert sin(pi*Rational(3, 2) - x) == -cos(x)
assert sin(pi*Rational(5, 2) - x) == cos(x)
assert cos(pi/2 - x) == sin(x)
assert cos(pi*Rational(3, 2) - x) == -sin(x)
assert cos(pi*Rational(5, 2) - x) == sin(x)
assert tan(pi/2 - x) == cot(x)
assert tan(pi*Rational(3, 2) - x) == cot(x)
assert tan(pi*Rational(5, 2) - x) == cot(x)
assert cot(pi/2 - x) == tan(x)
assert cot(pi*Rational(3, 2) - x) == tan(x)
assert cot(pi*Rational(5, 2) - x) == tan(x)
assert sin(pi/2 + x) == cos(x)
assert cos(pi/2 + x) == -sin(x)
assert tan(pi/2 + x) == -cot(x)
assert cot(pi/2 + x) == -tan(x)
def test_cos():
x, y = symbols('x y')
assert cos.nargs == FiniteSet(1)
assert cos(nan) is nan
assert cos(oo) == AccumBounds(-1, 1)
assert cos(oo) - cos(oo) == AccumBounds(-2, 2)
assert cos(oo*I) is oo
assert cos(-oo*I) is oo
assert cos(zoo) is nan
assert cos(0) == 1
assert cos(acos(x)) == x
assert cos(atan(x)) == 1 / sqrt(1 + x**2)
assert cos(asin(x)) == sqrt(1 - x**2)
assert cos(acot(x)) == 1 / sqrt(1 + 1 / x**2)
assert cos(acsc(x)) == sqrt(1 - 1 / x**2)
assert cos(asec(x)) == 1 / x
assert cos(atan2(y, x)) == x / sqrt(x**2 + y**2)
assert cos(pi*I) == cosh(pi)
assert cos(-pi*I) == cosh(pi)
assert cos(-2*I) == cosh(2)
assert cos(pi/2) == 0
assert cos(-pi/2) == 0
assert cos(pi/2) == 0
assert cos(-pi/2) == 0
assert cos((-3*10**73 + 1)*pi/2) == 0
assert cos((7*10**103 + 1)*pi/2) == 0
n = symbols('n', integer=True, even=False)
e = symbols('e', even=True)
assert cos(pi*n/2) == 0
assert cos(pi*e/2) == (-1)**(e/2)
assert cos(pi) == -1
assert cos(-pi) == -1
assert cos(2*pi) == 1
assert cos(5*pi) == -1
assert cos(8*pi) == 1
assert cos(pi/3) == S.Half
assert cos(pi*Rational(-2, 3)) == Rational(-1, 2)
assert cos(pi/4) == S.Half*sqrt(2)
assert cos(-pi/4) == S.Half*sqrt(2)
assert cos(pi*Rational(11, 4)) == Rational(-1, 2)*sqrt(2)
assert cos(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2)
assert cos(pi/6) == S.Half*sqrt(3)
assert cos(-pi/6) == S.Half*sqrt(3)
assert cos(pi*Rational(7, 6)) == Rational(-1, 2)*sqrt(3)
assert cos(pi*Rational(-5, 6)) == Rational(-1, 2)*sqrt(3)
assert cos(pi*Rational(1, 5)) == (sqrt(5) + 1)/4
assert cos(pi*Rational(2, 5)) == (sqrt(5) - 1)/4
assert cos(pi*Rational(3, 5)) == -cos(pi*Rational(2, 5))
assert cos(pi*Rational(4, 5)) == -cos(pi*Rational(1, 5))
assert cos(pi*Rational(6, 5)) == -cos(pi*Rational(1, 5))
assert cos(pi*Rational(8, 5)) == cos(pi*Rational(2, 5))
assert cos(pi*Rational(-1273, 5)) == -cos(pi*Rational(2, 5))
assert cos(pi/8) == sqrt((2 + sqrt(2))/4)
assert cos(pi/12) == sqrt(2)/4 + sqrt(6)/4
assert cos(pi*Rational(5, 12)) == -sqrt(2)/4 + sqrt(6)/4
assert cos(pi*Rational(7, 12)) == sqrt(2)/4 - sqrt(6)/4
assert cos(pi*Rational(11, 12)) == -sqrt(2)/4 - sqrt(6)/4
assert cos(pi*Rational(104, 105)) == -cos(pi/105)
assert cos(pi*Rational(106, 105)) == -cos(pi/105)
assert cos(pi*Rational(-104, 105)) == -cos(pi/105)
assert cos(pi*Rational(-106, 105)) == -cos(pi/105)
assert cos(x*I) == cosh(x)
assert cos(k*pi*I) == cosh(k*pi)
assert cos(r).is_real is True
assert cos(0, evaluate=False).is_algebraic
assert cos(a).is_algebraic is None
assert cos(na).is_algebraic is False
q = Symbol('q', rational=True)
assert cos(pi*q).is_algebraic
assert cos(pi*Rational(2, 7)).is_algebraic
assert cos(k*pi) == (-1)**k
assert cos(2*k*pi) == 1
for d in list(range(1, 22)) + [60, 85]:
for n in range(0, 2*d + 1):
x = n*pi/d
e = abs( float(cos(x)) - cos(float(x)) )
assert e < 1e-12
def test_issue_6190():
c = Float('123456789012345678901234567890.25', '')
for cls in [sin, cos, tan, cot]:
assert cls(c*pi) == cls(pi/4)
assert cls(4.125*pi) == cls(pi/8)
assert cls(4.7*pi) == cls((4.7 % 2)*pi)
def test_cos_series():
assert cos(x).series(x, 0, 9) == \
1 - x**2/2 + x**4/24 - x**6/720 + x**8/40320 + O(x**9)
def test_cos_rewrite():
assert cos(x).rewrite(exp) == exp(I*x)/2 + exp(-I*x)/2
assert cos(x).rewrite(tan) == (1 - tan(x/2)**2)/(1 + tan(x/2)**2)
assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2)
assert cos(sinh(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sinh(3)).n()
assert cos(cosh(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cosh(3)).n()
assert cos(tanh(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tanh(3)).n()
assert cos(coth(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, coth(3)).n()
assert cos(sin(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sin(3)).n()
assert cos(cos(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cos(3)).n()
assert cos(tan(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tan(3)).n()
assert cos(cot(x)).rewrite(
exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cot(3)).n()
assert cos(log(x)).rewrite(Pow) == x**I/2 + x**-I/2
assert cos(x).rewrite(sec) == 1/sec(x)
assert cos(x).rewrite(sin) == sin(x + pi/2, evaluate=False)
assert cos(x).rewrite(csc) == 1/csc(-x + pi/2, evaluate=False)
assert cos(sin(x)).rewrite(Pow) == cos(sin(x))
def test_cos_expansion():
assert cos(x + y).expand(trig=True) == cos(x)*cos(y) - sin(x)*sin(y)
assert cos(x - y).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y)
assert cos(y - x).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y)
assert cos(2*x).expand(trig=True) == 2*cos(x)**2 - 1
assert cos(3*x).expand(trig=True) == 4*cos(x)**3 - 3*cos(x)
assert cos(4*x).expand(trig=True) == 8*cos(x)**4 - 8*cos(x)**2 + 1
assert cos(2).expand(trig=True) == 2*cos(1)**2 - 1
assert cos(3).expand(trig=True) == 4*cos(1)**3 - 3*cos(1)
def test_cos_AccumBounds():
assert cos(AccumBounds(-oo, oo)) == AccumBounds(-1, 1)
assert cos(AccumBounds(0, oo)) == AccumBounds(-1, 1)
assert cos(AccumBounds(-oo, 0)) == AccumBounds(-1, 1)
assert cos(AccumBounds(0, 2*S.Pi)) == AccumBounds(-1, 1)
assert cos(AccumBounds(-S.Pi/3, S.Pi/4)) == AccumBounds(cos(-S.Pi/3), 1)
assert cos(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 4))) == AccumBounds(-1, cos(S.Pi*Rational(3, 4)))
assert cos(AccumBounds(S.Pi*Rational(5, 4), S.Pi*Rational(4, 3))) == AccumBounds(cos(S.Pi*Rational(5, 4)), cos(S.Pi*Rational(4, 3)))
assert cos(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(cos(S.Pi/3), cos(S.Pi/4))
def test_cos_fdiff():
assert cos(x).fdiff() == -sin(x)
raises(ArgumentIndexError, lambda: cos(x).fdiff(2))
def test_tan():
assert tan(nan) is nan
assert tan(zoo) is nan
assert tan(oo) == AccumBounds(-oo, oo)
assert tan(oo) - tan(oo) == AccumBounds(-oo, oo)
assert tan.nargs == FiniteSet(1)
assert tan(oo*I) == I
assert tan(-oo*I) == -I
assert tan(0) == 0
assert tan(atan(x)) == x
assert tan(asin(x)) == x / sqrt(1 - x**2)
assert tan(acos(x)) == sqrt(1 - x**2) / x
assert tan(acot(x)) == 1 / x
assert tan(acsc(x)) == 1 / (sqrt(1 - 1 / x**2) * x)
assert tan(asec(x)) == sqrt(1 - 1 / x**2) * x
assert tan(atan2(y, x)) == y/x
assert tan(pi*I) == tanh(pi)*I
assert tan(-pi*I) == -tanh(pi)*I
assert tan(-2*I) == -tanh(2)*I
assert tan(pi) == 0
assert tan(-pi) == 0
assert tan(2*pi) == 0
assert tan(-2*pi) == 0
assert tan(-3*10**73*pi) == 0
assert tan(pi/2) is zoo
assert tan(pi*Rational(3, 2)) is zoo
assert tan(pi/3) == sqrt(3)
assert tan(pi*Rational(-2, 3)) == sqrt(3)
assert tan(pi/4) is S.One
assert tan(-pi/4) is S.NegativeOne
assert tan(pi*Rational(17, 4)) is S.One
assert tan(pi*Rational(-3, 4)) is S.One
assert tan(pi/5) == sqrt(5 - 2*sqrt(5))
assert tan(pi*Rational(2, 5)) == sqrt(5 + 2*sqrt(5))
assert tan(pi*Rational(18, 5)) == -sqrt(5 + 2*sqrt(5))
assert tan(pi*Rational(-16, 5)) == -sqrt(5 - 2*sqrt(5))
assert tan(pi/6) == 1/sqrt(3)
assert tan(-pi/6) == -1/sqrt(3)
assert tan(pi*Rational(7, 6)) == 1/sqrt(3)
assert tan(pi*Rational(-5, 6)) == 1/sqrt(3)
assert tan(pi/8) == -1 + sqrt(2)
assert tan(pi*Rational(3, 8)) == 1 + sqrt(2) # issue 15959
assert tan(pi*Rational(5, 8)) == -1 - sqrt(2)
assert tan(pi*Rational(7, 8)) == 1 - sqrt(2)
assert tan(pi/10) == sqrt(1 - 2*sqrt(5)/5)
assert tan(pi*Rational(3, 10)) == sqrt(1 + 2*sqrt(5)/5)
assert tan(pi*Rational(17, 10)) == -sqrt(1 + 2*sqrt(5)/5)
assert tan(pi*Rational(-31, 10)) == -sqrt(1 - 2*sqrt(5)/5)
assert tan(pi/12) == -sqrt(3) + 2
assert tan(pi*Rational(5, 12)) == sqrt(3) + 2
assert tan(pi*Rational(7, 12)) == -sqrt(3) - 2
assert tan(pi*Rational(11, 12)) == sqrt(3) - 2
assert tan(pi/24).radsimp() == -2 - sqrt(3) + sqrt(2) + sqrt(6)
assert tan(pi*Rational(5, 24)).radsimp() == -2 + sqrt(3) - sqrt(2) + sqrt(6)
assert tan(pi*Rational(7, 24)).radsimp() == 2 - sqrt(3) - sqrt(2) + sqrt(6)
assert tan(pi*Rational(11, 24)).radsimp() == 2 + sqrt(3) + sqrt(2) + sqrt(6)
assert tan(pi*Rational(13, 24)).radsimp() == -2 - sqrt(3) - sqrt(2) - sqrt(6)
assert tan(pi*Rational(17, 24)).radsimp() == -2 + sqrt(3) + sqrt(2) - sqrt(6)
assert tan(pi*Rational(19, 24)).radsimp() == 2 - sqrt(3) + sqrt(2) - sqrt(6)
assert tan(pi*Rational(23, 24)).radsimp() == 2 + sqrt(3) - sqrt(2) - sqrt(6)
assert tan(x*I) == tanh(x)*I
assert tan(k*pi) == 0
assert tan(17*k*pi) == 0
assert tan(k*pi*I) == tanh(k*pi)*I
assert tan(r).is_real is None
assert tan(r).is_extended_real is True
assert tan(0, evaluate=False).is_algebraic
assert tan(a).is_algebraic is None
assert tan(na).is_algebraic is False
assert tan(pi*Rational(10, 7)) == tan(pi*Rational(3, 7))
assert tan(pi*Rational(11, 7)) == -tan(pi*Rational(3, 7))
assert tan(pi*Rational(-11, 7)) == tan(pi*Rational(3, 7))
assert tan(pi*Rational(15, 14)) == tan(pi/14)
assert tan(pi*Rational(-15, 14)) == -tan(pi/14)
assert tan(r).is_finite is None
assert tan(I*r).is_finite is True
def test_tan_series():
assert tan(x).series(x, 0, 9) == \
x + x**3/3 + 2*x**5/15 + 17*x**7/315 + O(x**9)
def test_tan_rewrite():
neg_exp, pos_exp = exp(-x*I), exp(x*I)
assert tan(x).rewrite(exp) == I*(neg_exp - pos_exp)/(neg_exp + pos_exp)
assert tan(x).rewrite(sin) == 2*sin(x)**2/sin(2*x)
assert tan(x).rewrite(cos) == cos(x - S.Pi/2, evaluate=False)/cos(x)
assert tan(x).rewrite(cot) == 1/cot(x)
assert tan(sinh(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sinh(3)).n()
assert tan(cosh(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cosh(3)).n()
assert tan(tanh(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tanh(3)).n()
assert tan(coth(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, coth(3)).n()
assert tan(sin(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sin(3)).n()
assert tan(cos(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cos(3)).n()
assert tan(tan(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tan(3)).n()
assert tan(cot(x)).rewrite(
exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cot(3)).n()
assert tan(log(x)).rewrite(Pow) == I*(x**-I - x**I)/(x**-I + x**I)
assert 0 == (cos(pi/34)*tan(pi/34) - sin(pi/34)).rewrite(pow)
assert 0 == (cos(pi/17)*tan(pi/17) - sin(pi/17)).rewrite(pow)
assert tan(pi/19).rewrite(pow) == tan(pi/19)
assert tan(pi*Rational(8, 19)).rewrite(sqrt) == tan(pi*Rational(8, 19))
assert tan(x).rewrite(sec) == sec(x)/sec(x - pi/2, evaluate=False)
assert tan(x).rewrite(csc) == csc(-x + pi/2, evaluate=False)/csc(x)
assert tan(sin(x)).rewrite(Pow) == tan(sin(x))
assert tan(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == sqrt(sqrt(5)/8 +
Rational(5, 8))/(Rational(-1, 4) + sqrt(5)/4)
def test_tan_subs():
assert tan(x).subs(tan(x), y) == y
assert tan(x).subs(x, y) == tan(y)
assert tan(x).subs(x, S.Pi/2) is zoo
assert tan(x).subs(x, S.Pi*Rational(3, 2)) is zoo
def test_tan_expansion():
assert tan(x + y).expand(trig=True) == ((tan(x) + tan(y))/(1 - tan(x)*tan(y))).expand()
assert tan(x - y).expand(trig=True) == ((tan(x) - tan(y))/(1 + tan(x)*tan(y))).expand()
assert tan(x + y + z).expand(trig=True) == (
(tan(x) + tan(y) + tan(z) - tan(x)*tan(y)*tan(z))/
(1 - tan(x)*tan(y) - tan(x)*tan(z) - tan(y)*tan(z))).expand()
assert 0 == tan(2*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 7))])*24 - 7
assert 0 == tan(3*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*55 - 37
assert 0 == tan(4*x - pi/4).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*239 - 1
def test_tan_AccumBounds():
assert tan(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo)
assert tan(AccumBounds(S.Pi/3, S.Pi*Rational(2, 3))) == AccumBounds(-oo, oo)
assert tan(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(tan(S.Pi/6), tan(S.Pi/3))
def test_tan_fdiff():
assert tan(x).fdiff() == tan(x)**2 + 1
raises(ArgumentIndexError, lambda: tan(x).fdiff(2))
def test_cot():
assert cot(nan) is nan
assert cot.nargs == FiniteSet(1)
assert cot(oo*I) == -I
assert cot(-oo*I) == I
assert cot(zoo) is nan
assert cot(0) is zoo
assert cot(2*pi) is zoo
assert cot(acot(x)) == x
assert cot(atan(x)) == 1 / x
assert cot(asin(x)) == sqrt(1 - x**2) / x
assert cot(acos(x)) == x / sqrt(1 - x**2)
assert cot(acsc(x)) == sqrt(1 - 1 / x**2) * x
assert cot(asec(x)) == 1 / (sqrt(1 - 1 / x**2) * x)
assert cot(atan2(y, x)) == x/y
assert cot(pi*I) == -coth(pi)*I
assert cot(-pi*I) == coth(pi)*I
assert cot(-2*I) == coth(2)*I
assert cot(pi) == cot(2*pi) == cot(3*pi)
assert cot(-pi) == cot(-2*pi) == cot(-3*pi)
assert cot(pi/2) == 0
assert cot(-pi/2) == 0
assert cot(pi*Rational(5, 2)) == 0
assert cot(pi*Rational(7, 2)) == 0
assert cot(pi/3) == 1/sqrt(3)
assert cot(pi*Rational(-2, 3)) == 1/sqrt(3)
assert cot(pi/4) is S.One
assert cot(-pi/4) is S.NegativeOne
assert cot(pi*Rational(17, 4)) is S.One
assert cot(pi*Rational(-3, 4)) is S.One
assert cot(pi/6) == sqrt(3)
assert cot(-pi/6) == -sqrt(3)
assert cot(pi*Rational(7, 6)) == sqrt(3)
assert cot(pi*Rational(-5, 6)) == sqrt(3)
assert cot(pi/8) == 1 + sqrt(2)
assert cot(pi*Rational(3, 8)) == -1 + sqrt(2)
assert cot(pi*Rational(5, 8)) == 1 - sqrt(2)
assert cot(pi*Rational(7, 8)) == -1 - sqrt(2)
assert cot(pi/12) == sqrt(3) + 2
assert cot(pi*Rational(5, 12)) == -sqrt(3) + 2
assert cot(pi*Rational(7, 12)) == sqrt(3) - 2
assert cot(pi*Rational(11, 12)) == -sqrt(3) - 2
assert cot(pi/24).radsimp() == sqrt(2) + sqrt(3) + 2 + sqrt(6)
assert cot(pi*Rational(5, 24)).radsimp() == -sqrt(2) - sqrt(3) + 2 + sqrt(6)
assert cot(pi*Rational(7, 24)).radsimp() == -sqrt(2) + sqrt(3) - 2 + sqrt(6)
assert cot(pi*Rational(11, 24)).radsimp() == sqrt(2) - sqrt(3) - 2 + sqrt(6)
assert cot(pi*Rational(13, 24)).radsimp() == -sqrt(2) + sqrt(3) + 2 - sqrt(6)
assert cot(pi*Rational(17, 24)).radsimp() == sqrt(2) - sqrt(3) + 2 - sqrt(6)
assert cot(pi*Rational(19, 24)).radsimp() == sqrt(2) + sqrt(3) - 2 - sqrt(6)
assert cot(pi*Rational(23, 24)).radsimp() == -sqrt(2) - sqrt(3) - 2 - sqrt(6)
assert cot(x*I) == -coth(x)*I
assert cot(k*pi*I) == -coth(k*pi)*I
assert cot(r).is_real is None
assert cot(r).is_extended_real is True
assert cot(a).is_algebraic is None
assert cot(na).is_algebraic is False
assert cot(pi*Rational(10, 7)) == cot(pi*Rational(3, 7))
assert cot(pi*Rational(11, 7)) == -cot(pi*Rational(3, 7))
assert cot(pi*Rational(-11, 7)) == cot(pi*Rational(3, 7))
assert cot(pi*Rational(39, 34)) == cot(pi*Rational(5, 34))
assert cot(pi*Rational(-41, 34)) == -cot(pi*Rational(7, 34))
assert cot(x).is_finite is None
assert cot(r).is_finite is None
i = Symbol('i', imaginary=True)
assert cot(i).is_finite is True
assert cot(x).subs(x, 3*pi) is zoo
def test_tan_cot_sin_cos_evalf():
assert abs((tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15)) - 1).evalf()) < 1e-14
assert abs((cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15)) - 1).evalf()) < 1e-14
@XFAIL
def test_tan_cot_sin_cos_ratsimp():
assert 1 == (tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15))).ratsimp()
assert 1 == (cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15))).ratsimp()
def test_cot_series():
assert cot(x).series(x, 0, 9) == \
1/x - x/3 - x**3/45 - 2*x**5/945 - x**7/4725 + O(x**9)
# issue 6210
assert cot(x**4 + x**5).series(x, 0, 1) == \
x**(-4) - 1/x**3 + x**(-2) - 1/x + 1 + O(x)
assert cot(pi*(1-x)).series(x, 0, 3) == -1/(pi*x) + pi*x/3 + O(x**3)
assert cot(x).taylor_term(0, x) == 1/x
assert cot(x).taylor_term(2, x) is S.Zero
assert cot(x).taylor_term(3, x) == -x**3/45
def test_cot_rewrite():
neg_exp, pos_exp = exp(-x*I), exp(x*I)
assert cot(x).rewrite(exp) == I*(pos_exp + neg_exp)/(pos_exp - neg_exp)
assert cot(x).rewrite(sin) == sin(2*x)/(2*(sin(x)**2))
assert cot(x).rewrite(cos) == cos(x)/cos(x - pi/2, evaluate=False)
assert cot(x).rewrite(tan) == 1/tan(x)
assert cot(sinh(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sinh(3)).n()
assert cot(cosh(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, cosh(3)).n()
assert cot(tanh(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tanh(3)).n()
assert cot(coth(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, coth(3)).n()
assert cot(sin(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sin(3)).n()
assert cot(tan(x)).rewrite(
exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tan(3)).n()
assert cot(log(x)).rewrite(Pow) == -I*(x**-I + x**I)/(x**-I - x**I)
assert cot(pi*Rational(4, 34)).rewrite(pow).ratsimp() == (cos(pi*Rational(4, 34))/sin(pi*Rational(4, 34))).rewrite(pow).ratsimp()
assert cot(pi*Rational(4, 17)).rewrite(pow) == (cos(pi*Rational(4, 17))/sin(pi*Rational(4, 17))).rewrite(pow)
assert cot(pi/19).rewrite(pow) == cot(pi/19)
assert cot(pi/19).rewrite(sqrt) == cot(pi/19)
assert cot(x).rewrite(sec) == sec(x - pi / 2, evaluate=False) / sec(x)
assert cot(x).rewrite(csc) == csc(x) / csc(- x + pi / 2, evaluate=False)
assert cot(sin(x)).rewrite(Pow) == cot(sin(x))
assert cot(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == (Rational(-1, 4) + sqrt(5)/4)/\
sqrt(sqrt(5)/8 + Rational(5, 8))
def test_cot_subs():
assert cot(x).subs(cot(x), y) == y
assert cot(x).subs(x, y) == cot(y)
assert cot(x).subs(x, 0) is zoo
assert cot(x).subs(x, S.Pi) is zoo
def test_cot_expansion():
assert cot(x + y).expand(trig=True) == ((cot(x)*cot(y) - 1)/(cot(x) + cot(y))).expand()
assert cot(x - y).expand(trig=True) == (-(cot(x)*cot(y) + 1)/(cot(x) - cot(y))).expand()
assert cot(x + y + z).expand(trig=True) == (
(cot(x)*cot(y)*cot(z) - cot(x) - cot(y) - cot(z))/
(-1 + cot(x)*cot(y) + cot(x)*cot(z) + cot(y)*cot(z))).expand()
assert cot(3*x).expand(trig=True) == ((cot(x)**3 - 3*cot(x))/(3*cot(x)**2 - 1)).expand()
assert 0 == cot(2*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 3))])*3 + 4
assert 0 == cot(3*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 5))])*55 - 37
assert 0 == cot(4*x - pi/4).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 7))])*863 + 191
def test_cot_AccumBounds():
assert cot(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo)
assert cot(AccumBounds(-S.Pi/3, S.Pi/3)) == AccumBounds(-oo, oo)
assert cot(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(cot(S.Pi/3), cot(S.Pi/6))
def test_cot_fdiff():
assert cot(x).fdiff() == -cot(x)**2 - 1
raises(ArgumentIndexError, lambda: cot(x).fdiff(2))
def test_sinc():
assert isinstance(sinc(x), sinc)
s = Symbol('s', zero=True)
assert sinc(s) is S.One
assert sinc(S.Infinity) is S.Zero
assert sinc(S.NegativeInfinity) is S.Zero
assert sinc(S.NaN) is S.NaN
assert sinc(S.ComplexInfinity) is S.NaN
n = Symbol('n', integer=True, nonzero=True)
assert sinc(n*pi) is S.Zero
assert sinc(-n*pi) is S.Zero
assert sinc(pi/2) == 2 / pi
assert sinc(-pi/2) == 2 / pi
assert sinc(pi*Rational(5, 2)) == 2 / (5*pi)
assert sinc(pi*Rational(7, 2)) == -2 / (7*pi)
assert sinc(-x) == sinc(x)
assert sinc(x).diff() == Piecewise(((x*cos(x) - sin(x)) / x**2, Ne(x, 0)), (0, True))
assert sinc(x).diff(x).equals(sinc(x).rewrite(sin).diff(x))
assert sinc(x).diff().subs(x, 0) is S.Zero
assert sinc(x).series() == 1 - x**2/6 + x**4/120 + O(x**6)
assert sinc(x).rewrite(jn) == jn(0, x)
assert sinc(x).rewrite(sin) == Piecewise((sin(x)/x, Ne(x, 0)), (1, True))
def test_asin():
assert asin(nan) is nan
assert asin.nargs == FiniteSet(1)
assert asin(oo) == -I*oo
assert asin(-oo) == I*oo
assert asin(zoo) is zoo
# Note: asin(-x) = - asin(x)
assert asin(0) == 0
assert asin(1) == pi/2
assert asin(-1) == -pi/2
assert asin(sqrt(3)/2) == pi/3
assert asin(-sqrt(3)/2) == -pi/3
assert asin(sqrt(2)/2) == pi/4
assert asin(-sqrt(2)/2) == -pi/4
assert asin(sqrt((5 - sqrt(5))/8)) == pi/5
assert asin(-sqrt((5 - sqrt(5))/8)) == -pi/5
assert asin(S.Half) == pi/6
assert asin(Rational(-1, 2)) == -pi/6
    assert asin(sqrt(2 - sqrt(2))/2) == pi/8
    assert asin(-sqrt(2 - sqrt(2))/2) == -pi/8
assert asin((sqrt(5) - 1)/4) == pi/10
assert asin(-(sqrt(5) - 1)/4) == -pi/10
assert asin((sqrt(3) - 1)/sqrt(2**3)) == pi/12
assert asin(-(sqrt(3) - 1)/sqrt(2**3)) == -pi/12
# check round-trip for exact values:
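    # gcd(n, d) == 1 keeps n*pi/d in lowest terms, and the range of n keeps
    # the angle within asin's principal branch [-pi/2, pi/2], so the
    # round-trip must return the angle unchanged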
for d in [5, 6, 8, 10, 12]:
for n in range(-(d//2), d//2 + 1):
if gcd(n, d) == 1:
assert asin(sin(n*pi/d)) == n*pi/d
assert asin(x).diff(x) == 1/sqrt(1 - x**2)
assert asin(0.2).is_real is True
assert asin(-2).is_real is False
assert asin(r).is_real is None
assert asin(-2*I) == -I*asinh(2)
assert asin(Rational(1, 7), evaluate=False).is_positive is True
assert asin(Rational(-1, 7), evaluate=False).is_positive is False
assert asin(p).is_positive is None
assert asin(sin(Rational(7, 2))) == Rational(-7, 2) + pi
assert asin(sin(Rational(-7, 4))) == Rational(7, 4) - pi
assert unchanged(asin, cos(x))
def test_asin_series():
assert asin(x).series(x, 0, 9) == \
x + x**3/6 + 3*x**5/40 + 5*x**7/112 + O(x**9)
t5 = asin(x).taylor_term(5, x)
assert t5 == 3*x**5/40
assert asin(x).taylor_term(7, x, t5, 0) == 5*x**7/112
def test_asin_rewrite():
assert asin(x).rewrite(log) == -I*log(I*x + sqrt(1 - x**2))
assert asin(x).rewrite(atan) == 2*atan(x/(1 + sqrt(1 - x**2)))
assert asin(x).rewrite(acos) == S.Pi/2 - acos(x)
assert asin(x).rewrite(acot) == 2*acot((sqrt(-x**2 + 1) + 1)/x)
assert asin(x).rewrite(asec) == -asec(1/x) + pi/2
assert asin(x).rewrite(acsc) == acsc(1/x)
def test_asin_fdiff():
assert asin(x).fdiff() == 1/sqrt(1 - x**2)
raises(ArgumentIndexError, lambda: asin(x).fdiff(2))
def test_acos():
assert acos(nan) is nan
assert acos(zoo) is zoo
assert acos.nargs == FiniteSet(1)
assert acos(oo) == I*oo
assert acos(-oo) == -I*oo
# Note: acos(-x) = pi - acos(x)
assert acos(0) == pi/2
assert acos(S.Half) == pi/3
assert acos(Rational(-1, 2)) == pi*Rational(2, 3)
assert acos(1) == 0
assert acos(-1) == pi
assert acos(sqrt(2)/2) == pi/4
assert acos(-sqrt(2)/2) == pi*Rational(3, 4)
# check round-trip for exact values:
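    # here num*pi/d stays within acos's principal branch [0, pi)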
for d in [5, 6, 8, 10, 12]:
for num in range(d):
if gcd(num, d) == 1:
assert acos(cos(num*pi/d)) == num*pi/d
assert acos(2*I) == pi/2 - asin(2*I)
assert acos(x).diff(x) == -1/sqrt(1 - x**2)
assert acos(0.2).is_real is True
assert acos(-2).is_real is False
assert acos(r).is_real is None
assert acos(Rational(1, 7), evaluate=False).is_positive is True
assert acos(Rational(-1, 7), evaluate=False).is_positive is True
assert acos(Rational(3, 2), evaluate=False).is_positive is False
assert acos(p).is_positive is None
assert acos(2 + p).conjugate() != acos(10 + p)
assert acos(-3 + n).conjugate() != acos(-3 + n)
assert acos(Rational(1, 3)).conjugate() == acos(Rational(1, 3))
assert acos(Rational(-1, 3)).conjugate() == acos(Rational(-1, 3))
assert acos(p + n*I).conjugate() == acos(p - n*I)
assert acos(z).conjugate() != acos(conjugate(z))
def test_acos_series():
assert acos(x).series(x, 0, 8) == \
pi/2 - x - x**3/6 - 3*x**5/40 - 5*x**7/112 + O(x**8)
assert acos(x).series(x, 0, 8) == pi/2 - asin(x).series(x, 0, 8)
t5 = acos(x).taylor_term(5, x)
assert t5 == -3*x**5/40
assert acos(x).taylor_term(7, x, t5, 0) == -5*x**7/112
assert acos(x).taylor_term(0, x) == pi/2
assert acos(x).taylor_term(2, x) is S.Zero
def test_acos_rewrite():
assert acos(x).rewrite(log) == pi/2 + I*log(I*x + sqrt(1 - x**2))
assert acos(x).rewrite(atan) == \
atan(sqrt(1 - x**2)/x) + (pi/2)*(1 - x*sqrt(1/x**2))
assert acos(0).rewrite(atan) == S.Pi/2
assert acos(0.5).rewrite(atan) == acos(0.5).rewrite(log)
assert acos(x).rewrite(asin) == S.Pi/2 - asin(x)
assert acos(x).rewrite(acot) == -2*acot((sqrt(-x**2 + 1) + 1)/x) + pi/2
assert acos(x).rewrite(asec) == asec(1/x)
assert acos(x).rewrite(acsc) == -acsc(1/x) + pi/2
def test_acos_fdiff():
assert acos(x).fdiff() == -1/sqrt(1 - x**2)
raises(ArgumentIndexError, lambda: acos(x).fdiff(2))
def test_atan():
assert atan(nan) is nan
assert atan.nargs == FiniteSet(1)
assert atan(oo) == pi/2
assert atan(-oo) == -pi/2
assert atan(zoo) == AccumBounds(-pi/2, pi/2)
assert atan(0) == 0
assert atan(1) == pi/4
assert atan(sqrt(3)) == pi/3
assert atan(-(1 + sqrt(2))) == pi*Rational(-3, 8)
    assert atan(sqrt(5 - 2*sqrt(5))) == pi/5
    assert atan(-sqrt(1 - 2*sqrt(5)/5)) == -pi/10
    assert atan(sqrt(1 + 2*sqrt(5)/5)) == pi*Rational(3, 10)
assert atan(-2 + sqrt(3)) == -pi/12
assert atan(2 + sqrt(3)) == pi*Rational(5, 12)
assert atan(-2 - sqrt(3)) == pi*Rational(-5, 12)
# check round-trip for exact values:
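    # here num*pi/d stays within atan's principal branch (-pi/2, pi/2)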
for d in [5, 6, 8, 10, 12]:
for num in range(-(d//2), d//2 + 1):
if gcd(num, d) == 1:
assert atan(tan(num*pi/d)) == num*pi/d
assert atan(oo) == pi/2
assert atan(x).diff(x) == 1/(1 + x**2)
assert atan(r).is_real is True
assert atan(-2*I) == -I*atanh(2)
assert unchanged(atan, cot(x))
assert atan(cot(Rational(1, 4))) == Rational(-1, 4) + pi/2
assert acot(Rational(1, 4)).is_rational is False
for s in (x, p, n, np, nn, nz, ep, en, enp, enn, enz):
if s.is_real or s.is_extended_real is None:
assert s.is_nonzero is atan(s).is_nonzero
assert s.is_positive is atan(s).is_positive
assert s.is_negative is atan(s).is_negative
assert s.is_nonpositive is atan(s).is_nonpositive
assert s.is_nonnegative is atan(s).is_nonnegative
else:
assert s.is_extended_nonzero is atan(s).is_nonzero
assert s.is_extended_positive is atan(s).is_positive
assert s.is_extended_negative is atan(s).is_negative
assert s.is_extended_nonpositive is atan(s).is_nonpositive
assert s.is_extended_nonnegative is atan(s).is_nonnegative
assert s.is_extended_nonzero is atan(s).is_extended_nonzero
assert s.is_extended_positive is atan(s).is_extended_positive
assert s.is_extended_negative is atan(s).is_extended_negative
assert s.is_extended_nonpositive is atan(s).is_extended_nonpositive
assert s.is_extended_nonnegative is atan(s).is_extended_nonnegative
def test_atan_rewrite():
assert atan(x).rewrite(log) == I*(log(1 - I*x)-log(1 + I*x))/2
assert atan(x).rewrite(asin) == (-asin(1/sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x
assert atan(x).rewrite(acos) == sqrt(x**2)*acos(1/sqrt(x**2 + 1))/x
assert atan(x).rewrite(acot) == acot(1/x)
assert atan(x).rewrite(asec) == sqrt(x**2)*asec(sqrt(x**2 + 1))/x
assert atan(x).rewrite(acsc) == (-acsc(sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x
assert atan(-5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:-5*I})
assert atan(5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:5*I})
def test_atan_fdiff():
assert atan(x).fdiff() == 1/(x**2 + 1)
raises(ArgumentIndexError, lambda: atan(x).fdiff(2))
def test_atan2():
assert atan2.nargs == FiniteSet(2)
assert atan2(0, 0) is S.NaN
assert atan2(0, 1) == 0
assert atan2(1, 1) == pi/4
assert atan2(1, 0) == pi/2
assert atan2(1, -1) == pi*Rational(3, 4)
assert atan2(0, -1) == pi
assert atan2(-1, -1) == pi*Rational(-3, 4)
assert atan2(-1, 0) == -pi/2
assert atan2(-1, 1) == -pi/4
i = symbols('i', imaginary=True)
r = symbols('r', real=True)
eq = atan2(r, i)
ans = -I*log((i + I*r)/sqrt(i**2 + r**2))
reps = ((r, 2), (i, I))
assert eq.subs(reps) == ans.subs(reps)
x = Symbol('x', negative=True)
y = Symbol('y', negative=True)
assert atan2(y, x) == atan(y/x) - pi
y = Symbol('y', nonnegative=True)
assert atan2(y, x) == atan(y/x) + pi
y = Symbol('y')
assert atan2(y, x) == atan2(y, x, evaluate=False)
u = Symbol("u", positive=True)
assert atan2(0, u) == 0
u = Symbol("u", negative=True)
assert atan2(0, u) == pi
assert atan2(y, oo) == 0
    assert atan2(y, -oo) == 2*pi*Heaviside(re(y)) - pi
assert atan2(y, x).rewrite(log) == -I*log((x + I*y)/sqrt(x**2 + y**2))
assert atan2(0, 0) is S.NaN
ex = atan2(y, x) - arg(x + I*y)
assert ex.subs({x:2, y:3}).rewrite(arg) == 0
assert ex.subs({x:2, y:3*I}).rewrite(arg) == -pi - I*log(sqrt(5)*I/5)
assert ex.subs({x:2*I, y:3}).rewrite(arg) == -pi/2 - I*log(sqrt(5)*I)
assert ex.subs({x:2*I, y:3*I}).rewrite(arg) == -pi + atan(Rational(2, 3)) + atan(Rational(3, 2))
i = symbols('i', imaginary=True)
r = symbols('r', real=True)
e = atan2(i, r)
rewrite = e.rewrite(arg)
reps = {i: I, r: -2}
assert rewrite == -I*log(abs(I*i + r)/sqrt(abs(i**2 + r**2))) + arg((I*i + r)/sqrt(i**2 + r**2))
assert (e - rewrite).subs(reps).equals(0)
assert atan2(0, x).rewrite(atan) == Piecewise((pi, re(x) < 0),
(0, Ne(x, 0)),
(nan, True))
assert atan2(0, r).rewrite(atan) == Piecewise((pi, r < 0), (0, Ne(r, 0)), (S.NaN, True))
    assert atan2(0, i).rewrite(atan) == 0
assert atan2(0, r + i).rewrite(atan) == Piecewise((pi, r < 0), (0, True))
assert atan2(y, x).rewrite(atan) == Piecewise(
(2*atan(y/(x + sqrt(x**2 + y**2))), Ne(y, 0)),
(pi, re(x) < 0),
(0, (re(x) > 0) | Ne(im(x), 0)),
(nan, True))
assert conjugate(atan2(x, y)) == atan2(conjugate(x), conjugate(y))
assert diff(atan2(y, x), x) == -y/(x**2 + y**2)
assert diff(atan2(y, x), y) == x/(x**2 + y**2)
assert simplify(diff(atan2(y, x).rewrite(log), x)) == -y/(x**2 + y**2)
assert simplify(diff(atan2(y, x).rewrite(log), y)) == x/(x**2 + y**2)
assert str(atan2(1, 2).evalf(5)) == '0.46365'
raises(ArgumentIndexError, lambda: atan2(x, y).fdiff(3))
def test_issue_17461():
class A(Symbol):
is_extended_real = True
def _eval_evalf(self, prec):
return Float(5.0)
x = A('X')
y = A('Y')
assert abs(atan2(x, y).evalf() - 0.785398163397448) <= 1e-10
def test_acot():
assert acot(nan) is nan
assert acot.nargs == FiniteSet(1)
assert acot(-oo) == 0
assert acot(oo) == 0
assert acot(zoo) == 0
assert acot(1) == pi/4
assert acot(0) == pi/2
assert acot(sqrt(3)/3) == pi/3
assert acot(1/sqrt(3)) == pi/3
assert acot(-1/sqrt(3)) == -pi/3
assert acot(x).diff(x) == -1/(1 + x**2)
assert acot(r).is_extended_real is True
assert acot(I*pi) == -I*acoth(pi)
assert acot(-2*I) == I*acoth(2)
assert acot(x).is_positive is None
assert acot(n).is_positive is False
assert acot(p).is_positive is True
assert acot(I).is_positive is False
assert acot(Rational(1, 4)).is_rational is False
assert unchanged(acot, cot(x))
assert unchanged(acot, tan(x))
assert acot(cot(Rational(1, 4))) == Rational(1, 4)
assert acot(tan(Rational(-1, 4))) == Rational(1, 4) - pi/2
def test_acot_rewrite():
assert acot(x).rewrite(log) == I*(log(1 - I/x)-log(1 + I/x))/2
assert acot(x).rewrite(asin) == x*(-asin(sqrt(-x**2)/sqrt(-x**2 - 1)) + pi/2)*sqrt(x**(-2))
assert acot(x).rewrite(acos) == x*sqrt(x**(-2))*acos(sqrt(-x**2)/sqrt(-x**2 - 1))
assert acot(x).rewrite(atan) == atan(1/x)
assert acot(x).rewrite(asec) == x*sqrt(x**(-2))*asec(sqrt((x**2 + 1)/x**2))
assert acot(x).rewrite(acsc) == x*(-acsc(sqrt((x**2 + 1)/x**2)) + pi/2)*sqrt(x**(-2))
assert acot(-I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:-I/5})
assert acot(I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:I/5})
def test_acot_fdiff():
assert acot(x).fdiff() == -1/(x**2 + 1)
raises(ArgumentIndexError, lambda: acot(x).fdiff(2))
def test_attributes():
assert sin(x).args == (x,)
def test_sincos_rewrite():
assert sin(pi/2 - x) == cos(x)
assert sin(pi - x) == sin(x)
assert cos(pi/2 - x) == sin(x)
assert cos(pi - x) == -cos(x)
def _check_even_rewrite(func, arg):
"""Checks that the expr has been rewritten using f(-x) -> f(x)
arg : -x
"""
return func(arg).args[0] == -arg
def _check_odd_rewrite(func, arg):
"""Checks that the expr has been rewritten using f(-x) -> -f(x)
arg : -x
"""
return func(arg).func.is_Mul
def _check_no_rewrite(func, arg):
"""Checks that the expr is not rewritten"""
return func(arg).args[0] == arg
def test_evenodd_rewrite():
a = cos(2) # negative
b = sin(1) # positive
even = [cos]
odd = [sin, tan, cot, asin, atan, acot]
with_minus = [-1, -2**1024 * E, -pi/105, -x*y, -x - y]
for func in even:
for expr in with_minus:
assert _check_even_rewrite(func, expr)
assert _check_no_rewrite(func, a*b)
assert func(
x - y) == func(y - x) # it doesn't matter which form is canonical
for func in odd:
for expr in with_minus:
assert _check_odd_rewrite(func, expr)
assert _check_no_rewrite(func, a*b)
assert func(
x - y) == -func(y - x) # it doesn't matter which form is canonical
def test_issue_4547():
assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2)
assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2)
assert tan(x).rewrite(cot) == 1/cot(x)
assert cot(x).fdiff() == -1 - cot(x)**2
def test_as_leading_term_issue_5272():
assert sin(x).as_leading_term(x) == x
assert cos(x).as_leading_term(x) == 1
assert tan(x).as_leading_term(x) == x
assert cot(x).as_leading_term(x) == 1/x
assert asin(x).as_leading_term(x) == x
assert acos(x).as_leading_term(x) == x
assert atan(x).as_leading_term(x) == x
assert acot(x).as_leading_term(x) == x
def test_leading_terms():
for func in [sin, cos, tan, cot, asin, acos, atan, acot]:
for arg in (1/x, S.Half):
eq = func(arg)
assert eq.as_leading_term(x) == eq
def test_atan2_expansion():
assert cancel(atan2(x**2, x + 1).diff(x) - atan(x**2/(x + 1)).diff(x)) == 0
assert cancel(atan(y/x).series(y, 0, 5) - atan2(y, x).series(y, 0, 5)
+ atan2(0, x) - atan(0)) == O(y**5)
assert cancel(atan(y/x).series(x, 1, 4) - atan2(y, x).series(x, 1, 4)
+ atan2(y, 1) - atan(y)) == O((x - 1)**4, (x, 1))
assert cancel(atan((y + x)/x).series(x, 1, 3) - atan2(y + x, x).series(x, 1, 3)
+ atan2(1 + y, 1) - atan(1 + y)) == O((x - 1)**3, (x, 1))
assert Matrix([atan2(y, x)]).jacobian([y, x]) == \
Matrix([[x/(y**2 + x**2), -y/(y**2 + x**2)]])
def test_aseries():
def t(n, v, d, e):
assert abs(
n(1/v).evalf() - n(1/x).series(x, dir=d).removeO().subs(x, v)) < e
t(atan, 0.1, '+', 1e-5)
t(atan, -0.1, '-', 1e-5)
t(acot, 0.1, '+', 1e-5)
t(acot, -0.1, '-', 1e-5)
def test_issue_4420():
i = Symbol('i', integer=True)
e = Symbol('e', even=True)
o = Symbol('o', odd=True)
# unknown parity for variable
assert cos(4*i*pi) == 1
assert sin(4*i*pi) == 0
assert tan(4*i*pi) == 0
assert cot(4*i*pi) is zoo
assert cos(3*i*pi) == cos(pi*i) # +/-1
assert sin(3*i*pi) == 0
assert tan(3*i*pi) == 0
assert cot(3*i*pi) is zoo
assert cos(4.0*i*pi) == 1
assert sin(4.0*i*pi) == 0
assert tan(4.0*i*pi) == 0
assert cot(4.0*i*pi) is zoo
assert cos(3.0*i*pi) == cos(pi*i) # +/-1
assert sin(3.0*i*pi) == 0
assert tan(3.0*i*pi) == 0
assert cot(3.0*i*pi) is zoo
assert cos(4.5*i*pi) == cos(0.5*pi*i)
assert sin(4.5*i*pi) == sin(0.5*pi*i)
assert tan(4.5*i*pi) == tan(0.5*pi*i)
assert cot(4.5*i*pi) == cot(0.5*pi*i)
# parity of variable is known
assert cos(4*e*pi) == 1
assert sin(4*e*pi) == 0
assert tan(4*e*pi) == 0
assert cot(4*e*pi) is zoo
assert cos(3*e*pi) == 1
assert sin(3*e*pi) == 0
assert tan(3*e*pi) == 0
assert cot(3*e*pi) is zoo
assert cos(4.0*e*pi) == 1
assert sin(4.0*e*pi) == 0
assert tan(4.0*e*pi) == 0
assert cot(4.0*e*pi) is zoo
assert cos(3.0*e*pi) == 1
assert sin(3.0*e*pi) == 0
assert tan(3.0*e*pi) == 0
assert cot(3.0*e*pi) is zoo
assert cos(4.5*e*pi) == cos(0.5*pi*e)
assert sin(4.5*e*pi) == sin(0.5*pi*e)
assert tan(4.5*e*pi) == tan(0.5*pi*e)
assert cot(4.5*e*pi) == cot(0.5*pi*e)
assert cos(4*o*pi) == 1
assert sin(4*o*pi) == 0
assert tan(4*o*pi) == 0
assert cot(4*o*pi) is zoo
assert cos(3*o*pi) == -1
assert sin(3*o*pi) == 0
assert tan(3*o*pi) == 0
assert cot(3*o*pi) is zoo
assert cos(4.0*o*pi) == 1
assert sin(4.0*o*pi) == 0
assert tan(4.0*o*pi) == 0
assert cot(4.0*o*pi) is zoo
assert cos(3.0*o*pi) == -1
assert sin(3.0*o*pi) == 0
assert tan(3.0*o*pi) == 0
assert cot(3.0*o*pi) is zoo
assert cos(4.5*o*pi) == cos(0.5*pi*o)
assert sin(4.5*o*pi) == sin(0.5*pi*o)
assert tan(4.5*o*pi) == tan(0.5*pi*o)
assert cot(4.5*o*pi) == cot(0.5*pi*o)
# x could be imaginary
assert cos(4*x*pi) == cos(4*pi*x)
assert sin(4*x*pi) == sin(4*pi*x)
assert tan(4*x*pi) == tan(4*pi*x)
assert cot(4*x*pi) == cot(4*pi*x)
assert cos(3*x*pi) == cos(3*pi*x)
assert sin(3*x*pi) == sin(3*pi*x)
assert tan(3*x*pi) == tan(3*pi*x)
assert cot(3*x*pi) == cot(3*pi*x)
assert cos(4.0*x*pi) == cos(4.0*pi*x)
assert sin(4.0*x*pi) == sin(4.0*pi*x)
assert tan(4.0*x*pi) == tan(4.0*pi*x)
assert cot(4.0*x*pi) == cot(4.0*pi*x)
assert cos(3.0*x*pi) == cos(3.0*pi*x)
assert sin(3.0*x*pi) == sin(3.0*pi*x)
assert tan(3.0*x*pi) == tan(3.0*pi*x)
assert cot(3.0*x*pi) == cot(3.0*pi*x)
assert cos(4.5*x*pi) == cos(4.5*pi*x)
assert sin(4.5*x*pi) == sin(4.5*pi*x)
assert tan(4.5*x*pi) == tan(4.5*pi*x)
assert cot(4.5*x*pi) == cot(4.5*pi*x)
def test_inverses():
raises(AttributeError, lambda: sin(x).inverse())
raises(AttributeError, lambda: cos(x).inverse())
assert tan(x).inverse() == atan
assert cot(x).inverse() == acot
raises(AttributeError, lambda: csc(x).inverse())
raises(AttributeError, lambda: sec(x).inverse())
assert asin(x).inverse() == sin
assert acos(x).inverse() == cos
assert atan(x).inverse() == tan
assert acot(x).inverse() == cot
def test_real_imag():
a, b = symbols('a b', real=True)
z = a + b*I
for deep in [True, False]:
assert sin(
z).as_real_imag(deep=deep) == (sin(a)*cosh(b), cos(a)*sinh(b))
assert cos(
z).as_real_imag(deep=deep) == (cos(a)*cosh(b), -sin(a)*sinh(b))
assert tan(z).as_real_imag(deep=deep) == (sin(2*a)/(cos(2*a) +
cosh(2*b)), sinh(2*b)/(cos(2*a) + cosh(2*b)))
assert cot(z).as_real_imag(deep=deep) == (-sin(2*a)/(cos(2*a) -
cosh(2*b)), -sinh(2*b)/(cos(2*a) - cosh(2*b)))
assert sin(a).as_real_imag(deep=deep) == (sin(a), 0)
assert cos(a).as_real_imag(deep=deep) == (cos(a), 0)
assert tan(a).as_real_imag(deep=deep) == (tan(a), 0)
assert cot(a).as_real_imag(deep=deep) == (cot(a), 0)
@XFAIL
def test_sin_cos_with_infinity():
# Test for issue 5196
# https://github.com/sympy/sympy/issues/5196
assert sin(oo) is S.NaN
assert cos(oo) is S.NaN
@slow
def test_sincos_rewrite_sqrt():
# equivalent to testing rewrite(pow)
for p in [1, 3, 5, 17]:
for t in [1, 8]:
n = t*p
# The vertices `exp(i*pi/n)` of a regular `n`-gon can
# be expressed by means of nested square roots if and
# only if `n` is a product of Fermat primes, `p`, and
# powers of 2, `t'. The code aims to check all vertices
# not belonging to an `m`-gon for `m < n`(`gcd(i, n) == 1`).
# For large `n` this makes the test too slow, therefore
# the vertices are limited to those of index `i < 10`.
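            # For example, n = 17 (a Fermat prime) admits nested square
            # roots, while n = 9 = 3**2 would not, since the Fermat
            # prime 3 must not repeat.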
for i in range(1, min((n + 1)//2 + 1, 10)):
if 1 == gcd(i, n):
x = i*pi/n
s1 = sin(x).rewrite(sqrt)
c1 = cos(x).rewrite(sqrt)
assert not s1.has(cos, sin), "fails for %d*pi/%d" % (i, n)
assert not c1.has(cos, sin), "fails for %d*pi/%d" % (i, n)
assert 1e-3 > abs(sin(x.evalf(5)) - s1.evalf(2)), "fails for %d*pi/%d" % (i, n)
assert 1e-3 > abs(cos(x.evalf(5)) - c1.evalf(2)), "fails for %d*pi/%d" % (i, n)
assert cos(pi/14).rewrite(sqrt) == sqrt(cos(pi/7)/2 + S.Half)
assert cos(pi/257).rewrite(sqrt).evalf(64) == cos(pi/257).evalf(64)
assert cos(pi*Rational(-15, 2)/11, evaluate=False).rewrite(
sqrt) == -sqrt(-cos(pi*Rational(4, 11))/2 + S.Half)
assert cos(Mul(2, pi, S.Half, evaluate=False), evaluate=False).rewrite(
sqrt) == -1
e = cos(pi/3/17) # don't use pi/15 since that is caught at instantiation
a = (
-3*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17) + 17)/64 -
3*sqrt(34)*sqrt(sqrt(17) + 17)/128 - sqrt(sqrt(17) +
17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17)
+ sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - sqrt(-sqrt(17)
+ 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 - Rational(1, 32) +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 +
3*sqrt(2)*sqrt(sqrt(17) + 17)/128 + sqrt(34)*sqrt(-sqrt(17) + 17)/128
+ 13*sqrt(2)*sqrt(-sqrt(17) + 17)/128 + sqrt(17)*sqrt(-sqrt(17) +
17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17)
+ sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 + 5*sqrt(17)/32
+ sqrt(3)*sqrt(-sqrt(2)*sqrt(sqrt(17) + 17)*sqrt(sqrt(17)/32 +
sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/8 -
5*sqrt(2)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 -
3*sqrt(2)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 +
sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/32
+ sqrt(34)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 +
sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/2 +
S.Half + sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) +
17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) -
sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) +
6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) -
sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) +
6*sqrt(17) + 34)/32 + sqrt(34)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 +
sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
Rational(15, 32))/32)/2)
assert e.rewrite(sqrt) == a
assert e.n() == a.n()
# coverage of fermatCoords: multiplicity > 1; the following could be
# different but that portion of the code should be tested in some way
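    # pi/9/17 is pi/153 with 153 = 3**2*17, so the prime 3 appears with
    # multiplicity 2 in the denominator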
assert cos(pi/9/17).rewrite(sqrt) == \
sin(pi/9)*sin(pi*Rational(2, 17)) + cos(pi/9)*cos(pi*Rational(2, 17))
@slow
def test_tancot_rewrite_sqrt():
# equivalent to testing rewrite(pow)
for p in [1, 3, 5, 17]:
for t in [1, 8]:
n = t*p
for i in range(1, min((n + 1)//2 + 1, 10)):
if 1 == gcd(i, n):
x = i*pi/n
if 2*i != n and 3*i != 2*n:
t1 = tan(x).rewrite(sqrt)
assert not t1.has(cot, tan), "fails for %d*pi/%d" % (i, n)
                        assert 1e-3 > abs(tan(x.evalf(7)) - t1.evalf(4)), "fails for %d*pi/%d" % (i, n)
if i != 0 and i != n:
c1 = cot(x).rewrite(sqrt)
assert not c1.has(cot, tan), "fails for %d*pi/%d" % (i, n)
                        assert 1e-3 > abs(cot(x.evalf(7)) - c1.evalf(4)), "fails for %d*pi/%d" % (i, n)
def test_sec():
x = symbols('x', real=True)
z = symbols('z')
assert sec.nargs == FiniteSet(1)
assert sec(zoo) is nan
assert sec(0) == 1
assert sec(pi) == -1
assert sec(pi/2) is zoo
assert sec(-pi/2) is zoo
assert sec(pi/6) == 2*sqrt(3)/3
assert sec(pi/3) == 2
assert sec(pi*Rational(5, 2)) is zoo
assert sec(pi*Rational(9, 7)) == -sec(pi*Rational(2, 7))
assert sec(pi*Rational(3, 4)) == -sqrt(2) # issue 8421
assert sec(I) == 1/cosh(1)
assert sec(x*I) == 1/cosh(x)
assert sec(-x) == sec(x)
assert sec(asec(x)) == x
assert sec(z).conjugate() == sec(conjugate(z))
assert (sec(z).as_real_imag() ==
(cos(re(z))*cosh(im(z))/(sin(re(z))**2*sinh(im(z))**2 +
cos(re(z))**2*cosh(im(z))**2),
sin(re(z))*sinh(im(z))/(sin(re(z))**2*sinh(im(z))**2 +
cos(re(z))**2*cosh(im(z))**2)))
assert sec(x).expand(trig=True) == 1/cos(x)
assert sec(2*x).expand(trig=True) == 1/(2*cos(x)**2 - 1)
assert sec(x).is_extended_real == True
assert sec(z).is_real == None
assert sec(a).is_algebraic is None
assert sec(na).is_algebraic is False
assert sec(x).as_leading_term() == sec(x)
assert sec(0).is_finite == True
assert sec(x).is_finite == None
assert sec(pi/2).is_finite == False
assert series(sec(x), x, x0=0, n=6) == 1 + x**2/2 + 5*x**4/24 + O(x**6)
# https://github.com/sympy/sympy/issues/7166
assert series(sqrt(sec(x))) == 1 + x**2/4 + 7*x**4/96 + O(x**6)
# https://github.com/sympy/sympy/issues/7167
assert (series(sqrt(sec(x)), x, x0=pi*3/2, n=4) ==
1/sqrt(x - pi*Rational(3, 2)) + (x - pi*Rational(3, 2))**Rational(3, 2)/12 +
(x - pi*Rational(3, 2))**Rational(7, 2)/160 + O((x - pi*Rational(3, 2))**4, (x, pi*Rational(3, 2))))
assert sec(x).diff(x) == tan(x)*sec(x)
# Taylor Term checks
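    # taylor_term(n, z) returns the degree-n term of the Maclaurin series;
    # sec is even, so odd-order terms vanish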
assert sec(z).taylor_term(4, z) == 5*z**4/24
assert sec(z).taylor_term(6, z) == 61*z**6/720
assert sec(z).taylor_term(5, z) == 0
def test_sec_rewrite():
assert sec(x).rewrite(exp) == 1/(exp(I*x)/2 + exp(-I*x)/2)
assert sec(x).rewrite(cos) == 1/cos(x)
assert sec(x).rewrite(tan) == (tan(x/2)**2 + 1)/(-tan(x/2)**2 + 1)
assert sec(x).rewrite(pow) == sec(x)
assert sec(x).rewrite(sqrt) == sec(x)
assert sec(z).rewrite(cot) == (cot(z/2)**2 + 1)/(cot(z/2)**2 - 1)
assert sec(x).rewrite(sin) == 1 / sin(x + pi / 2, evaluate=False)
assert sec(x).rewrite(csc) == csc(-x + pi/2, evaluate=False)
def test_sec_fdiff():
assert sec(x).fdiff() == tan(x)*sec(x)
raises(ArgumentIndexError, lambda: sec(x).fdiff(2))
def test_csc():
x = symbols('x', real=True)
z = symbols('z')
# https://github.com/sympy/sympy/issues/6707
cosecant = csc('x')
alternate = 1/sin('x')
assert cosecant.equals(alternate) == True
assert alternate.equals(cosecant) == True
assert csc.nargs == FiniteSet(1)
assert csc(0) is zoo
assert csc(pi) is zoo
assert csc(zoo) is nan
assert csc(pi/2) == 1
assert csc(-pi/2) == -1
assert csc(pi/6) == 2
assert csc(pi/3) == 2*sqrt(3)/3
assert csc(pi*Rational(5, 2)) == 1
assert csc(pi*Rational(9, 7)) == -csc(pi*Rational(2, 7))
assert csc(pi*Rational(3, 4)) == sqrt(2) # issue 8421
assert csc(I) == -I/sinh(1)
assert csc(x*I) == -I/sinh(x)
assert csc(-x) == -csc(x)
assert csc(acsc(x)) == x
assert csc(z).conjugate() == csc(conjugate(z))
assert (csc(z).as_real_imag() ==
(sin(re(z))*cosh(im(z))/(sin(re(z))**2*cosh(im(z))**2 +
cos(re(z))**2*sinh(im(z))**2),
-cos(re(z))*sinh(im(z))/(sin(re(z))**2*cosh(im(z))**2 +
cos(re(z))**2*sinh(im(z))**2)))
assert csc(x).expand(trig=True) == 1/sin(x)
assert csc(2*x).expand(trig=True) == 1/(2*sin(x)*cos(x))
assert csc(x).is_extended_real == True
assert csc(z).is_real == None
assert csc(a).is_algebraic is None
assert csc(na).is_algebraic is False
assert csc(x).as_leading_term() == csc(x)
assert csc(0).is_finite == False
assert csc(x).is_finite == None
assert csc(pi/2).is_finite == True
assert series(csc(x), x, x0=pi/2, n=6) == \
1 + (x - pi/2)**2/2 + 5*(x - pi/2)**4/24 + O((x - pi/2)**6, (x, pi/2))
assert series(csc(x), x, x0=0, n=6) == \
1/x + x/6 + 7*x**3/360 + 31*x**5/15120 + O(x**6)
assert csc(x).diff(x) == -cot(x)*csc(x)
assert csc(x).taylor_term(2, x) == 0
assert csc(x).taylor_term(3, x) == 7*x**3/360
assert csc(x).taylor_term(5, x) == 31*x**5/15120
raises(ArgumentIndexError, lambda: csc(x).fdiff(2))
def test_asec():
z = Symbol('z', zero=True)
assert asec(z) is zoo
assert asec(nan) is nan
assert asec(1) == 0
assert asec(-1) == pi
assert asec(oo) == pi/2
assert asec(-oo) == pi/2
assert asec(zoo) == pi/2
assert asec(sec(pi*Rational(13, 4))) == pi*Rational(3, 4)
assert asec(1 + sqrt(5)) == pi*Rational(2, 5)
assert asec(2/sqrt(3)) == pi/6
assert asec(sqrt(4 - 2*sqrt(2))) == pi/8
assert asec(-sqrt(4 + 2*sqrt(2))) == pi*Rational(5, 8)
assert asec(sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(3, 10)
assert asec(-sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(7, 10)
assert asec(sqrt(2) - sqrt(6)) == pi*Rational(11, 12)
assert asec(x).diff(x) == 1/(x**2*sqrt(1 - 1/x**2))
assert asec(x).as_leading_term(x) == log(x)
assert asec(x).rewrite(log) == I*log(sqrt(1 - 1/x**2) + I/x) + pi/2
assert asec(x).rewrite(asin) == -asin(1/x) + pi/2
assert asec(x).rewrite(acos) == acos(1/x)
assert asec(x).rewrite(atan) == (2*atan(x + sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x
assert asec(x).rewrite(acot) == (2*acot(x - sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x
assert asec(x).rewrite(acsc) == -acsc(x) + pi/2
raises(ArgumentIndexError, lambda: asec(x).fdiff(2))
def test_asec_is_real():
assert asec(S.Half).is_real is False
n = Symbol('n', positive=True, integer=True)
assert asec(n).is_extended_real is True
assert asec(x).is_real is None
assert asec(r).is_real is None
t = Symbol('t', real=False, finite=True)
assert asec(t).is_real is False
def test_acsc():
assert acsc(nan) is nan
assert acsc(1) == pi/2
assert acsc(-1) == -pi/2
assert acsc(oo) == 0
assert acsc(-oo) == 0
assert acsc(zoo) == 0
assert acsc(0) is zoo
assert acsc(csc(3)) == -3 + pi
assert acsc(csc(4)) == -4 + pi
assert acsc(csc(6)) == 6 - 2*pi
assert unchanged(acsc, csc(x))
assert unchanged(acsc, sec(x))
assert acsc(2/sqrt(3)) == pi/3
assert acsc(csc(pi*Rational(13, 4))) == -pi/4
assert acsc(sqrt(2 + 2*sqrt(5)/5)) == pi/5
assert acsc(-sqrt(2 + 2*sqrt(5)/5)) == -pi/5
assert acsc(-2) == -pi/6
assert acsc(-sqrt(4 + 2*sqrt(2))) == -pi/8
assert acsc(sqrt(4 - 2*sqrt(2))) == pi*Rational(3, 8)
assert acsc(1 + sqrt(5)) == pi/10
assert acsc(sqrt(2) - sqrt(6)) == pi*Rational(-5, 12)
assert acsc(x).diff(x) == -1/(x**2*sqrt(1 - 1/x**2))
assert acsc(x).as_leading_term(x) == log(x)
assert acsc(x).rewrite(log) == -I*log(sqrt(1 - 1/x**2) + I/x)
assert acsc(x).rewrite(asin) == asin(1/x)
assert acsc(x).rewrite(acos) == -acos(1/x) + pi/2
assert acsc(x).rewrite(atan) == (-atan(sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x
assert acsc(x).rewrite(acot) == (-acot(1/sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x
assert acsc(x).rewrite(asec) == -asec(x) + pi/2
raises(ArgumentIndexError, lambda: acsc(x).fdiff(2))
def test_csc_rewrite():
assert csc(x).rewrite(pow) == csc(x)
assert csc(x).rewrite(sqrt) == csc(x)
assert csc(x).rewrite(exp) == 2*I/(exp(I*x) - exp(-I*x))
assert csc(x).rewrite(sin) == 1/sin(x)
assert csc(x).rewrite(tan) == (tan(x/2)**2 + 1)/(2*tan(x/2))
assert csc(x).rewrite(cot) == (cot(x/2)**2 + 1)/(2*cot(x/2))
assert csc(x).rewrite(cos) == 1/cos(x - pi/2, evaluate=False)
assert csc(x).rewrite(sec) == sec(-x + pi/2, evaluate=False)
# issue 17349
assert csc(1 - exp(-besselj(I, I))).rewrite(cos) == \
-1/cos(-pi/2 - 1 + cos(I*besselj(I, I)) +
I*cos(-pi/2 + I*besselj(I, I), evaluate=False), evaluate=False)
def test_issue_8653():
n = Symbol('n', integer=True)
assert sin(n).is_irrational is None
assert cos(n).is_irrational is None
assert tan(n).is_irrational is None
def test_issue_9157():
n = Symbol('n', integer=True, positive=True)
assert atan(n - 1).is_nonnegative is True
def test_trig_period():
x, y = symbols('x, y')
assert sin(x).period() == 2*pi
assert cos(x).period() == 2*pi
assert tan(x).period() == pi
assert cot(x).period() == pi
assert sec(x).period() == 2*pi
assert csc(x).period() == 2*pi
assert sin(2*x).period() == pi
assert cot(4*x - 6).period() == pi/4
assert cos((-3)*x).period() == pi*Rational(2, 3)
assert cos(x*y).period(x) == 2*pi/abs(y)
assert sin(3*x*y + 2*pi).period(y) == 2*pi/abs(3*x)
assert tan(3*x).period(y) is S.Zero
raises(NotImplementedError, lambda: sin(x**2).period(x))
def test_issue_7171():
assert sin(x).rewrite(sqrt) == sin(x)
assert sin(x).rewrite(pow) == sin(x)
def test_issue_11864():
w, k = symbols('w, k', real=True)
F = Piecewise((1, Eq(2*pi*k, 0)), (sin(pi*k)/(pi*k), True))
soln = Piecewise((1, Eq(2*pi*k, 0)), (sinc(pi*k), True))
assert F.rewrite(sinc) == soln
def test_real_assumptions():
z = Symbol('z', real=False, finite=True)
assert sin(z).is_real is None
assert cos(z).is_real is None
assert tan(z).is_real is False
assert sec(z).is_real is None
assert csc(z).is_real is None
assert cot(z).is_real is False
assert asin(p).is_real is None
assert asin(n).is_real is None
assert asec(p).is_real is None
assert asec(n).is_real is None
assert acos(p).is_real is None
assert acos(n).is_real is None
assert acsc(p).is_real is None
assert acsc(n).is_real is None
assert atan(p).is_positive is True
assert atan(n).is_negative is True
assert acot(p).is_positive is True
assert acot(n).is_negative is True
def test_issue_14320():
assert asin(sin(2)) == -2 + pi and (-pi/2 <= -2 + pi <= pi/2) and sin(2) == sin(-2 + pi)
assert asin(cos(2)) == -2 + pi/2 and (-pi/2 <= -2 + pi/2 <= pi/2) and cos(2) == sin(-2 + pi/2)
assert acos(sin(2)) == -pi/2 + 2 and (0 <= -pi/2 + 2 <= pi) and sin(2) == cos(-pi/2 + 2)
assert acos(cos(20)) == -6*pi + 20 and (0 <= -6*pi + 20 <= pi) and cos(20) == cos(-6*pi + 20)
assert acos(cos(30)) == -30 + 10*pi and (0 <= -30 + 10*pi <= pi) and cos(30) == cos(-30 + 10*pi)
assert atan(tan(17)) == -5*pi + 17 and (-pi/2 < -5*pi + 17 < pi/2) and tan(17) == tan(-5*pi + 17)
assert atan(tan(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 < pi/2) and tan(15) == tan(-5*pi + 15)
assert atan(cot(12)) == -12 + pi*Rational(7, 2) and (-pi/2 < -12 + pi*Rational(7, 2) < pi/2) and cot(12) == tan(-12 + pi*Rational(7, 2))
assert acot(cot(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 <= pi/2) and cot(15) == cot(-5*pi + 15)
assert acot(tan(19)) == -19 + pi*Rational(13, 2) and (-pi/2 < -19 + pi*Rational(13, 2) <= pi/2) and tan(19) == cot(-19 + pi*Rational(13, 2))
assert asec(sec(11)) == -11 + 4*pi and (0 <= -11 + 4*pi <= pi) and cos(11) == cos(-11 + 4*pi)
assert asec(csc(13)) == -13 + pi*Rational(9, 2) and (0 <= -13 + pi*Rational(9, 2) <= pi) and sin(13) == cos(-13 + pi*Rational(9, 2))
assert acsc(csc(14)) == -4*pi + 14 and (-pi/2 <= -4*pi + 14 <= pi/2) and sin(14) == sin(-4*pi + 14)
assert acsc(sec(10)) == pi*Rational(-7, 2) + 10 and (-pi/2 <= pi*Rational(-7, 2) + 10 <= pi/2) and cos(10) == sin(pi*Rational(-7, 2) + 10)
def test_issue_14543():
assert sec(2*pi + 11) == sec(11)
assert sec(2*pi - 11) == sec(11)
assert sec(pi + 11) == -sec(11)
assert sec(pi - 11) == -sec(11)
assert csc(2*pi + 17) == csc(17)
assert csc(2*pi - 17) == -csc(17)
assert csc(pi + 17) == -csc(17)
assert csc(pi - 17) == csc(17)
x = Symbol('x')
assert csc(pi/2 + x) == sec(x)
assert csc(pi/2 - x) == sec(x)
assert csc(pi*Rational(3, 2) + x) == -sec(x)
assert csc(pi*Rational(3, 2) - x) == -sec(x)
assert sec(pi/2 - x) == csc(x)
assert sec(pi/2 + x) == -csc(x)
assert sec(pi*Rational(3, 2) + x) == csc(x)
assert sec(pi*Rational(3, 2) - x) == -csc(x)
[
51772,
51775
],
[
51851,
51854
],
[
51925,
51928
],
[
55891,
55894
],
[
55901,
55904
],
[
57515,
57518
],
[
57611,
57614
],
[
57635,
57638
],
[
59251,
59254
],
[
59263,
59266
],
[
59723,
59726
],
[
60446,
60449
],
[
60470,
60473
],
[
60607,
60610
],
[
60738,
60741
],
[
60791,
60794
],
[
64683,
64686
],
[
64693,
64696
],
[
65242,
65245
],
[
65536,
65539
],
[
65742,
65745
],
[
65916,
65919
],
[
66098,
66101
],
[
66122,
66125
],
[
66140,
66143
],
[
66163,
66166
],
[
66273,
66276
],
[
66480,
66483
],
[
67152,
67155
],
[
67206,
67209
],
[
67216,
67219
],
[
67313,
67316
],
[
67344,
67347
],
[
67396,
67399
],
[
68410,
68413
],
[
68527,
68530
],
[
68538,
68541
],
[
68668,
68671
],
[
8532,
8535
],
[
49516,
49519
],
[
66041,
66044
]
],
[
[
64,
66
],
[
2079,
2081
],
[
2093,
2095
],
[
2115,
2117
],
[
2130,
2132
],
[
2187,
2189
],
[
2212,
2214
],
[
2238,
2240
],
[
2265,
2267
],
[
2299,
2301
],
[
2333,
2335
],
[
2358,
2360
],
[
2385,
2387
],
[
2412,
2414
],
[
2451,
2453
],
[
2573,
2575
],
[
2622,
2624
],
[
2657,
2659
],
[
2685,
2687
],
[
2711,
2713
],
[
2735,
2737
],
[
2784,
2786
],
[
2823,
2825
],
[
2886,
2888
],
[
2926,
2928
],
[
2974,
2976
],
[
3027,
3029
],
[
3090,
3092
],
[
3122,
3124
],
[
3162,
3164
],
[
3215,
3217
],
[
3270,
3272
],
[
3331,
3333
],
[
3392,
3394
],
[
3418,
3420
],
[
3452,
3454
],
[
3478,
3480
],
[
3512,
3514
],
[
3539,
3541
],
[
3573,
3575
],
[
3600,
3602
],
[
3635,
3637
],
[
3666,
3668
],
[
3701,
3703
],
[
3748,
3750
],
[
3802,
3804
],
[
3850,
3852
],
[
3910,
3912
],
[
3972,
3974
],
[
4035,
4037
],
[
4065,
4067
],
[
4088,
4090
],
[
4119,
4121
],
[
4143,
4145
],
[
4175,
4177
],
[
4198,
4200
],
[
4229,
4231
],
[
4289,
4291
],
[
4318,
4320
],
[
4345,
4347
],
[
4361,
4363
],
[
4580,
4582
],
[
5075,
5077
],
[
5336,
5338
],
[
5368,
5370
],
[
5442,
5444
],
[
5527,
5529
],
[
5602,
5604
],
[
6949,
6951
],
[
7019,
7021
],
[
8715,
8717
],
[
8751,
8753
],
[
8786,
8788
],
[
8822,
8824
],
[
8857,
8859
],
[
8891,
8893
],
[
8927,
8929
],
[
8962,
8964
],
[
8998,
9000
],
[
9033,
9035
],
[
9067,
9069
],
[
9102,
9104
],
[
9135,
9137
],
[
9170,
9172
],
[
9199,
9201
],
[
9234,
9236
],
[
9283,
9285
],
[
9331,
9333
],
[
9366,
9368
],
[
9415,
9417
],
[
9463,
9465
],
[
9498,
9500
],
[
9546,
9548
],
[
9594,
9596
],
[
9629,
9631
],
[
9677,
9679
],
[
9725,
9727
],
[
9760,
9762
],
[
9796,
9798
],
[
9832,
9834
],
[
10477,
10479
],
[
10491,
10493
],
[
10511,
10513
],
[
10525,
10527
],
[
10577,
10579
],
[
10604,
10606
],
[
10630,
10632
],
[
10657,
10659
],
[
10699,
10701
],
[
10741,
10743
],
[
10847,
10849
],
[
10875,
10877
],
[
10914,
10916
],
[
10940,
10942
],
[
10967,
10969
],
[
10993,
10995
],
[
11020,
11022
],
[
11045,
11047
],
[
11076,
11078
],
[
11131,
11133
],
[
11171,
11173
],
[
11210,
11212
],
[
11272,
11274
],
[
11335,
11337
],
[
11375,
11377
],
[
11414,
11416
],
[
11475,
11477
],
[
11538,
11540
],
[
11591,
11593
],
[
11644,
11646
],
[
11671,
11673
],
[
11705,
11707
],
[
11732,
11734
],
[
11766,
11768
],
[
11793,
11795
],
[
11827,
11829
],
[
11853,
11855
],
[
11888,
11890
],
[
11919,
11921
],
[
11954,
11956
],
[
12001,
12003
],
[
12048,
12050
],
[
12109,
12111
],
[
12169,
12171
],
[
12232,
12234
],
[
12263,
12265
],
[
12286,
12288
],
[
12317,
12319
],
[
12341,
12343
],
[
12373,
12375
],
[
12396,
12398
],
[
12428,
12430
],
[
12485,
12487
],
[
12501,
12503
],
[
12718,
12720
],
[
12752,
12754
],
[
12802,
12804
],
[
12836,
12838
],
[
12944,
12946
],
[
13169,
13171
],
[
13180,
13182
],
[
13211,
13213
],
[
13222,
13224
],
[
13251,
13253
],
[
13272,
13274
],
[
14602,
14604
],
[
14669,
14671
],
[
16690,
16692
],
[
16704,
16706
],
[
16726,
16728
],
[
16741,
16743
],
[
16798,
16800
],
[
16823,
16825
],
[
16849,
16851
],
[
16876,
16878
],
[
16910,
16912
],
[
16935,
16937
],
[
16963,
16965
],
[
17005,
17007
],
[
17037,
17039
],
[
17084,
17086
],
[
17115,
17117
],
[
17153,
17155
],
[
17197,
17199
],
[
17242,
17244
],
[
17286,
17288
],
[
17343,
17345
],
[
17402,
17404
],
[
17463,
17465
],
[
17498,
17500
],
[
17533,
17535
],
[
17580,
17582
],
[
17629,
17631
],
[
17666,
17668
],
[
17730,
17732
],
[
17780,
17782
],
[
17830,
17832
],
[
17877,
17879
],
[
17937,
17939
],
[
17999,
18001
],
[
18063,
18065
],
[
18101,
18103
],
[
18151,
18153
],
[
18202,
18204
],
[
18254,
18256
],
[
18322,
18324
],
[
18403,
18405
],
[
18483,
18485
],
[
18564,
18566
],
[
18646,
18648
],
[
18728,
18730
],
[
18809,
18811
],
[
18927,
18929
],
[
18956,
18958
],
[
18983,
18985
],
[
18999,
19001
],
[
19227,
19229
],
[
19254,
19256
],
[
19288,
19290
],
[
19316,
19318
],
[
19350,
19352
],
[
19378,
19380
],
[
19413,
19415
],
[
19441,
19443
],
[
19463,
19465
],
[
19493,
19495
],
[
20934,
20936
],
[
20945,
20947
],
[
20958,
20960
],
[
21000,
21002
],
[
21011,
21013
],
[
21024,
21026
],
[
21060,
21062
],
[
21087,
21089
],
[
21109,
21111
],
[
21150,
21152
],
[
21219,
21221
],
[
21284,
21286
],
[
21379,
21381
],
[
22314,
22316
],
[
22979,
22981
],
[
23302,
23304
],
[
23317,
23319
],
[
23339,
23341
],
[
23353,
23355
],
[
23409,
23411
],
[
23422,
23424
],
[
23435,
23437
],
[
23455,
23457
],
[
23469,
23471
],
[
23483,
23485
],
[
23503,
23505
],
[
23530,
23532
],
[
23556,
23558
],
[
23595,
23597
],
[
23635,
23637
],
[
23669,
23671
],
[
23718,
23720
],
[
23749,
23751
],
[
23787,
23789
],
[
23831,
23833
],
[
23876,
23878
],
[
23909,
23911
],
[
23942,
23944
],
[
23987,
23989
],
[
24034,
24036
],
[
24070,
24072
],
[
24120,
24122
],
[
24169,
24171
],
[
24220,
24222
],
[
24257,
24259
],
[
24308,
24310
],
[
24358,
24360
],
[
24411,
24413
],
[
24478,
24480
],
[
24559,
24561
],
[
24640,
24642
],
[
24721,
24723
],
[
24803,
24805
],
[
24884,
24886
],
[
24965,
24967
],
[
25084,
25086
],
[
25101,
25103
],
[
25282,
25284
],
[
25309,
25311
],
[
25343,
25345
],
[
25371,
25373
],
[
25405,
25407
],
[
25433,
25435
],
[
25468,
25470
],
[
25496,
25498
],
[
25531,
25533
],
[
25561,
25563
],
[
25755,
25757
],
[
25822,
25824
],
[
25846,
25848
],
[
25870,
25872
],
[
25932,
25934
],
[
25956,
25958
],
[
25980,
25982
],
[
26087,
26089
],
[
26111,
26113
],
[
26135,
26137
],
[
26187,
26189
],
[
26211,
26213
],
[
26235,
26237
],
[
26526,
26528
],
[
26559,
26561
],
[
26567,
26569
],
[
26974,
26976
],
[
27752,
27754
],
[
27803,
27805
],
[
27827,
27829
],
[
27886,
27888
],
[
27927,
27929
],
[
27951,
27953
],
[
28000,
28002
],
[
28027,
28029
],
[
28049,
28051
],
[
28077,
28079
],
[
28126,
28128
],
[
28212,
28214
],
[
28302,
28304
],
[
29354,
29356
],
[
30146,
30148
],
[
30179,
30181
],
[
30209,
30211
],
[
30222,
30224
],
[
30242,
30244
],
[
30255,
30257
],
[
30274,
30276
],
[
30303,
30305
],
[
30323,
30325
],
[
30353,
30355
],
[
31031,
31033
],
[
31060,
31062
],
[
31095,
31097
],
[
31132,
31134
],
[
31167,
31169
],
[
31204,
31206
],
[
31251,
31253
],
[
31300,
31302
],
[
31332,
31334
],
[
31374,
31376
],
[
31421,
31423
],
[
31470,
31472
],
[
31511,
31513
],
[
31555,
31557
],
[
31606,
31608
],
[
31659,
31661
],
[
31847,
31849
],
[
31859,
31861
],
[
32296,
32298
],
[
32357,
32359
],
[
32963,
32965
],
[
33374,
33376
],
[
33406,
33408
],
[
33447,
33449
],
[
33512,
33514
],
[
33545,
33547
],
[
33581,
33583
],
[
33771,
33773
],
[
33785,
33787
],
[
33815,
33817
],
[
34660,
34662
],
[
34751,
34753
],
[
34944,
34946
],
[
35058,
35060
],
[
35168,
35170
],
[
35423,
35425
],
[
35523,
35525
],
[
35767,
35769
],
[
35797,
35799
],
[
35839,
35841
],
[
35845,
35847
],
[
35898,
35900
],
[
35931,
35933
],
[
35971,
35973
],
[
36034,
36036
],
[
36086,
36088
],
[
36138,
36140
],
[
36191,
36193
],
[
36229,
36231
],
[
36281,
36283
],
[
36489,
36491
],
[
36503,
36505
],
[
36532,
36534
],
[
36747,
36749
],
[
38073,
38075
],
[
38341,
38343
],
[
38779,
38781
],
[
38810,
38812
],
[
38842,
38844
],
[
38887,
38889
],
[
38918,
38920
],
[
38965,
38967
],
[
38998,
39000
],
[
39319,
39321
],
[
39398,
39400
],
[
39600,
39602
],
[
39664,
39666
],
[
39686,
39688
],
[
39933,
39935
],
[
40007,
40009
],
[
40083,
40085
],
[
40474,
40476
],
[
40657,
40659
],
[
40796,
40798
],
[
40943,
40945
],
[
41893,
41895
],
[
41920,
41922
],
[
41955,
41957
],
[
41990,
41992
],
[
42027,
42029
],
[
42140,
42142
],
[
42156,
42158
],
[
42590,
42592
],
[
42765,
42767
],
[
43067,
43069
],
[
43462,
43464
],
[
43497,
43499
],
[
43530,
43532
],
[
43565,
43567
],
[
44208,
44210
],
[
46635,
46637
],
[
46663,
46665
],
[
46691,
46693
],
[
46719,
46721
],
[
46750,
46752
],
[
46761,
46763
],
[
46794,
46796
],
[
46822,
46824
],
[
46850,
46852
],
[
46883,
46885
],
[
46913,
46915
],
[
46943,
46945
],
[
46973,
46975
],
[
47006,
47008
],
[
47017,
47019
],
[
47052,
47054
],
[
47082,
47084
],
[
47112,
47114
],
[
47145,
47147
],
[
47160,
47162
],
[
47187,
47189
],
[
47202,
47204
],
[
47229,
47231
],
[
47244,
47246
],
[
47271,
47273
],
[
47286,
47288
],
[
47346,
47348
],
[
47374,
47376
],
[
47402,
47404
],
[
47430,
47432
],
[
47461,
47463
],
[
47489,
47491
],
[
47517,
47519
],
[
47545,
47547
],
[
47578,
47580
],
[
47608,
47610
],
[
47638,
47640
],
[
47668,
47670
],
[
47701,
47703
],
[
47731,
47733
],
[
47761,
47763
],
[
47791,
47793
],
[
47824,
47826
],
[
47839,
47841
],
[
47866,
47868
],
[
47881,
47883
],
[
47908,
47910
],
[
47923,
47925
],
[
47950,
47952
],
[
47965,
47967
],
[
47991,
47993
],
[
48019,
48021
],
[
48047,
48049
],
[
48075,
48077
],
[
48106,
48108
],
[
48135,
48137
],
[
48163,
48165
],
[
48191,
48193
],
[
48224,
48226
],
[
48254,
48256
],
[
48284,
48286
],
[
48314,
48316
],
[
48347,
48349
],
[
48378,
48380
],
[
48408,
48410
],
[
48438,
48440
],
[
48471,
48473
],
[
48486,
48488
],
[
48513,
48515
],
[
48528,
48530
],
[
48555,
48557
],
[
48570,
48572
],
[
48597,
48599
],
[
48612,
48614
],
[
48665,
48667
],
[
48678,
48680
],
[
48703,
48705
],
[
48716,
48718
],
[
48741,
48743
],
[
48754,
48756
],
[
48779,
48781
],
[
48792,
48794
],
[
48818,
48820
],
[
48831,
48833
],
[
48856,
48858
],
[
48869,
48871
],
[
48894,
48896
],
[
48907,
48909
],
[
48932,
48934
],
[
48945,
48947
],
[
48973,
48975
],
[
48988,
48990
],
[
49015,
49017
],
[
49030,
49032
],
[
49057,
49059
],
[
49072,
49074
],
[
49099,
49101
],
[
49114,
49116
],
[
49142,
49144
],
[
49157,
49159
],
[
49184,
49186
],
[
49199,
49201
],
[
49226,
49228
],
[
49241,
49243
],
[
49268,
49270
],
[
49283,
49285
],
[
49311,
49313
],
[
49326,
49328
],
[
49353,
49355
],
[
49368,
49370
],
[
49395,
49397
],
[
49410,
49412
],
[
49437,
49439
],
[
49452,
49454
],
[
51632,
51634
],
[
52102,
52104
],
[
52135,
52137
],
[
52168,
52170
],
[
52207,
52209
],
[
52240,
52242
],
[
52317,
52319
],
[
52371,
52373
],
[
52457,
52459
],
[
55855,
55857
],
[
55895,
55897
],
[
55905,
55907
],
[
55931,
55933
],
[
55941,
55943
],
[
56232,
56234
],
[
56982,
56984
],
[
57007,
57009
],
[
57036,
57038
],
[
57064,
57066
],
[
57100,
57102
],
[
57126,
57128
],
[
57167,
57169
],
[
57194,
57196
],
[
57228,
57230
],
[
58131,
58133
],
[
58440,
58442
],
[
58479,
58481
],
[
58505,
58507
],
[
58562,
58564
],
[
58610,
58612
],
[
58637,
58639
],
[
59271,
59273
],
[
59415,
59417
],
[
59903,
59905
],
[
59957,
59959
],
[
59984,
59986
],
[
60011,
60013
],
[
60037,
60039
],
[
60073,
60075
],
[
60112,
60114
],
[
60139,
60141
],
[
60173,
60175
],
[
61101,
61103
],
[
61158,
61160
],
[
61191,
61193
],
[
61211,
61213
],
[
61232,
61234
],
[
61246,
61248
],
[
61756,
61758
],
[
61782,
61784
],
[
61811,
61813
],
[
61840,
61842
],
[
61866,
61868
],
[
61890,
61892
],
[
61940,
61942
],
[
61988,
61990
],
[
62033,
62035
],
[
62079,
62081
],
[
62139,
62141
],
[
62201,
62203
],
[
62258,
62260
],
[
62451,
62453
],
[
62505,
62507
],
[
62622,
62624
],
[
62707,
62709
],
[
62773,
62775
],
[
63216,
63218
],
[
63245,
63247
],
[
63386,
63388
],
[
63421,
63423
],
[
63457,
63459
],
[
63561,
63563
],
[
63586,
63588
],
[
63611,
63613
],
[
63658,
63660
],
[
63707,
63709
],
[
63736,
63738
],
[
63783,
63785
],
[
63828,
63830
],
[
63878,
63880
],
[
63922,
63924
],
[
64210,
64212
],
[
64276,
64278
],
[
64358,
64360
],
[
64424,
64426
],
[
64874,
64876
],
[
64939,
64941
],
[
65057,
65059
],
[
65116,
65118
],
[
65557,
65559
],
[
65592,
65594
],
[
65625,
65627
],
[
65658,
65660
],
[
65693,
65695
],
[
65728,
65730
],
[
65763,
65765
],
[
65802,
65804
],
[
65842,
65844
],
[
65895,
65897
],
[
65930,
65932
],
[
65949,
65951
],
[
66261,
66263
],
[
66277,
66279
],
[
66284,
66286
],
[
66328,
66330
],
[
66345,
66347
],
[
67168,
67170
],
[
67177,
67179
],
[
67190,
67192
],
[
67196,
67198
],
[
67225,
67227
],
[
67261,
67263
],
[
67272,
67274
],
[
67285,
67287
],
[
67293,
67295
],
[
67322,
67324
],
[
67356,
67358
],
[
67376,
67378
],
[
67388,
67390
],
[
67411,
67413
],
[
67452,
67454
],
[
67473,
67475
],
[
67484,
67486
],
[
67510,
67512
],
[
67556,
67558
],
[
67578,
67580
],
[
67584,
67586
],
[
67616,
67618
],
[
67652,
67654
],
[
67666,
67668
],
[
67676,
67678
],
[
67686,
67688
],
[
67714,
67716
],
[
67754,
67756
],
[
67768,
67770
],
[
67778,
67780
],
[
67788,
67790
],
[
67816,
67818
],
[
67859,
67861
],
[
67883,
67885
],
[
67896,
67898
],
[
67916,
67918
],
[
67947,
67949
],
[
67997,
67999
],
[
68011,
68013
],
[
68021,
68023
],
[
68032,
68034
],
[
68060,
68062
],
[
68103,
68105
],
[
68128,
68130
],
[
68141,
68143
],
[
68163,
68165
],
[
68194,
68196
],
[
68251,
68253
],
[
68272,
68274
],
[
68278,
68280
],
[
68309,
68311
],
[
68347,
68349
],
[
68381,
68383
],
[
68402,
68404
],
[
68431,
68433
],
[
68481,
68483
],
[
68495,
68497
],
[
68506,
68508
],
[
68517,
68519
],
[
68545,
68547
],
[
68582,
68584
],
[
68612,
68614
],
[
68620,
68622
],
[
68647,
68649
],
[
68672,
68674
],
[
68739,
68741
],
[
68776,
68778
],
[
68811,
68813
],
[
68847,
68849
],
[
68886,
68888
],
[
68923,
68925
],
[
68959,
68961
],
[
68995,
68997
],
[
69051,
69053
],
[
69086,
69088
],
[
69121,
69123
],
[
69170,
69172
],
[
69220,
69222
],
[
69255,
69257
],
[
69291,
69293
],
[
69339,
69341
]
],
[
[
68,
72
],
[
1802,
1806
],
[
10206,
10210
],
[
16394,
16398
],
[
23035,
23039
],
[
32753,
32757
],
[
32764,
32768
],
[
35120,
35124
],
[
35142,
35146
],
[
35222,
35226
],
[
35267,
35271
],
[
35688,
35692
],
[
35717,
35721
],
[
35755,
35759
],
[
35783,
35787
],
[
35813,
35817
],
[
35863,
35867
],
[
35887,
35891
],
[
35914,
35918
],
[
35947,
35951
],
[
36001,
36005
],
[
36050,
36054
],
[
36103,
36107
],
[
36168,
36172
],
[
36208,
36212
],
[
36259,
36263
],
[
36476,
36480
],
[
36520,
36524
],
[
36548,
36552
],
[
36592,
36596
],
[
36628,
36632
],
[
36675,
36679
],
[
36700,
36704
],
[
36952,
36956
],
[
37007,
37011
],
[
37063,
37067
],
[
37122,
37126
],
[
37184,
37188
],
[
37265,
37269
],
[
37329,
37333
],
[
37394,
37398
],
[
37462,
37466
],
[
37533,
37537
],
[
37596,
37600
],
[
37665,
37669
],
[
37735,
37739
],
[
37808,
37812
],
[
37884,
37888
],
[
37954,
37958
],
[
38021,
38025
],
[
38103,
38107
],
[
38175,
38179
],
[
38221,
38225
],
[
38291,
38295
],
[
38372,
38376
],
[
38394,
38398
],
[
38447,
38451
],
[
38468,
38472
],
[
38545,
38549
],
[
39307,
39311
],
[
39386,
39390
],
[
40088,
40092
],
[
40111,
40115
],
[
40454,
40458
],
[
40637,
40641
],
[
40730,
40734
],
[
40776,
40780
],
[
40851,
40855
],
[
40886,
40890
],
[
42898,
42902
],
[
42907,
42911
],
[
44159,
44163
],
[
45281,
45285
],
[
45433,
45437
],
[
45631,
45635
],
[
45682,
45686
],
[
45772,
45776
],
[
45810,
45814
],
[
45900,
45904
],
[
45952,
45956
],
[
46056,
46060
],
[
46353,
46357
],
[
46381,
46385
],
[
49618,
49622
],
[
49848,
49852
],
[
62583,
62587
],
[
62595,
62599
],
[
64242,
64246
],
[
64253,
64257
],
[
65436,
65440
],
[
66966,
66970
],
[
67005,
67009
],
[
67632,
67636
],
[
67734,
67738
],
[
67836,
67840
],
[
38616,
38620
]
],
[
[
82,
86
],
[
1848,
1852
],
[
10177,
10181
],
[
16469,
16473
],
[
23114,
23118
],
[
32820,
32824
],
[
32838,
32842
],
[
33173,
33177
],
[
33201,
33205
],
[
33230,
33234
],
[
33268,
33272
],
[
33296,
33300
],
[
33363,
33367
],
[
33390,
33394
],
[
33422,
33426
],
[
33476,
33480
],
[
33500,
33504
],
[
33526,
33530
],
[
33561,
33565
],
[
33758,
33762
],
[
33802,
33806
],
[
33844,
33848
],
[
33893,
33897
],
[
33930,
33934
],
[
33967,
33971
],
[
34003,
34007
],
[
34071,
34075
],
[
34140,
34144
],
[
34209,
34213
],
[
34249,
34253
],
[
34276,
34280
],
[
34300,
34304
],
[
34328,
34332
],
[
34352,
34356
],
[
34388,
34392
],
[
34420,
34424
],
[
34457,
34461
],
[
34490,
34494
],
[
34519,
34523
],
[
34544,
34548
],
[
34567,
34571
],
[
34623,
34627
],
[
34724,
34728
],
[
34791,
34795
],
[
34856,
34860
],
[
34915,
34919
],
[
34960,
34964
],
[
35034,
35038
],
[
35104,
35108
],
[
35206,
35210
],
[
35249,
35253
],
[
35276,
35280
],
[
35310,
35314
],
[
35363,
35367
],
[
35439,
35443
],
[
35485,
35489
],
[
35564,
35568
],
[
38119,
38123
],
[
38139,
38143
],
[
42812,
42816
],
[
42837,
42841
],
[
45238,
45242
],
[
45427,
45431
],
[
49812,
49816
],
[
62537,
62541
],
[
62546,
62550
],
[
64188,
64192
],
[
64198,
64202
],
[
66826,
66830
],
[
66861,
66865
],
[
67339,
67343
],
[
67432,
67436
],
[
67530,
67534
],
[
35640,
35644
]
],
[
[
88,
96
],
[
2415,
2423
],
[
2454,
2462
],
[
2826,
2834
],
[
2846,
2854
],
[
2935,
2943
],
[
2977,
2985
],
[
3030,
3038
],
[
3050,
3058
],
[
3131,
3139
],
[
3165,
3173
],
[
3184,
3192
],
[
3218,
3226
],
[
3238,
3246
],
[
3273,
3281
],
[
3334,
3342
],
[
3395,
3403
],
[
3421,
3429
],
[
3455,
3463
],
[
3481,
3489
],
[
3515,
3523
],
[
3542,
3550
],
[
3576,
3584
],
[
3603,
3611
],
[
3638,
3646
],
[
3669,
3677
],
[
3758,
3766
],
[
3853,
3861
],
[
3913,
3921
],
[
3975,
3983
],
[
4038,
4046
],
[
4091,
4099
],
[
4146,
4154
],
[
4201,
4209
],
[
8058,
8066
],
[
8128,
8136
],
[
8149,
8157
],
[
8194,
8202
],
[
8328,
8336
],
[
8349,
8357
],
[
8390,
8398
],
[
8416,
8424
],
[
9237,
9245
],
[
9286,
9294
],
[
9369,
9377
],
[
9418,
9426
],
[
9501,
9509
],
[
9549,
9557
],
[
9632,
9640
],
[
9680,
9688
],
[
11079,
11087
],
[
11099,
11107
],
[
11213,
11221
],
[
11233,
11241
],
[
11275,
11283
],
[
11295,
11303
],
[
11417,
11425
],
[
11436,
11444
],
[
11478,
11486
],
[
11498,
11506
],
[
11541,
11549
],
[
11594,
11602
],
[
11647,
11655
],
[
11674,
11682
],
[
11708,
11716
],
[
11735,
11743
],
[
11769,
11777
],
[
11796,
11804
],
[
11830,
11838
],
[
11856,
11864
],
[
11891,
11899
],
[
11922,
11930
],
[
12051,
12059
],
[
12112,
12120
],
[
12172,
12180
],
[
12235,
12243
],
[
12289,
12297
],
[
12344,
12352
],
[
12399,
12407
],
[
12755,
12763
],
[
15672,
15680
],
[
15693,
15701
],
[
15738,
15746
],
[
15787,
15795
],
[
15808,
15816
],
[
15849,
15857
],
[
15875,
15883
],
[
16966,
16974
],
[
17040,
17048
],
[
17156,
17164
],
[
17200,
17208
],
[
17289,
17297
],
[
17346,
17354
],
[
17405,
17413
],
[
17536,
17544
],
[
17583,
17591
],
[
17669,
17677
],
[
17733,
17741
],
[
17783,
17791
],
[
17880,
17888
],
[
17940,
17948
],
[
18002,
18010
],
[
18104,
18112
],
[
18154,
18162
],
[
18205,
18213
],
[
18325,
18333
],
[
18406,
18414
],
[
18486,
18494
],
[
18567,
18575
],
[
18649,
18657
],
[
18731,
18739
],
[
18812,
18820
],
[
19230,
19238
],
[
19257,
19265
],
[
19291,
19299
],
[
19319,
19327
],
[
19353,
19361
],
[
19381,
19389
],
[
19416,
19424
],
[
19466,
19474
],
[
21112,
21120
],
[
21153,
21161
],
[
21382,
21390
],
[
21463,
21471
],
[
21480,
21488
],
[
21682,
21690
],
[
22166,
22174
],
[
22262,
22270
],
[
22366,
22374
],
[
22523,
22531
],
[
23559,
23567
],
[
23598,
23606
],
[
23672,
23680
],
[
23790,
23798
],
[
23834,
23842
],
[
23945,
23953
],
[
23990,
23998
],
[
24073,
24081
],
[
24123,
24131
],
[
24172,
24180
],
[
24260,
24268
],
[
24311,
24319
],
[
24361,
24369
],
[
24481,
24489
],
[
24562,
24570
],
[
24643,
24651
],
[
24724,
24732
],
[
24806,
24814
],
[
24887,
24895
],
[
24968,
24976
],
[
25285,
25293
],
[
25312,
25320
],
[
25346,
25354
],
[
25374,
25382
],
[
25408,
25416
],
[
25436,
25444
],
[
25471,
25479
],
[
25499,
25507
],
[
25534,
25542
],
[
25564,
25572
],
[
25825,
25833
],
[
25849,
25857
],
[
25873,
25881
],
[
25935,
25943
],
[
25959,
25967
],
[
25983,
25991
],
[
26090,
26098
],
[
26114,
26122
],
[
26138,
26146
],
[
26190,
26198
],
[
26214,
26222
],
[
26238,
26246
],
[
27755,
27763
],
[
27806,
27814
],
[
27830,
27838
],
[
27889,
27897
],
[
27930,
27938
],
[
27954,
27962
],
[
28305,
28313
],
[
28355,
28363
],
[
28459,
28467
],
[
29207,
29215
],
[
29302,
29310
],
[
29406,
29414
],
[
30277,
30285
],
[
30326,
30334
],
[
31353,
31361
],
[
32077,
32085
],
[
32145,
32153
],
[
32258,
32266
],
[
32278,
32286
],
[
32319,
32327
],
[
32340,
32348
],
[
33427,
33435
],
[
33450,
33458
],
[
33584,
33592
],
[
34008,
34016
],
[
34076,
34084
],
[
34145,
34153
],
[
34357,
34365
],
[
34393,
34401
],
[
34425,
34433
],
[
34462,
34470
],
[
35974,
35982
],
[
36141,
36149
],
[
36232,
36240
],
[
36284,
36292
],
[
36709,
36717
],
[
36729,
36737
],
[
36768,
36776
],
[
38845,
38853
],
[
38921,
38929
],
[
40093,
40101
],
[
40116,
40124
],
[
42370,
42378
],
[
42497,
42505
],
[
42517,
42525
],
[
42552,
42560
],
[
42573,
42581
],
[
52243,
52251
],
[
52320,
52328
],
[
52955,
52963
],
[
53690,
53698
],
[
53938,
53946
],
[
54334,
54342
],
[
54582,
54590
],
[
54939,
54947
],
[
55207,
55215
],
[
55610,
55618
],
[
55908,
55916
],
[
55944,
55952
],
[
57129,
57137
],
[
57170,
57178
],
[
57197,
57205
],
[
57231,
57239
],
[
58482,
58490
],
[
58508,
58516
],
[
58525,
58533
],
[
58565,
58573
],
[
58582,
58590
],
[
58613,
58621
],
[
58640,
58648
],
[
60076,
60084
],
[
60115,
60123
],
[
60142,
60150
],
[
60176,
60184
],
[
61869,
61877
],
[
61893,
61901
],
[
61943,
61951
],
[
62082,
62090
],
[
62142,
62150
],
[
62204,
62212
],
[
62261,
62269
],
[
63589,
63597
],
[
63831,
63839
],
[
63925,
63933
],
[
65845,
65853
],
[
67862,
67870
],
[
67899,
67907
],
[
67950,
67958
],
[
68106,
68114
],
[
68144,
68152
],
[
68197,
68205
],
[
68350,
68358
],
[
68384,
68392
],
[
68434,
68442
],
[
68585,
68593
],
[
68623,
68631
],
[
68675,
68683
],
[
69124,
69132
],
[
69173,
69181
],
[
69294,
69302
],
[
69342,
69350
]
],
[
[
98,
102
],
[
1818,
1822
],
[
1860,
1864
],
[
1907,
1911
],
[
1991,
1995
],
[
2045,
2049
],
[
2800,
2804
],
[
2862,
2866
],
[
2902,
2906
],
[
2951,
2955
],
[
3004,
3008
],
[
3066,
3070
],
[
3292,
3296
],
[
3302,
3306
],
[
3353,
3357
],
[
3363,
3367
],
[
3710,
3714
],
[
3720,
3724
],
[
3776,
3780
],
[
3813,
3817
],
[
3825,
3829
],
[
3873,
3877
],
[
3885,
3889
],
[
3935,
3939
],
[
3947,
3951
],
[
3997,
4001
],
[
4009,
4013
],
[
10222,
10226
],
[
10264,
10268
],
[
10310,
10314
],
[
10356,
10360
],
[
10443,
10447
],
[
11147,
11151
],
[
11187,
11191
],
[
11249,
11253
],
[
11311,
11315
],
[
11351,
11355
],
[
11391,
11395
],
[
11452,
11456
],
[
11514,
11518
],
[
11561,
11565
],
[
11614,
11618
],
[
11963,
11967
],
[
11973,
11977
],
[
12011,
12015
],
[
12023,
12027
],
[
12072,
12076
],
[
12084,
12088
],
[
12132,
12136
],
[
12144,
12148
],
[
12194,
12198
],
[
12206,
12210
],
[
16439,
16443
],
[
16481,
16485
],
[
16565,
16569
],
[
16616,
16620
],
[
17014,
17018
],
[
17060,
17064
],
[
17251,
17255
],
[
17262,
17266
],
[
17308,
17312
],
[
17319,
17323
],
[
17367,
17371
],
[
17378,
17382
],
[
17427,
17431
],
[
17438,
17442
],
[
17474,
17478
],
[
17510,
17514
],
[
17557,
17561
],
[
17605,
17609
],
[
17643,
17647
],
[
17692,
17696
],
[
17757,
17761
],
[
17806,
17810
],
[
17840,
17844
],
[
17851,
17855
],
[
17900,
17904
],
[
17911,
17915
],
[
17962,
17966
],
[
17973,
17977
],
[
18025,
18029
],
[
18036,
18040
],
[
18074,
18078
],
[
18124,
18128
],
[
18175,
18179
],
[
18226,
18230
],
[
18279,
18283
],
[
18289,
18293
],
[
18299,
18303
],
[
18360,
18364
],
[
18370,
18374
],
[
18380,
18384
],
[
18440,
18444
],
[
18450,
18454
],
[
18460,
18464
],
[
18521,
18525
],
[
18531,
18535
],
[
18541,
18545
],
[
18603,
18607
],
[
18613,
18617
],
[
18623,
18627
],
[
18685,
18689
],
[
18695,
18699
],
[
18705,
18709
],
[
18766,
18770
],
[
18776,
18780
],
[
18786,
18790
],
[
18847,
18851
],
[
18857,
18861
],
[
18867,
18871
],
[
21137,
21141
],
[
21422,
21426
],
[
21431,
21435
],
[
21436,
21440
],
[
21498,
21502
],
[
23080,
23084
],
[
23130,
23134
],
[
23172,
23176
],
[
23227,
23231
],
[
23646,
23650
],
[
23694,
23698
],
[
23885,
23889
],
[
23919,
23923
],
[
23964,
23968
],
[
24010,
24014
],
[
24047,
24051
],
[
24097,
24101
],
[
24146,
24150
],
[
24196,
24200
],
[
24230,
24234
],
[
24281,
24285
],
[
24331,
24335
],
[
24383,
24387
],
[
24431,
24435
],
[
24441,
24445
],
[
24455,
24459
],
[
24512,
24516
],
[
24522,
24526
],
[
24536,
24540
],
[
24593,
24597
],
[
24603,
24607
],
[
24617,
24621
],
[
24674,
24678
],
[
24684,
24688
],
[
24698,
24702
],
[
24756,
24760
],
[
24766,
24770
],
[
24780,
24784
],
[
24837,
24841
],
[
24847,
24851
],
[
24861,
24865
],
[
24918,
24922
],
[
24928,
24932
],
[
24942,
24946
],
[
25000,
25004
],
[
25010,
25014
],
[
25024,
25028
],
[
28064,
28068
],
[
28345,
28349
],
[
28373,
28377
],
[
28442,
28446
],
[
28447,
28451
],
[
31081,
31085
],
[
31117,
31121
],
[
31153,
31157
],
[
31189,
31193
],
[
31225,
31229
],
[
31235,
31239
],
[
31273,
31277
],
[
31283,
31287
],
[
31396,
31400
],
[
31405,
31409
],
[
31444,
31448
],
[
31453,
31457
],
[
31492,
31496
],
[
31535,
31539
],
[
31578,
31582
],
[
31591,
31595
],
[
31630,
31634
],
[
31643,
31647
],
[
31897,
31901
],
[
32710,
32714
],
[
32776,
32780
],
[
32890,
32894
],
[
33071,
33075
],
[
33531,
33535
],
[
33567,
33571
],
[
33866,
33870
],
[
35077,
35081
],
[
35147,
35151
],
[
35181,
35185
],
[
35397,
35401
],
[
35586,
35590
],
[
35919,
35923
],
[
35958,
35962
],
[
36006,
36010
],
[
36020,
36024
],
[
36056,
36060
],
[
36069,
36073
],
[
36108,
36112
],
[
36121,
36125
],
[
36178,
36182
],
[
36217,
36221
],
[
36269,
36273
],
[
38055,
38059
],
[
38079,
38083
],
[
38128,
38132
],
[
38146,
38150
],
[
38246,
38250
],
[
38262,
38266
],
[
38323,
38327
],
[
38347,
38351
],
[
39120,
39124
],
[
39746,
39750
],
[
39944,
39948
],
[
40020,
40024
],
[
40317,
40321
],
[
40357,
40361
],
[
40898,
40902
],
[
41941,
41945
],
[
41978,
41982
],
[
42014,
42018
],
[
42734,
42738
],
[
42746,
42750
],
[
42771,
42775
],
[
42823,
42827
],
[
42842,
42846
],
[
42854,
42858
],
[
42955,
42959
],
[
42974,
42978
],
[
43042,
43046
],
[
43073,
43077
],
[
51677,
51681
],
[
51723,
51727
],
[
52117,
52121
],
[
52126,
52130
],
[
52184,
52188
],
[
52297,
52301
],
[
52307,
52311
],
[
52433,
52437
],
[
52544,
52548
],
[
52550,
52554
],
[
52565,
52569
],
[
52570,
52574
],
[
52600,
52604
],
[
52609,
52613
],
[
52614,
52618
],
[
52635,
52639
],
[
52640,
52644
],
[
52663,
52667
],
[
52671,
52675
],
[
52679,
52683
],
[
52684,
52688
],
[
52701,
52705
],
[
52709,
52713
],
[
52715,
52719
],
[
52740,
52744
],
[
52749,
52753
],
[
52755,
52759
],
[
52774,
52778
],
[
52794,
52798
],
[
52800,
52804
],
[
52823,
52827
],
[
52831,
52835
],
[
52839,
52843
],
[
52844,
52848
],
[
52861,
52865
],
[
52869,
52873
],
[
52875,
52879
],
[
52900,
52904
],
[
52909,
52913
],
[
52915,
52919
],
[
52934,
52938
],
[
52981,
52985
],
[
52989,
52993
],
[
52997,
53001
],
[
53005,
53009
],
[
53010,
53014
],
[
53027,
53031
],
[
53035,
53039
],
[
53041,
53045
],
[
53066,
53070
],
[
53075,
53079
],
[
53081,
53085
],
[
53100,
53104
],
[
53130,
53134
],
[
53138,
53142
],
[
53143,
53147
],
[
53164,
53168
],
[
53173,
53177
],
[
53179,
53183
],
[
53211,
53215
],
[
53219,
53223
],
[
53225,
53229
],
[
53246,
53250
],
[
53255,
53259
],
[
53261,
53265
],
[
53284,
53288
],
[
53292,
53296
],
[
53300,
53304
],
[
53305,
53309
],
[
53322,
53326
],
[
53330,
53334
],
[
53336,
53340
],
[
53361,
53365
],
[
53370,
53374
],
[
53376,
53380
],
[
53395,
53399
],
[
53418,
53422
],
[
53440,
53444
],
[
53448,
53452
],
[
53454,
53458
],
[
53462,
53466
],
[
53467,
53471
],
[
53482,
53486
],
[
53487,
53491
],
[
53509,
53513
],
[
53517,
53521
],
[
53523,
53527
],
[
53551,
53555
],
[
53559,
53563
],
[
53567,
53571
],
[
53575,
53579
],
[
53580,
53584
],
[
53597,
53601
],
[
53605,
53609
],
[
53611,
53615
],
[
53636,
53640
],
[
53645,
53649
],
[
53651,
53655
],
[
53670,
53674
],
[
53722,
53726
],
[
53730,
53734
],
[
53735,
53739
],
[
53749,
53753
],
[
53757,
53761
],
[
53763,
53767
],
[
53791,
53795
],
[
53799,
53803
],
[
53807,
53811
],
[
53815,
53819
],
[
53820,
53824
],
[
53837,
53841
],
[
53845,
53849
],
[
53851,
53855
],
[
53876,
53880
],
[
53885,
53889
],
[
53891,
53895
],
[
53910,
53914
],
[
53956,
53960
],
[
53964,
53968
],
[
53972,
53976
],
[
53977,
53981
],
[
53994,
53998
],
[
54002,
54006
],
[
54008,
54012
],
[
54033,
54037
],
[
54042,
54046
],
[
54048,
54052
],
[
54067,
54071
],
[
54097,
54101
],
[
54105,
54109
],
[
54111,
54115
],
[
54126,
54130
],
[
54131,
54135
],
[
54153,
54157
],
[
54161,
54165
],
[
54167,
54171
],
[
54195,
54199
],
[
54203,
54207
],
[
54211,
54215
],
[
54219,
54223
],
[
54224,
54228
],
[
54241,
54245
],
[
54249,
54253
],
[
54255,
54259
],
[
54280,
54284
],
[
54289,
54293
],
[
54295,
54299
],
[
54314,
54318
],
[
54365,
54369
],
[
54374,
54378
],
[
54379,
54383
],
[
54393,
54397
],
[
54401,
54405
],
[
54407,
54411
],
[
54435,
54439
],
[
54443,
54447
],
[
54451,
54455
],
[
54459,
54463
],
[
54464,
54468
],
[
54481,
54485
],
[
54489,
54493
],
[
54495,
54499
],
[
54520,
54524
],
[
54529,
54533
],
[
54535,
54539
],
[
54554,
54558
],
[
54600,
54604
],
[
54608,
54612
],
[
54616,
54620
],
[
54621,
54625
],
[
54638,
54642
],
[
54646,
54650
],
[
54652,
54656
],
[
54677,
54681
],
[
54686,
54690
],
[
54692,
54696
],
[
54711,
54715
],
[
54739,
54743
],
[
54744,
54748
],
[
54758,
54762
],
[
54766,
54770
],
[
54772,
54776
],
[
54800,
54804
],
[
54808,
54812
],
[
54816,
54820
],
[
54824,
54828
],
[
54829,
54833
],
[
54846,
54850
],
[
54854,
54858
],
[
54860,
54864
],
[
54885,
54889
],
[
54894,
54898
],
[
54900,
54904
],
[
54919,
54923
],
[
54978,
54982
],
[
54984,
54988
],
[
54999,
55003
],
[
55004,
55008
],
[
55018,
55022
],
[
55026,
55030
],
[
55032,
55036
],
[
55060,
55064
],
[
55068,
55072
],
[
55076,
55080
],
[
55084,
55088
],
[
55089,
55093
],
[
55114,
55118
],
[
55122,
55126
],
[
55128,
55132
],
[
55145,
55149
],
[
55154,
55158
],
[
55160,
55164
],
[
55187,
55191
],
[
55225,
55229
],
[
55233,
55237
],
[
55241,
55245
],
[
55246,
55250
],
[
55271,
55275
],
[
55279,
55283
],
[
55285,
55289
],
[
55302,
55306
],
[
55311,
55315
],
[
55317,
55321
],
[
55344,
55348
],
[
55364,
55368
],
[
55373,
55377
],
[
55379,
55383
],
[
55394,
55398
],
[
55399,
55403
],
[
55421,
55425
],
[
55429,
55433
],
[
55435,
55439
],
[
55463,
55467
],
[
55471,
55475
],
[
55479,
55483
],
[
55487,
55491
],
[
55492,
55496
],
[
55509,
55513
],
[
55517,
55521
],
[
55523,
55527
],
[
55548,
55552
],
[
55557,
55561
],
[
55563,
55567
],
[
55582,
55586
],
[
55656,
55660
],
[
55872,
55876
],
[
56330,
56334
],
[
56612,
56616
],
[
57075,
57079
],
[
57251,
57255
],
[
58301,
58305
],
[
58420,
58424
],
[
58470,
58474
],
[
59139,
59143
],
[
60048,
60052
],
[
60195,
60199
],
[
61928,
61932
],
[
61976,
61980
],
[
62009,
62013
],
[
62020,
62024
],
[
62055,
62059
],
[
62066,
62070
],
[
62113,
62117
],
[
62124,
62128
],
[
62175,
62179
],
[
62186,
62190
],
[
62236,
62240
],
[
62246,
62250
],
[
62317,
62321
],
[
62425,
62429
],
[
62604,
62608
],
[
62628,
62632
],
[
62689,
62693
],
[
62713,
62717
],
[
63549,
63553
],
[
63632,
63636
],
[
63643,
63647
],
[
63680,
63684
],
[
63691,
63695
],
[
63758,
63762
],
[
63769,
63773
],
[
63804,
63808
],
[
63815,
63819
],
[
63866,
63870
],
[
63900,
63904
],
[
63910,
63914
],
[
63982,
63986
],
[
64091,
64095
],
[
64258,
64262
],
[
64282,
64286
],
[
64340,
64344
],
[
64364,
64368
],
[
64579,
64583
],
[
66113,
66117
]
],
[
[
104,
108
],
[
1773,
1777
],
[
10252,
10256
],
[
16423,
16427
],
[
23068,
23072
],
[
30809,
30813
],
[
30838,
30842
],
[
30876,
30880
],
[
30905,
30909
],
[
30934,
30938
],
[
30996,
31000
],
[
31020,
31024
],
[
31047,
31051
],
[
31076,
31080
],
[
31111,
31115
],
[
31148,
31152
],
[
31183,
31187
],
[
31220,
31224
],
[
31267,
31271
],
[
31316,
31320
],
[
31348,
31352
],
[
31390,
31394
],
[
31437,
31441
],
[
31486,
31490
],
[
31528,
31532
],
[
31572,
31576
],
[
31623,
31627
],
[
31836,
31840
],
[
31876,
31880
],
[
31924,
31928
],
[
31961,
31965
],
[
31998,
32002
],
[
32034,
32038
],
[
32072,
32076
],
[
32140,
32144
],
[
32210,
32214
],
[
32249,
32253
],
[
32310,
32314
],
[
32381,
32385
],
[
32432,
32436
],
[
32524,
32528
],
[
32588,
32592
],
[
32673,
32677
],
[
32737,
32741
],
[
32804,
32808
],
[
32857,
32861
],
[
32925,
32929
],
[
32979,
32983
],
[
33050,
33054
],
[
33822,
33826
],
[
34758,
34762
],
[
35326,
35330
],
[
35344,
35348
],
[
38037,
38041
],
[
38048,
38052
],
[
42716,
42720
],
[
42729,
42733
],
[
44153,
44157
],
[
45195,
45199
],
[
45421,
45425
],
[
49776,
49780
],
[
62483,
62487
],
[
62493,
62497
],
[
64142,
64146
],
[
64151,
64155
],
[
66686,
66690
],
[
66721,
66725
],
[
67147,
67151
],
[
67240,
67244
],
[
33125,
33129
]
],
[
[
110,
114
],
[
1890,
1894
],
[
10294,
10298
],
[
16515,
16519
],
[
23006,
23010
],
[
32873,
32877
],
[
32884,
32888
],
[
35379,
35383
],
[
35391,
35395
],
[
36763,
36767
],
[
38191,
38195
],
[
38200,
38204
],
[
41738,
41742
],
[
41767,
41771
],
[
41805,
41809
],
[
41831,
41835
],
[
41856,
41860
],
[
41882,
41886
],
[
41909,
41913
],
[
41936,
41940
],
[
41971,
41975
],
[
42006,
42010
],
[
42043,
42047
],
[
42088,
42092
],
[
42133,
42137
],
[
42171,
42175
],
[
42207,
42211
],
[
42246,
42250
],
[
42286,
42290
],
[
42325,
42329
],
[
42365,
42369
],
[
42428,
42432
],
[
42463,
42467
],
[
42488,
42492
],
[
42543,
42547
],
[
42633,
42637
],
[
42700,
42704
],
[
42796,
42800
],
[
42882,
42886
],
[
42928,
42932
],
[
43008,
43012
],
[
43099,
43103
],
[
43121,
43125
],
[
43174,
43178
],
[
43195,
43199
],
[
43272,
43276
],
[
44165,
44169
],
[
45324,
45328
],
[
45439,
45443
],
[
46410,
46414
],
[
46438,
46442
],
[
49654,
49658
],
[
49884,
49888
],
[
62668,
62672
],
[
62680,
62684
],
[
64322,
64326
],
[
64333,
64337
],
[
67044,
67048
],
[
67083,
67087
],
[
67977,
67981
],
[
68080,
68084
],
[
43344,
43348
]
],
[
[
116,
120
],
[
6299,
6303
],
[
6376,
6380
],
[
13959,
13963
],
[
14036,
14040
],
[
20340,
20344
],
[
20417,
20421
],
[
23312,
23316
],
[
23348,
23352
],
[
23383,
23387
],
[
25057,
25061
],
[
25094,
25098
],
[
27369,
27373
],
[
27446,
27450
]
],
[
[
122,
123
],
[
44204,
44205
]
],
[
[
125,
126
],
[
1606,
1607
],
[
1637,
1638
],
[
2599,
2600
],
[
2793,
2794
],
[
2895,
2896
],
[
2997,
2998
],
[
3099,
3100
],
[
7994,
7995
],
[
8053,
8054
],
[
8123,
8124
],
[
8144,
8145
],
[
8189,
8190
],
[
8238,
8239
],
[
8246,
8247
],
[
8274,
8275
],
[
8287,
8288
],
[
8323,
8324
],
[
8344,
8345
],
[
8385,
8386
],
[
8411,
8412
],
[
11054,
11055
],
[
11140,
11141
],
[
11180,
11181
],
[
11344,
11345
],
[
11384,
11385
],
[
15534,
15535
],
[
15591,
15592
],
[
15599,
15600
],
[
15628,
15629
],
[
15667,
15668
],
[
15688,
15689
],
[
15733,
15734
],
[
15782,
15783
],
[
15803,
15804
],
[
15844,
15845
],
[
15870,
15871
],
[
15919,
15920
],
[
15927,
15928
],
[
15955,
15956
],
[
15968,
15969
],
[
17093,
17094
],
[
17124,
17125
],
[
17176,
17177
],
[
17220,
17221
],
[
19936,
19937
],
[
21636,
21637
],
[
21677,
21678
],
[
22510,
22511
],
[
22518,
22519
],
[
22591,
22592
],
[
22599,
22600
],
[
22627,
22628
],
[
22640,
22641
],
[
23727,
23728
],
[
23758,
23759
],
[
23810,
23811
],
[
23854,
23855
],
[
26666,
26667
],
[
28638,
28639
],
[
29553,
29554
],
[
29561,
29562
],
[
29621,
29622
],
[
29629,
29630
],
[
29657,
29658
],
[
29670,
29671
],
[
29913,
29914
],
[
29935,
29936
],
[
29950,
29951
],
[
29973,
29974
],
[
29996,
29997
],
[
30019,
30020
],
[
30029,
30030
],
[
30051,
30052
],
[
30073,
30074
],
[
30153,
30154
],
[
30186,
30187
],
[
30586,
30587
],
[
31321,
31322
],
[
32829,
32830
],
[
33395,
33396
],
[
34989,
34990
],
[
35231,
35232
],
[
35335,
35336
],
[
38719,
38720
],
[
39791,
39792
],
[
40685,
40686
],
[
45471,
45472
],
[
50853,
50854
],
[
50881,
50882
],
[
52145,
52146
],
[
52341,
52342
],
[
52375,
52376
],
[
54969,
54970
],
[
62878,
62879
],
[
65994,
65995
]
],
[
[
128,
131
],
[
5869,
5872
],
[
5879,
5882
],
[
5893,
5896
],
[
6610,
6613
],
[
6686,
6689
],
[
8647,
8650
],
[
8659,
8662
],
[
9059,
9062
],
[
9074,
9077
],
[
9092,
9095
],
[
9109,
9112
],
[
9459,
9462
],
[
9494,
9497
],
[
9542,
9545
],
[
9607,
9610
],
[
9655,
9658
],
[
9703,
9706
],
[
9792,
9795
],
[
9846,
9849
],
[
13137,
13140
],
[
13514,
13517
],
[
13527,
13530
],
[
13545,
13548
],
[
14270,
14273
],
[
14346,
14349
],
[
16123,
16126
],
[
16151,
16154
],
[
16178,
16181
],
[
16221,
16224
],
[
16231,
16234
],
[
16274,
16277
],
[
16311,
16314
],
[
16337,
16340
],
[
16366,
16369
],
[
16390,
16393
],
[
16419,
16422
],
[
16465,
16468
],
[
16511,
16514
],
[
16544,
16547
],
[
16600,
16603
],
[
16650,
16653
],
[
16686,
16689
],
[
16721,
16724
],
[
16758,
16761
],
[
16794,
16797
],
[
16818,
16821
],
[
16843,
16846
],
[
16869,
16872
],
[
16896,
16899
],
[
16931,
16934
],
[
16959,
16962
],
[
17001,
17004
],
[
17033,
17036
],
[
17080,
17083
],
[
17110,
17113
],
[
17149,
17152
],
[
17193,
17196
],
[
17238,
17241
],
[
17282,
17285
],
[
17339,
17342
],
[
17398,
17401
],
[
17459,
17462
],
[
17493,
17496
],
[
17529,
17532
],
[
17576,
17579
],
[
17625,
17628
],
[
17662,
17665
],
[
17726,
17729
],
[
17776,
17779
],
[
17826,
17829
],
[
17873,
17876
],
[
17933,
17936
],
[
17995,
17998
],
[
18059,
18062
],
[
18097,
18100
],
[
18147,
18150
],
[
18198,
18201
],
[
18250,
18253
],
[
18318,
18321
],
[
18399,
18402
],
[
18479,
18482
],
[
18560,
18563
],
[
18642,
18645
],
[
18724,
18727
],
[
18805,
18808
],
[
18887,
18890
],
[
18921,
18924
],
[
18947,
18950
],
[
18977,
18980
],
[
19017,
19020
],
[
19051,
19054
],
[
19095,
19098
],
[
19142,
19145
],
[
19181,
19184
],
[
19223,
19226
],
[
19250,
19253
],
[
19284,
19287
],
[
19312,
19315
],
[
19346,
19349
],
[
19374,
19377
],
[
19409,
19412
],
[
19437,
19440
],
[
19459,
19462
],
[
19489,
19492
],
[
19512,
19515
],
[
19548,
19551
],
[
19611,
19614
],
[
19774,
19777
],
[
19850,
19853
],
[
19905,
19908
],
[
19978,
19981
],
[
20021,
20024
],
[
20074,
20077
],
[
20126,
20129
],
[
20179,
20182
],
[
20231,
20234
],
[
20284,
20287
],
[
20336,
20339
],
[
20389,
20392
],
[
20441,
20444
],
[
20493,
20496
],
[
20544,
20547
],
[
20596,
20599
],
[
20647,
20650
],
[
20651,
20654
],
[
20699,
20702
],
[
20727,
20730
],
[
20750,
20753
],
[
20802,
20805
],
[
20853,
20856
],
[
20941,
20944
],
[
21007,
21010
],
[
21056,
21059
],
[
21083,
21086
],
[
21105,
21108
],
[
21146,
21149
],
[
21181,
21184
],
[
21252,
21255
],
[
21324,
21327
],
[
21352,
21355
],
[
21375,
21378
],
[
21543,
21546
],
[
21555,
21558
],
[
21582,
21585
],
[
21603,
21606
],
[
21621,
21624
],
[
21662,
21665
],
[
21744,
21747
],
[
21778,
21781
],
[
21787,
21790
],
[
21800,
21803
],
[
21807,
21810
],
[
21836,
21839
],
[
21870,
21873
],
[
21879,
21882
],
[
21892,
21895
],
[
21899,
21902
],
[
21928,
21931
],
[
21975,
21978
],
[
21984,
21987
],
[
21993,
21996
],
[
22002,
22005
],
[
22009,
22012
],
[
22016,
22019
],
[
22038,
22041
],
[
22045,
22048
],
[
22054,
22057
],
[
22061,
22064
],
[
22070,
22073
],
[
22077,
22080
],
[
22111,
22114
],
[
22146,
22149
],
[
22158,
22161
],
[
22207,
22210
],
[
22242,
22245
],
[
22254,
22257
],
[
22304,
22307
],
[
22346,
22349
],
[
22358,
22361
],
[
22433,
22436
],
[
22494,
22497
],
[
22575,
22578
],
[
22623,
22626
],
[
22636,
22639
],
[
22684,
22687
],
[
22702,
22705
],
[
25818,
25821
],
[
26083,
26086
],
[
27022,
27025
],
[
27032,
27035
],
[
27577,
27580
],
[
27653,
27656
],
[
36481,
36484
],
[
42469,
42472
],
[
42548,
42551
],
[
44143,
44146
],
[
44897,
44900
],
[
45109,
45112
],
[
45411,
45414
],
[
46683,
46686
],
[
46814,
46817
],
[
46933,
46936
],
[
47072,
47075
],
[
47219,
47222
],
[
47236,
47239
],
[
47394,
47397
],
[
47509,
47512
],
[
47628,
47631
],
[
47751,
47754
],
[
47898,
47901
],
[
47915,
47918
],
[
48039,
48042
],
[
48155,
48158
],
[
48274,
48277
],
[
48398,
48401
],
[
48545,
48548
],
[
48562,
48565
],
[
48733,
48736
],
[
48748,
48751
],
[
48886,
48889
],
[
48901,
48904
],
[
49047,
49050
],
[
49064,
49067
],
[
49216,
49219
],
[
49233,
49236
],
[
49385,
49388
],
[
49402,
49405
],
[
49598,
49601
],
[
49869,
49872
],
[
50224,
50227
],
[
50606,
50609
],
[
50641,
50644
],
[
56315,
56318
],
[
56383,
56386
],
[
56462,
56465
],
[
56665,
56668
],
[
58688,
58691
],
[
59027,
59030
],
[
59036,
59039
],
[
59055,
59058
],
[
59321,
59324
],
[
59330,
59333
],
[
59353,
59356
],
[
59490,
59493
],
[
64726,
64729
],
[
64735,
64738
],
[
64755,
64758
],
[
65322,
65325
],
[
65606,
65609
],
[
65972,
65975
],
[
66548,
66551
],
[
67637,
67640
],
[
67696,
67699
],
[
67707,
67710
],
[
67739,
67742
],
[
67798,
67801
],
[
67809,
67812
],
[
67937,
67940
],
[
68085,
68088
],
[
68173,
68176
],
[
22755,
22758
]
],
[
[
133,
137
],
[
6194,
6198
],
[
6271,
6275
],
[
13854,
13858
],
[
13931,
13935
],
[
16699,
16703
],
[
16736,
16740
],
[
16772,
16776
],
[
18899,
18903
],
[
18992,
18996
],
[
20235,
20239
],
[
20312,
20316
],
[
27264,
27268
],
[
27341,
27345
]
],
[
[
139,
142
],
[
5377,
5380
],
[
5452,
5455
],
[
5519,
5522
],
[
5594,
5597
],
[
6507,
6510
],
[
6583,
6586
],
[
6933,
6936
],
[
6941,
6944
],
[
7058,
7061
],
[
7086,
7089
],
[
7270,
7273
],
[
7279,
7282
],
[
7343,
7346
],
[
7352,
7355
],
[
7409,
7412
],
[
7432,
7435
],
[
7489,
7492
],
[
7615,
7618
],
[
7633,
7636
],
[
7688,
7691
],
[
8486,
8489
],
[
8618,
8621
],
[
8629,
8632
],
[
8883,
8886
],
[
8899,
8902
],
[
8917,
8920
],
[
8934,
8937
],
[
8952,
8955
],
[
8970,
8973
],
[
8988,
8991
],
[
9005,
9008
],
[
9023,
9026
],
[
9041,
9044
],
[
9212,
9215
],
[
9261,
9264
],
[
9309,
9312
],
[
9327,
9330
],
[
9362,
9365
],
[
9411,
9414
],
[
9738,
9741
],
[
9756,
9759
],
[
9909,
9912
],
[
9946,
9949
],
[
9974,
9977
],
[
10015,
10018
],
[
10025,
10028
],
[
10066,
10069
],
[
10093,
10096
],
[
10121,
10124
],
[
10149,
10152
],
[
10173,
10176
],
[
10202,
10205
],
[
10248,
10251
],
[
10290,
10293
],
[
10340,
10343
],
[
10386,
10389
],
[
10419,
10422
],
[
10473,
10476
],
[
10506,
10509
],
[
10540,
10543
],
[
10573,
10576
],
[
10599,
10602
],
[
10626,
10629
],
[
10652,
10655
],
[
10679,
10682
],
[
10721,
10724
],
[
10843,
10846
],
[
10871,
10874
],
[
10910,
10913
],
[
10935,
10938
],
[
10961,
10964
],
[
10987,
10990
],
[
11014,
11017
],
[
11041,
11044
],
[
11072,
11075
],
[
11127,
11130
],
[
11166,
11169
],
[
11206,
11209
],
[
11268,
11271
],
[
11331,
11334
],
[
11370,
11373
],
[
11410,
11413
],
[
11471,
11474
],
[
11534,
11537
],
[
11587,
11590
],
[
11640,
11643
],
[
11667,
11670
],
[
11701,
11704
],
[
11728,
11731
],
[
11762,
11765
],
[
11789,
11792
],
[
11823,
11826
],
[
11849,
11852
],
[
11884,
11887
],
[
11915,
11918
],
[
11950,
11953
],
[
11997,
12000
],
[
12044,
12047
],
[
12105,
12108
],
[
12165,
12168
],
[
12228,
12231
],
[
12259,
12262
],
[
12282,
12285
],
[
12313,
12316
],
[
12337,
12340
],
[
12369,
12372
],
[
12392,
12395
],
[
12424,
12427
],
[
12448,
12451
],
[
12479,
12482
],
[
12517,
12520
],
[
12552,
12555
],
[
12599,
12602
],
[
12638,
12641
],
[
12714,
12717
],
[
12748,
12751
],
[
12796,
12799
],
[
12828,
12831
],
[
12976,
12979
],
[
12986,
12989
],
[
13132,
13135
],
[
13312,
13315
],
[
13440,
13443
],
[
13499,
13502
],
[
13569,
13572
],
[
13640,
13643
],
[
13693,
13696
],
[
13745,
13748
],
[
13798,
13801
],
[
13850,
13853
],
[
13903,
13906
],
[
13955,
13958
],
[
14008,
14011
],
[
14060,
14063
],
[
14112,
14115
],
[
14163,
14166
],
[
14167,
14170
],
[
14215,
14218
],
[
14243,
14246
],
[
14266,
14269
],
[
14318,
14321
],
[
14369,
14372
],
[
14421,
14424
],
[
14472,
14475
],
[
14528,
14531
],
[
14571,
14574
],
[
14635,
14638
],
[
14702,
14705
],
[
14730,
14733
],
[
14781,
14784
],
[
14813,
14816
],
[
14820,
14823
],
[
14854,
14857
],
[
14886,
14889
],
[
14893,
14896
],
[
14927,
14930
],
[
14959,
14962
],
[
14966,
14969
],
[
15000,
15003
],
[
15032,
15035
],
[
15057,
15060
],
[
15089,
15092
],
[
15103,
15106
],
[
15121,
15124
],
[
15153,
15156
],
[
15167,
15170
],
[
15192,
15195
],
[
15222,
15225
],
[
15247,
15250
],
[
15277,
15280
],
[
15291,
15294
],
[
15339,
15342
],
[
15398,
15401
],
[
15455,
15458
],
[
15513,
15516
],
[
15574,
15577
],
[
15623,
15626
],
[
15651,
15654
],
[
15729,
15732
],
[
15766,
15769
],
[
15840,
15843
],
[
15866,
15869
],
[
15903,
15906
],
[
15951,
15954
],
[
15964,
15967
],
[
16012,
16015
],
[
19920,
19923
],
[
19928,
19931
],
[
19960,
19963
],
[
20548,
20551
],
[
20624,
20627
],
[
20930,
20933
],
[
20996,
20999
],
[
25842,
25845
],
[
25976,
25979
],
[
26107,
26110
],
[
26231,
26234
],
[
26951,
26954
],
[
26959,
26962
],
[
26966,
26969
],
[
27799,
27802
],
[
27923,
27926
],
[
30433,
30436
],
[
32387,
32390
],
[
33763,
33766
],
[
43475,
43478
],
[
43526,
43529
],
[
43561,
43564
],
[
43577,
43580
],
[
44064,
44067
],
[
44122,
44125
],
[
44826,
44829
],
[
45067,
45070
],
[
45406,
45409
],
[
46627,
46630
],
[
46742,
46745
],
[
46757,
46760
],
[
46873,
46876
],
[
46996,
46999
],
[
47013,
47016
],
[
47135,
47138
],
[
47152,
47155
],
[
47338,
47341
],
[
47453,
47456
],
[
47568,
47571
],
[
47691,
47694
],
[
47814,
47817
],
[
47831,
47834
],
[
47983,
47986
],
[
48098,
48101
],
[
48214,
48217
],
[
48337,
48340
],
[
48461,
48464
],
[
48478,
48481
],
[
48657,
48660
],
[
48672,
48675
],
[
48810,
48813
],
[
48825,
48828
],
[
48963,
48966
],
[
48980,
48983
],
[
49132,
49135
],
[
49149,
49152
],
[
49301,
49304
],
[
49318,
49321
],
[
49833,
49836
],
[
50097,
50100
],
[
50128,
50131
],
[
50176,
50179
],
[
50269,
50272
],
[
50315,
50318
],
[
50399,
50402
],
[
50446,
50449
],
[
50545,
50548
],
[
50580,
50583
],
[
50870,
50873
],
[
51708,
51711
],
[
51767,
51770
],
[
51846,
51849
],
[
52025,
52028
],
[
52098,
52101
],
[
52131,
52134
],
[
52164,
52167
],
[
52203,
52206
],
[
52236,
52239
],
[
52313,
52316
],
[
52360,
52363
],
[
52453,
52456
],
[
55851,
55854
],
[
55927,
55930
],
[
55937,
55940
],
[
57491,
57494
],
[
57575,
57578
],
[
57695,
57698
],
[
57769,
57772
],
[
57822,
57825
],
[
58984,
58987
],
[
58994,
58997
],
[
60538,
60541
],
[
60583,
60586
],
[
60664,
60667
],
[
60798,
60801
],
[
64856,
64859
],
[
64866,
64869
],
[
65028,
65031
],
[
65052,
65055
],
[
65068,
65071
],
[
65111,
65114
],
[
65282,
65285
],
[
65571,
65574
],
[
65818,
65821
],
[
65871,
65874
],
[
66514,
66517
],
[
67245,
67248
],
[
67303,
67306
],
[
67406,
67409
],
[
67437,
67440
],
[
67492,
67495
],
[
67503,
67506
],
[
67535,
67538
],
[
67592,
67595
],
[
67603,
67606
],
[
68286,
68289
],
[
68297,
68300
],
[
68421,
68424
],
[
68657,
68660
],
[
16077,
16080
],
[
49569,
49572
]
],
[
[
152,
156
],
[
6089,
6093
],
[
6166,
6170
],
[
10486,
10490
],
[
10520,
10524
],
[
10553,
10557
],
[
12460,
12464
],
[
12494,
12498
],
[
13749,
13753
],
[
13826,
13830
],
[
20130,
20134
],
[
20207,
20211
],
[
27159,
27163
],
[
27236,
27240
],
[
50088,
50092
],
[
50183,
50187
],
[
50292,
50296
],
[
50326,
50330
],
[
50422,
50426
],
[
50457,
50461
],
[
57296,
57300
],
[
57329,
57333
],
[
57502,
57506
],
[
57589,
57593
],
[
57709,
57713
],
[
60457,
60461
],
[
60484,
60488
],
[
60621,
60625
]
],
[
[
158,
163
],
[
2025,
2030
],
[
10423,
10428
],
[
16654,
16659
],
[
23266,
23271
],
[
38665,
38670
],
[
38704,
38709
],
[
38736,
38741
],
[
38764,
38769
],
[
38795,
38800
],
[
38826,
38831
],
[
38871,
38876
],
[
38901,
38906
],
[
38948,
38953
],
[
38981,
38986
],
[
39081,
39086
],
[
39292,
39297
],
[
39371,
39376
],
[
39432,
39437
],
[
39447,
39452
],
[
39522,
39527
],
[
39585,
39590
],
[
39615,
39620
],
[
39645,
39650
],
[
39701,
39706
],
[
39776,
39781
],
[
39807,
39812
],
[
40209,
40214
],
[
40434,
40439
],
[
40617,
40622
],
[
40710,
40715
],
[
40752,
40757
],
[
40831,
40836
],
[
41050,
41055
],
[
41066,
41071
],
[
41117,
41122
],
[
41169,
41174
],
[
41230,
41235
],
[
41305,
41310
],
[
41371,
41376
],
[
41659,
41664
],
[
45602,
45607
],
[
45710,
45715
],
[
45758,
45763
],
[
45838,
45843
],
[
45886,
45891
],
[
45986,
45991
],
[
46038,
46043
],
[
46113,
46118
],
[
41445,
41450
]
],
[
[
165,
168
],
[
5807,
5810
],
[
5819,
5822
],
[
5830,
5833
],
[
6010,
6013
],
[
6048,
6051
],
[
6115,
6118
],
[
6153,
6156
],
[
6220,
6223
],
[
6258,
6261
],
[
6325,
6328
],
[
6363,
6366
],
[
6429,
6432
],
[
6467,
6470
],
[
6532,
6535
],
[
6570,
6573
],
[
6635,
6638
],
[
6673,
6676
],
[
6738,
6741
],
[
6776,
6779
],
[
13455,
13458
],
[
13463,
13466
],
[
13476,
13479
],
[
13670,
13673
],
[
13708,
13711
],
[
13775,
13778
],
[
13813,
13816
],
[
13880,
13883
],
[
13918,
13921
],
[
13985,
13988
],
[
14023,
14026
],
[
14089,
14092
],
[
14127,
14130
],
[
14192,
14195
],
[
14230,
14233
],
[
14295,
14298
],
[
14333,
14336
],
[
14398,
14401
],
[
14436,
14439
],
[
19743,
19746
],
[
19754,
19757
],
[
19789,
19792
],
[
20051,
20054
],
[
20089,
20092
],
[
20156,
20159
],
[
20194,
20197
],
[
20261,
20264
],
[
20299,
20302
],
[
20366,
20369
],
[
20404,
20407
],
[
20470,
20473
],
[
20508,
20511
],
[
20573,
20576
],
[
20611,
20614
],
[
20676,
20679
],
[
20714,
20717
],
[
20779,
20782
],
[
20817,
20820
],
[
26770,
26773
],
[
26781,
26784
],
[
26816,
26819
],
[
27080,
27083
],
[
27118,
27121
],
[
27185,
27188
],
[
27223,
27226
],
[
27290,
27293
],
[
27328,
27331
],
[
27395,
27398
],
[
27433,
27436
],
[
27499,
27502
],
[
27537,
27540
],
[
27602,
27605
],
[
27640,
27643
],
[
58921,
58924
],
[
58932,
58935
],
[
58945,
58948
],
[
64622,
64625
],
[
64635,
64638
],
[
64646,
64649
],
[
64999,
65002
]
],
[
[
170,
173
],
[
6816,
6819
],
[
14476,
14479
],
[
20857,
20860
],
[
27680,
27683
],
[
32689,
32692
],
[
32700,
32703
],
[
35050,
35053
],
[
35067,
35070
],
[
35294,
35297
],
[
37970,
37973
],
[
37981,
37984
],
[
37994,
37997
],
[
38410,
38413
],
[
38484,
38487
],
[
39106,
39109
],
[
39721,
39724
],
[
39732,
39735
],
[
39940,
39943
],
[
40016,
40019
],
[
40300,
40303
],
[
41250,
41253
],
[
41325,
41328
],
[
42649,
42652
],
[
42660,
42663
],
[
42673,
42676
],
[
43137,
43140
],
[
43211,
43214
],
[
62376,
62379
],
[
62411,
62414
],
[
62421,
62424
],
[
64041,
64044
],
[
64076,
64079
],
[
64087,
64090
]
],
[
[
175,
180
],
[
32051,
32056
]
],
[
[
182,
187
],
[
42150,
42155
],
[
42187,
42192
]
],
[
[
189,
194
],
[
36645,
36650
]
],
[
[
196,
197
],
[
5747,
5748
],
[
13395,
13396
],
[
19686,
19687
],
[
26385,
26386
],
[
26506,
26507
],
[
26576,
26577
],
[
30649,
30650
],
[
32507,
32508
],
[
34705,
34706
],
[
45784,
45785
],
[
45912,
45913
],
[
46072,
46073
],
[
58225,
58226
],
[
58343,
58344
],
[
58603,
58604
],
[
61225,
61226
],
[
61351,
61352
]
],
[
[
199,
205
],
[
45595,
45601
],
[
45675,
45681
],
[
45803,
45809
],
[
45945,
45951
]
],
[
[
207,
213
],
[
46105,
46111
],
[
46157,
46163
]
],
[
[
215,
217
],
[
4770,
4772
],
[
4826,
4828
],
[
39677,
39679
],
[
40478,
40480
],
[
40947,
40949
],
[
40976,
40978
],
[
57495,
57497
],
[
57519,
57521
],
[
57579,
57581
],
[
[... def_use_chains continued: nested lists of [start, end] integer offset pairs, one inner list per definition-use chain, pretty-printed one number per line in the source dump; full listing elided ...] |
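Each code row is paired with a def_use_chains column, and the pair widths match plausible identifier lengths, suggesting the integers are character offsets into the code string. A minimal decoding sketch, assuming [start, end) character offsets (the offset convention is an assumption, not stated in the dump):

# Hypothetical decoder for one dataset row.
def chains_to_identifiers(code, chains):
    """Map each assumed [start, end) offset pair back to its identifier text."""
    return [[code[start:end] for start, end in chain] for chain in chains]

# Made-up offsets in the spirit of the dump:
code = "import cv2\nframe = cv2.imread('x.png')\n"
chains = [[[7, 10], [19, 22]]]  # one chain: 'cv2' defined, then used once
print(chains_to_identifiers(code, chains))  # [['cv2', 'cv2']]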
# coding=utf-8
#########################################################################################
# This code provides SCT integration into FSLeyes for the following tools:
#
# - sct_propseg
# - sct_deepseg_gm
# - sct_deepseg_sc
# - sct_label_vertebrae
# - sct_register_to_template
# - sct_process_segmentation
# - sct_dmri_moco
# - sct_dmri_compute_dti
#
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2018 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Christian S. Perone, Thiago JR Rezende, Julien Cohen-Adad
##########################################################################################
# TODO: add keyboard shortcuts to Run (ctrl+R)
# TODO: add help when user leaves cursor on button
import os
import select
import subprocess
import signal
from threading import Thread
import logging
import webbrowser
import wx
import wx.lib.agw.aui as aui
import wx.html as html
logger = logging.getLogger(__name__)
aui_manager = frame.getAuiManager() # from FSLeyes context
class ErrorDialog(wx.Dialog):
"""
Panel to display if there is an error, instructing user what to do.
"""
def __init__(self, parent, msg=None):
wx.Dialog.__init__(self, parent, title="An Error Occurred")
self.SetSize((600, 275))
if msg is None:
msg = "An error has occurred while running SCT. Please go to the Terminal, copy all the content and paste it as a new issue in SCT's forum: \
http://forum.spinalcordmri.org/"
vbox = wx.BoxSizer(wx.VERTICAL)
error_msg_box = wx.TextCtrl(self, wx.ID_ANY, size=(500,150),
style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL)
error_msg_box.AppendText(msg)
vbox.Add(error_msg_box, 0, wx.TOP|wx.EXPAND, 20)
btns = self.CreateSeparatedButtonSizer(wx.OK)
vbox.Add(btns, 0, wx.CENTER|wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
save_ico = wx.ArtProvider.GetBitmap(wx.ART_ERROR, wx.ART_TOOLBAR, (50, 50))
img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight()))
hbox.Add(img_info, 0, wx.ALL, 20)
hbox.Add(vbox, 0, wx.ALL, 0)
self.SetSizer(hbox)
self.Centre()
self.CenterOnParent()
class ProgressDialog(wx.Dialog):
"""
Panel to display while running SCT command.
"""
def __init__(self, parent):
self.stop_run = False
wx.Dialog.__init__(self, parent, title="SCT Processing")
self.SetSize((300, 120))
vbox = wx.BoxSizer(wx.VERTICAL)
lbldesc = wx.StaticText(self, id=wx.ID_ANY, label="Processing, please wait...")
vbox.Add(lbldesc, 0, wx.ALIGN_CENTER|wx.ALL, 10)
stop_button = wx.Button(self, wx.ID_CANCEL, 'Stop')
vbox.Add(stop_button, 0, wx.CENTER|wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
# TODO: use a nicer image, showing two gears (similar to ID_EXECUTE)
save_ico = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_TOOLBAR, (50, 50))
img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight()))
hbox.Add(img_info, 0, wx.ALL, 10)
hbox.Add(vbox, 0, wx.ALL, 0)
self.SetSizer(hbox)
self.Centre()
self.CenterOnParent()
stop_button.Bind(wx.EVT_BUTTON, self.OnStop)
def OnStop(self, event):
print(f"Stop was pressed. event={event}")
self.stop_run = True
self.Destroy()
class SCTCallThread(Thread):
def __init__(self, command, text_window_ctrl):
Thread.__init__(self)
self.command = [command]
self.status = None
self.stdout = ""
        self.stderr = ""
        self.p = None  # subprocess handle, set once sct_call() launches the command
        self.text_window = text_window_ctrl
def sct_call(self, command):
env = os.environ.copy()
if 'PYTHONHOME' in env:
del env["PYTHONHOME"]
if 'PYTHONPATH' in env:
del env["PYTHONPATH"]
        # self.command wraps the full command-line string in a one-element list;
        # pass the string itself so shell=True behaves the same on all platforms.
        proc = subprocess.Popen(self.command[0], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env)
self.p = proc
stdout_fd = proc.stdout.fileno()
stderr_fd = proc.stderr.fileno()
os.set_blocking(stdout_fd, False)
os.set_blocking(stderr_fd, False)
while proc.poll() is None:
timeout = 1
rs = [ proc.stdout, proc.stderr ]
ws = []
xs = []
rs, ws, xs = select.select(rs, ws, xs, timeout)
for r in rs:
msg = None
if r is proc.stdout:
msg = os.read(stdout_fd, 1024)
if msg:
self.stdout += msg.decode('utf-8')
elif r is proc.stderr:
msg = os.read(stderr_fd, 1024)
if msg:
self.stderr += msg.decode('utf-8')
if msg:
                    # msg is bytes from os.read(); TextCtrl.WriteText expects str
                    wx.CallAfter(self.text_window.WriteText, msg.decode('utf-8'))
return proc.returncode, self.stdout, self.stderr
def sct_interrupt(self):
if self.p:
self.p.send_signal(signal.SIGINT)
else:
print("No process running?")
def run(self):
"""
overrides Thread.run() function
:return:
"""
self.status, self.stdout, self.stderr = self.sct_call(self.command)
class TextBox:
"""
Create a horizontal box composed of a button (left) and a text box (right). When the button is
pressed, the file name highlighted in the list of overlay is fetched and passed into the text box.
This file name can be accessed by: TextBox.textctrl.GetValue()
"""
def __init__(self, sctpanel, label=""):
"""
:param sctpanel: SCTPanel Class
:param label: Label to display on the button
"""
self.textctrl = wx.TextCtrl(sctpanel)
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
button_fetch_file = wx.Button(sctpanel, -1, label=label)
button_fetch_file.Bind(wx.EVT_BUTTON, self.get_highlighted_file_name)
self.hbox.Add(button_fetch_file, 0, wx.ALIGN_LEFT| wx.RIGHT, 10)
self.hbox.Add(self.textctrl, 1, wx.ALIGN_LEFT|wx.LEFT, 10)
def get_highlighted_file_name(self, event):
"""
Fetch path to file highlighted in the Overlay list.
"""
        selected_overlay = displayCtx.getSelectedOverlay()  # displayCtx is provided by the FSLeyes runtime context
filename_path = selected_overlay.dataSource
print("Fetched file name: {}".format(filename_path))
self.textctrl.SetValue(filename_path)
def get_file_name(self):
return self.textctrl.GetValue()
# Creates the standard panel for each tool
class SCTPanel(wx.Panel):
"""
Creates the standard panel for each tool
:param sizer_h: Main wx.BoxSizer object that encloses SCT information, for each panel
"""
DESCRIPTION_SCT = """
<br><br><b>General citation (please always cite)</b>:<br>
De Leener B, Levy S, Dupont SM, Fonov VS, Stikov N, Louis Collins D, Callot V,
Cohen-Adad J. <i>SCT: Spinal Cord Toolbox, an open-source software for processing
spinal cord MRI data</i>. Neuroimage. 2017 Jan 15;145(Pt A):24-43.
"""
SCT_DIR_ENV = 'SCT_DIR'
SCT_LOGO_REL_PATH = 'documentation/imgs/logo_sct_small.png'
SCT_TUTORIAL_PATH = 'documentation/Manual_v1_SCT.pdf' # TODO: fix this path
def __init__(self, parent, id_):
super(SCTPanel, self).__init__(parent=parent, id=id_)
# main layout consists of one row with 3 main columns
self.main_row = wx.BoxSizer(wx.HORIZONTAL)
self.column_left = wx.BoxSizer(wx.VERTICAL)
self.column_center = wx.BoxSizer(wx.VERTICAL)
self.column_right = wx.BoxSizer(wx.VERTICAL)
sct_logo = self.get_logo()
logo_help_hbox = wx.BoxSizer(wx.HORIZONTAL)
logo_help_hbox.Add(sct_logo, 1, wx.HORIZONTAL, 5)
button_help = wx.Button(self, id=id_, label="Help")
button_help.Bind(wx.EVT_BUTTON, self.help_url)
logo_help_hbox.Add(button_help, 0, wx.ALIGN_BOTTOM|wx.LEFT, 90)
self.column_left.Add(logo_help_hbox, proportion=0, flag=wx.ALL, border=5)
html_desc_window = self.get_description()
self.column_left.Add(html_desc_window, 0, wx.ALL, 5)
self.log_window = wx.TextCtrl(self, wx.ID_ANY, size=(100, 300),
style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL)
self.column_right.Add(self.log_window, 1, wx.EXPAND|wx.ALL, 5)
self.main_row.Add(self.column_left, 0, wx.ALL, 10)
self.main_row.Add(self.column_center, 1, wx.ALL, 10)
self.main_row.Add(self.column_right, 1, wx.ALL, 10)
self.SetSizerAndFit(self.main_row)
def log_to_window(self, msg, level=None):
if level is None:
self.log_window.AppendText("{}\n".format(msg))
else:
self.log_window.AppendText("{}: {}\n".format(level, msg))
def tutorial(self,event):
pdfpath = os.path.join(os.environ[self.SCT_DIR_ENV],self.SCT_TUTORIAL_PATH)
print('PDF path:', pdfpath)
cmd_line = "open {}".format(pdfpath)
print('Command line:', cmd_line)
self.call_sct_command(cmd_line)
def help_url(self, event):
url = "http://forum.spinalcordmri.org/c/sct"
webbrowser.open(url)
def get_logo(self):
logo_file = os.path.join(os.environ[self.SCT_DIR_ENV],
self.SCT_LOGO_REL_PATH)
png = wx.Image(logo_file,
wx.BITMAP_TYPE_ANY).ConvertToBitmap()
img_logo = wx.StaticBitmap(self, -1, png, wx.DefaultPosition,
(png.GetWidth(), png.GetHeight()))
return img_logo
def get_description(self):
txt_style = wx.VSCROLL | \
wx.HSCROLL | wx.TE_READONLY | \
wx.BORDER_SIMPLE
htmlw = html.HtmlWindow(self, wx.ID_ANY,
size=(400, 220),
style=txt_style)
htmlw.SetPage(self.DESCRIPTION + self.DESCRIPTION_SCT)
htmlw.SetStandardFonts(size=10, normal_face="Noto Sans")
return htmlw
def call_sct_command(self, command):
self.log_to_window("Running: {}".format(command), level="INFO")
progress_dialog = ProgressDialog(frame)
progress_dialog.Show()
thr = SCTCallThread(command, self.log_window)
thr.start()
# No access to app.pending() from here
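        # Poll the worker in 100 ms slices; wx.Yield() processes pending GUI
        # events so the ProgressDialog (and its Stop button) stays responsive.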
while True:
thr.join(0.1)
wx.Yield()
            if not thr.is_alive():  # isAlive() was removed in Python 3.9
break
if progress_dialog.stop_run:
thr.sct_interrupt()
thr.join()
self.log_to_window("Command completed.", level="INFO")
if progress_dialog:
progress_dialog.Destroy()
# show stderr output if an error occurred
if thr.status:
self.log_to_window("An error occurred", level="ERROR")
error_dialog = ErrorDialog(frame, msg=thr.stderr)
error_dialog.Show()
class TabPanelPropSeg(SCTPanel):
"""
sct_propseg
"""
DESCRIPTION = """
<b>Function description</b>:<br>
Segment the spinal cord using a deformable 3D mesh. This method is fast and robust, but could be prone to "leaking"
if the contrast between the cord and the CSF is not high enough.
<br><br>
<b>Usage</b>:
<br>
Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the
appropriate contrast and click "Run". For more options, please use the Terminal version of this function.
<br><br>
<b>Specific citation</b>:
<br>
De Leener et al. <i>Robust, accurate and fast automatic segmentation of the spinal cord.</i> Neuroimage 2014
"""
def __init__(self, parent):
super(TabPanelPropSeg, self).__init__(parent=parent, id_=wx.ID_ANY)
self.hbox_filein = TextBox(self, label="Input file")
lbl_contrasts = ['t1', 't2', 't2s', 'dwi']
self.rbox_contrast = wx.RadioBox(self, label='Select contrast:',
choices=lbl_contrasts,
majorDimension=1,
style=wx.RA_SPECIFY_ROWS)
button_run = wx.Button(self, id=wx.ID_ANY, label="Run")
button_run.Bind(wx.EVT_BUTTON, self.on_button_run)
self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5)
self.column_center.Add(button_run, 0, wx.ALL, 5)
def on_button_run(self, event):
# Build and run SCT command
fname_input = self.hbox_filein.get_file_name()
if not fname_input:
msg = "No input file selected! Select a file from the overlay list and then press Input file."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
contrast = self.rbox_contrast.GetStringSelection()
base_name = os.path.basename(fname_input)
fname, fext = base_name.split(os.extsep, 1)
fname_out = "{}_seg.{}".format(fname, fext)
cmd_line = "sct_propseg -i {} -c {}".format(fname_input, contrast)
self.call_sct_command(cmd_line)
# Add output to the list of overlay
image = Image(fname_out) # <class 'fsl.data.image.Image'>
overlayList.append(image)
opts = displayCtx.getOpts(image)
opts.cmap = 'red'
class TabPanelSCSeg(SCTPanel):
"""
sct_deepseg_sc
"""
DESCRIPTION = """
<b>Function description</b>:<br>
Segment the spinal cord using deep learning. The convolutional neural network was trained on ~1,500 subjects
from multiple centers, covering various pathologies (compression, MS, ALS, etc.).
<br><br>
<b>Usage</b>:
<br>
Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the
appropriate contrast and click "Run". For more options, please use the Terminal version of this function.
<br><br>
<b>Specific citation</b>:
<br>
Gros et al. <i>Automatic segmentation of the spinal cord and intramedullary multiple sclerosis lesions with
convolutional neural networks.</i> Neuroimage 2019
"""
def __init__(self, parent):
super(TabPanelSCSeg, self).__init__(parent=parent, id_=wx.ID_ANY)
self.hbox_filein = TextBox(self, label="Input file")
lbl_contrasts = ['t1', 't2', 't2s', 'dwi']
self.rbox_contrast = wx.RadioBox(self, label='Select contrast:',
choices=lbl_contrasts,
majorDimension=1,
style=wx.RA_SPECIFY_ROWS)
button_run = wx.Button(self, id=wx.ID_ANY, label="Run")
button_run.Bind(wx.EVT_BUTTON, self.on_button_run)
self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5)
self.column_center.Add(button_run, 0, wx.ALL, 5)
def on_button_run(self, event):
# Build and run SCT command
fname_input = self.hbox_filein.get_file_name()
if not fname_input:
msg = "No input file selected! Select a file from the overlay list and then press Input file."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
contrast = self.rbox_contrast.GetStringSelection()
base_name = os.path.basename(fname_input)
fname, fext = base_name.split(os.extsep, 1)
fname_out = "{}_seg.{}".format(fname, fext)
cmd_line = "sct_deepseg_sc -i {} -c {}".format(fname_input, contrast)
self.call_sct_command(cmd_line)
# Add output to the list of overlay
image = Image(fname_out) # <class 'fsl.data.image.Image'>
overlayList.append(image)
opts = displayCtx.getOpts(image)
opts.cmap = 'red'
class TabPanelGMSeg(SCTPanel):
"""
sct_deepseg_gm
"""
DESCRIPTION = """
<b>Function description</b>:<br>
Segment the spinal cord gray matter using deep learning. The convolutional neural network features dilated
convolutions and was trained on 232 subjects (3963 axial slices) from multiple centers, covering various
pathologies (compression, MS, ALS, etc.).
<br><br>
<b>Usage</b>:
<br>
Select an image from the overlay list that has a good white and gray matter contrast (e.g., T2*-weighted image),
then click "Run". For more options, please use the Terminal version of this function.
<br><br>
<b>Specific citation</b>:
<br>
Perone et al. <i>Spinal cord gray matter segmentation using deep dilated convolutions.</i> Sci Rep. 2018
"""
def __init__(self, parent):
super(TabPanelGMSeg, self).__init__(parent=parent, id_=wx.ID_ANY)
self.hbox_filein = TextBox(self, label="Input file")
button_run = wx.Button(self, id=wx.ID_ANY, label="Run")
button_run.Bind(wx.EVT_BUTTON, self.on_button_run)
self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(button_run, 0, wx.ALL, 5)
def on_button_run(self, event):
# Build and run SCT command
fname_input = self.hbox_filein.get_file_name()
if not fname_input:
msg = "No input file selected! Select a file from the overlay list and then press Input file."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
base_name = os.path.basename(fname_input)
fname, fext = base_name.split(os.extsep, 1)
fname_out = "{}_gmseg.{}".format(fname, fext)
cmd_line = "sct_deepseg_gm -i {} -o {}".format(fname_input, fname_out)
self.call_sct_command(cmd_line)
# Add output to the list of overlay
image = Image(fname_out) # <class 'fsl.data.image.Image'>
overlayList.append(image)
opts = displayCtx.getOpts(image)
opts.cmap = 'yellow'
class TabPanelVertLB(SCTPanel):
"""
sct_label_vertebrae
"""
DESCRIPTION = """
<b>Function description</b>:<br>
Automatically find intervertebral discs and label an input segmentation with vertebral levels. The values in the
output labeled segmentation correspond to the level, e.g., 2 corresponds to C2, 8 corresponds to T1, etc.
<br><br>
<b>Usage</b>:
<br>
Select an image from the overlay list where discs are clearly visible (e.g., T1w or T2w scans are usually good for
this task). Then, select a segmentation associated with the image, select the appropriate contrast and click "Run".
For more options, please use the Terminal version of this function.
<br><br>
<b>Specific citation</b>:
<br>
Ullmann et al. <i>Automatic labeling of vertebral levels using a robust template-based approach.</i> Int J Biomed
Imaging 2014
"""
def __init__(self, parent):
super(TabPanelVertLB, self).__init__(parent=parent, id_=wx.ID_ANY)
self.hbox_im = TextBox(self, label="Input image")
self.hbox_seg = TextBox(self, label="Input segmentation")
lbl_contrasts = ['t1', 't2']
self.rbox_contrast = wx.RadioBox(self, label='Select contrast:',
choices=lbl_contrasts,
majorDimension=1,
style=wx.RA_SPECIFY_ROWS)
# Run button
button_run = wx.Button(self, id=wx.ID_ANY, label="Run")
button_run.Bind(wx.EVT_BUTTON, self.on_button_run)
self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5)
self.column_center.Add(button_run, 0, wx.ALL, 5)
def on_button_run(self, event):
# Build and run SCT command
        fname_im = self.hbox_im.get_file_name()
if not fname_im:
msg = "No input image selected! Select an image from the overlay list and then press Input image."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
fname_seg = self.hbox_seg.get_file_name()
if not fname_seg:
msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
contrast = self.rbox_contrast.GetStringSelection()
base_name = os.path.basename(fname_seg)
fname, fext = base_name.split(os.extsep, 1)
fname_out = "{}_labeled.{}".format(fname, fext)
cmd_line = "sct_label_vertebrae -i {} -s {} -c {}".format(fname_im, fname_seg, contrast)
self.call_sct_command(cmd_line)
# Add output to the list of overlay
image = Image(fname_out) # <class 'fsl.data.image.Image'>
overlayList.append(image)
opts = displayCtx.getOpts(image)
opts.cmap = 'subcortical'
class TabPanelRegisterToTemplate(SCTPanel):
"""
sct_register_to_template
"""
DESCRIPTION = """
<b>Function description</b>:<br>
Register an image with the default PAM50 spinal cord MRI template.
<br><br>
<b>Usage</b>:
<br>
Select an image, its segmentation and a label file. The label file contains single-pixel labels located at the
posterior edge of the intervertebral discs. The value of the label corresponds to the lower vertebrae, e.g., label 3
corresponds to the C2-C3 disc. This label file can be created within FSLeyes by clicking on Tools > Edit mode, then
Edit > Create mask. Select the "pen", adjust the size to one pixel width and select the proper label value, then
click on the image and save the label(s): Overlay > save. Then, select the appropriate contrast and click "Run".
For more options, please use the Terminal version of this function.
<br><br>
<b>Specific citation</b>:
<br>
De Leener et al. <i>PAM50: Unbiased multimodal template of the brainstem and spinal cord aligned with the ICBM152
space.</i> Neuroimage 2017
"""
def __init__(self, parent):
super(TabPanelRegisterToTemplate, self).__init__(parent=parent, id_=wx.ID_ANY)
self.hbox_im = TextBox(self, label="Input image")
self.hbox_seg = TextBox(self, label="Input segmentation")
self.hbox_label = TextBox(self, label="Input labels")
lbl_contrasts = ['t1', 't2']
self.rbox_contrast = wx.RadioBox(self, label='Select contrast:',
choices=lbl_contrasts,
majorDimension=1,
style=wx.RA_SPECIFY_ROWS)
button_run = wx.Button(self, id=wx.ID_ANY, label="Run")
button_run.Bind(wx.EVT_BUTTON, self.on_button_run)
self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.hbox_label.hbox, 0, wx.EXPAND|wx.ALL, 5)
self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5)
self.column_center.Add(button_run, 0, wx.ALL, 5)
def on_button_run(self, event):
# Build and run SCT command
        fname_im = self.hbox_im.get_file_name()
if not fname_im:
msg = "No input image selected! Select an image from the overlay list and then press Input image."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
fname_seg = self.hbox_seg.get_file_name()
if not fname_seg:
msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
fname_label = self.hbox_label.get_file_name()
if not fname_label:
msg = "No input labels selected! Select input labels from the overlay list and then press Input labels."
self.log_to_window(msg, level="ERROR")
error_dialog = ErrorDialog(frame, msg=msg)
error_dialog.Show()
return
contrast = self.rbox_contrast.GetStringSelection()
cmd_line = \
"sct_register_to_template -i {} -s {} -ldisc {} -c {}".format(fname_im, fname_seg, fname_label, contrast)
self.call_sct_command(cmd_line)
# Add output to the list of overlay
base_name = os.path.basename(fname_im)
fname, fext = base_name.split(os.extsep, 1)
# TODO: at some point we will modify SCT's function to output the file name below
# fname_out = "PAM50_{}_reg.{}".format(contrast, fext)
fname_out = 'template2anat.nii.gz'
image = Image(fname_out) # <class 'fsl.data.image.Image'>
overlayList.append(image)
opts = displayCtx.getOpts(image)
opts.cmap = 'gray'
def run_main():
window = aui_manager.GetManagedWindow()
if 'SCT_DIR' not in os.environ:
dlg = wx.MessageDialog(window, 'Spinal Cord Toolbox (SCT) was not '
'found in your system. Make sure you open fsleyes '
'from the Terminal (not by clicking on the App). '
'If you are indeed running from the Terminal, please '
'check the installation procedure at: '
'https://github.com/neuropoly/spinalcordtoolbox',
'SCT not found!', wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
return
# Adding panels
notebook = aui.AuiNotebook(parent=window)
panel_propseg = TabPanelPropSeg(parent=notebook)
panel_sc = TabPanelSCSeg(parent=notebook)
panel_gm = TabPanelGMSeg(parent=notebook)
panel_vlb = TabPanelVertLB(parent=notebook)
panel_reg = TabPanelRegisterToTemplate(parent=notebook)
notebook.AddPage(page=panel_propseg, caption="sct_propseg", select=True)
notebook.AddPage(page=panel_sc, caption="sct_deepseg_sc", select=False)
notebook.AddPage(page=panel_gm, caption="sct_deepseg_gm", select=False)
notebook.AddPage(page=panel_vlb, caption="sct_label_vertebrae", select=False)
notebook.AddPage(page=panel_reg, caption="sct_register_to_template", select=False)
aui_manager.AddPane(notebook, aui.AuiPaneInfo().Name("notebook_content").CenterPane().PaneBorder(False))
aui_manager.Update()
run_main()
| [... def_use_chains for the code above: nested lists of [start, end] integer offset pairs, one inner list per definition-use chain; full listing elided ...] |
# coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateTaskStatusResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""UpdateTaskStatusResponse - a model defined in huaweicloud sdk"""
super(UpdateTaskStatusResponse, self).__init__()
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
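        # Walk the declared attributes; nested models are serialized via their
        # own to_dict(), and anything listed in sensitive_list is masked.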
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateTaskStatusResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
[
[
24,
26
]
],
[
[
34,
37
],
[
895,
898
],
[
1845,
1848
]
],
[
[
84,
95
],
[
204,
215
]
],
[
[
144,
170
],
[
1971,
1997
]
],
[
[
179,
203
],
[
697,
721
],
[
2221,
2245
]
]
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .update_db_on_advisory_change import UpdateDBOnAdvisoryChange # noqa
from .generate_advisory_signed_event_on_rpm_sign import GenerateAdvisorySignedEventOnRPMSign # noqa
from .update_db_on_odcs_compose_fail import UpdateDBOnODCSComposeFail # noqa
from .cancel_event_on_freshmaker_manage_request import CancelEventOnFreshmakerManageRequest # noqa
| [
[
[
1160,
1184
]
],
[
[
1249,
1285
]
],
[
[
1338,
1363
]
],
[
[
1427,
1463
]
]
] |
import datetime
import time
import pytz
PACIFIC_TZ = pytz.timezone('America/Vancouver')
def today_pacific():
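    # Interpret the current Unix timestamp in the Pacific timezone so the
    # date rolls over at local midnight, not at the server's midnight.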
now_pacific = datetime.datetime.fromtimestamp(time.time(), PACIFIC_TZ)
return now_pacific.date()
| [
[
[
7,
15
],
[
132,
140
]
],
[
[
23,
27
],
[
164,
168
]
],
[
[
36,
40
],
[
56,
60
]
],
[
[
43,
53
],
[
177,
187
]
],
[
[
97,
110
]
]
] |
__author__ = 'Tom Schaul, [email protected]'
from scipy import array, exp, tanh, clip, log, dot, sqrt, power, pi, tan, diag, rand, real_if_close
from scipy.linalg import inv, det, svd, logm, expm2
def semilinear(x):
""" This function ensures that the values of the array are always positive. It is
    x+1 for x >= 0 and exp(x) for x < 0. """
try:
# assume x is a numpy array
shape = x.shape
x.flatten()
x = x.tolist()
except AttributeError:
# no, it wasn't: build shape from length of list
shape = (1, len(x))
def f(val):
if val < 0:
# exponential function for x<0
return safeExp(val)
else:
# linear function for x>=0
return val + 1.0
return array(map(f, x)).reshape(shape)
def semilinearPrime(x):
""" This function is the first derivative of the semilinear function (above).
It is needed for the backward pass of the module. """
try:
# assume x is a numpy array
shape = x.shape
x.flatten()
x = x.tolist()
except AttributeError:
# no, it wasn't: build shape from length of list
shape = (1, len(x))
def f(val):
if val < 0:
# exponential function for x<0
return safeExp(val)
else:
# linear function for x>=0
return 1.0
return array(map(f, x)).reshape(shape)
def safeExp(x):
""" Bounded range for the exponential function (won't produce inf or NaN). """
return exp(clip(x, -500, 500))
def sigmoid(x):
""" Logistic sigmoid function. """
return 1. / (1. + safeExp(-x))
def sigmoidPrime(x):
""" Derivative of logistic sigmoid. """
tmp = sigmoid(x)
return tmp * (1 - tmp)
def tanhPrime(x):
""" Derivative of tanh. """
tmp = tanh(x)
return 1 - tmp * tmp
def ranking(R):
""" Produces a linear ranking of the values in R. """
l = sorted(list(enumerate(R)), cmp=lambda a, b: cmp(a[1], b[1]))
l = sorted(list(enumerate(l)), cmp=lambda a, b: cmp(a[1], b[1]))
return array(map(lambda kv: kv[0], l))
def expln(x):
""" This continuous function ensures that the values of the array are always positive.
It is ln(x+1)+1 for x >= 0 and exp(x) for x < 0. """
def f(val):
if val < 0:
# exponential function for x < 0
return exp(val)
else:
# natural log function for x >= 0
return log(val + 1.0) + 1
try:
result = array(map(f, x))
except TypeError:
result = array(f(x))
return result
def explnPrime(x):
""" This function is the first derivative of the expln function (above).
It is needed for the backward pass of the module. """
def f(val):
if val < 0:
# exponential function for x<0
return exp(val)
else:
# linear function for x>=0
return 1.0 / (val + 1.0)
try:
result = array(map(f, x))
except TypeError:
result = array(f(x))
return result
def multivariateNormalPdf(z, x, sigma):
""" The pdf of a multivariate normal distribution (not in scipy).
The sample z and the mean x should be 1-dim-arrays, and sigma a square 2-dim-array. """
assert len(z.shape) == 1 and len(x.shape) == 1 and len(x) == len(z) and sigma.shape == (len(x), len(z))
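    # Implements pdf(z) = (2*pi)^(-d/2) * det(sigma)^(-1/2)
    #                     * exp(-0.5 * (z - x)' * inv(sigma) * (z - x))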
tmp = -0.5 * dot(dot((z - x), inv(sigma)), (z - x))
res = (1. / power(2.0 * pi, len(z) / 2.)) * (1. / sqrt(det(sigma))) * exp(tmp)
return res
def simpleMultivariateNormalPdf(z, detFactorSigma):
""" Assuming z has been transformed to a mean of zero and an identity matrix of covariances.
Needs to provide the determinant of the factorized (real) covariance matrix. """
dim = len(z)
return exp(-0.5 * dot(z, z)) / (power(2.0 * pi, dim / 2.) * detFactorSigma)
def multivariateCauchy(mu, sigma, onlyDiagonal=True):
""" Generates a sample according to a given multivariate Cauchy distribution. """
if not onlyDiagonal:
u, s, d = svd(sigma)
coeffs = sqrt(s)
else:
coeffs = diag(sigma)
r = rand(len(mu))
res = coeffs * tan(pi * (r - 0.5))
if not onlyDiagonal:
res = dot(d, dot(res, u))
return res + mu
def approxChiFunction(dim):
""" Returns Chi (expectation of the length of a normal random vector)
approximation according to: Ostermeier 1997. """
dim = float(dim)
return sqrt(dim) * (1 - 1 / (4 * dim) + 1 / (21 * dim ** 2))
def sqrtm(M):
""" Returns the symmetric semi-definite positive square root of a matrix. """
r = real_if_close(expm2(0.5 * logm(M)), 1e-8)
return (r + r.T) / 2
| [
[
[
0,
10
]
],
[
[
59,
64
],
[
773,
778
],
[
1397,
1402
],
[
2092,
2097
],
[
2525,
2530
],
[
2581,
2586
],
[
2995,
3000
],
[
3051,
3056
]
],
[
[
66,
69
],
[
1541,
1544
],
[
3524,
3527
],
[
3812,
3815
],
[
2392,
2395
],
[
2870,
2873
]
],
[
[
71,
75
],
[
1834,
1838
]
],
[
[
77,
81
],
[
1545,
1549
]
],
[
[
83,
86
],
[
2480,
2483
]
],
[
[
88,
91
],
[
3411,
3414
],
[
3415,
3418
],
[
3823,
3826
],
[
4241,
4244
],
[
4248,
4251
]
],
[
[
93,
97
],
[
3504,
3508
],
[
4094,
4098
],
[
4470,
4474
]
],
[
[
99,
104
],
[
3466,
3471
],
[
3837,
3842
]
],
[
[
106,
108
],
[
3478,
3480
],
[
3849,
3851
],
[
4186,
4188
]
],
[
[
110,
113
],
[
4182,
4185
]
],
[
[
115,
119
],
[
4129,
4133
]
],
[
[
121,
125
],
[
4149,
4153
]
],
[
[
127,
140
],
[
4630,
4643
]
],
[
[
166,
169
],
[
3428,
3431
]
],
[
[
171,
174
],
[
3509,
3512
]
],
[
[
176,
179
],
[
4066,
4069
]
],
[
[
181,
185
],
[
4656,
4660
]
],
[
[
187,
192
],
[
4644,
4649
]
],
[
[
199,
209
]
],
[
[
811,
826
]
],
[
[
1435,
1442
],
[
1644,
1651
],
[
667,
674
],
[
1297,
1304
]
],
[
[
1571,
1578
],
[
1734,
1741
]
],
[
[
1663,
1675
]
],
[
[
1778,
1787
]
],
[
[
1873,
1880
]
],
[
[
2130,
2135
]
],
[
[
2618,
2628
]
],
[
[
3088,
3109
]
],
[
[
3554,
3581
]
],
[
[
3887,
3905
]
],
[
[
4287,
4304
]
],
[
[
4530,
4535
]
]
] |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import os
import sys
import unittest
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn as nn
import torch.optim as optim
from opacus import PrivacyEngine
from opacus.distributed import DifferentiallyPrivateDistributedDataParallel as DPDDP
from torch.nn.parallel import DistributedDataParallel as DDP
PRIVACY_ALPHAS = [1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64))
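# Candidate Renyi-DP orders handed to the PrivacyEngine accountant as `alphas`.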
def setup_and_get_device(rank, world_size, nonce=0):
"""
Initialize the torch.distributed process group.
If you run multiple groups in parallel or if you have zombie processes, you can add a nonce to avoid errors.
"""
device = 0
if sys.platform == "win32":
# Distributed package only covers collective communications with Gloo
# backend and FileStore on Windows platform. Set init_method parameter
# in init_process_group to a local file.
# Example init_method="file:///f:/libtmp/some_file"
init_method = "file:///{your local file path}"
# initialize the process group
dist.init_process_group(
"gloo", init_method=init_method, rank=rank, world_size=world_size
)
device = rank
elif os.environ.get("SLURM_NTASKS") is not None:
# Running on a Slurm cluster
os.environ["MASTER_ADDR"] = "127.0.0.1"
os.environ["MASTER_PORT"] = str(7440 + nonce)
local_rank = int(os.environ.get("SLURM_LOCALID"))
dist.init_process_group(backend="gloo", rank=rank, world_size=world_size)
# The device is the local rank (if you have 2 nodes with 8 GPUs each, you will have two "cuda:0" devices)
device = local_rank
else:
os.environ["MASTER_ADDR"] = "localhost"
os.environ["MASTER_PORT"] = "12355"
os.environ["RANK"] = str(rank)
os.environ["WORLD_SIZE"] = str(world_size)
dist.init_process_group(
init_method="env://",
backend="nccl",
)
# Single node experiment
device = rank
return device
def cleanup():
dist.destroy_process_group()
class ToyModel(nn.Module):
def __init__(self):
super(ToyModel, self).__init__()
self.net1 = nn.Linear(10, 10)
self.relu = nn.ReLU()
self.net2 = nn.Linear(10, 5)
def forward(self, x):
return self.net2(self.relu(self.net1(x)))
def demo_basic(rank, world_size, weight, dp, noise_multiplier=0, max_grad_norm=1e8):
# We don't want the 2 GPUs to work on the same examples/labels in parallel
torch.manual_seed(rank)
batch_size = 32
withdp = "with" + ("out " if not dp else "")
print(f"Running basic DDP {withdp} differential privacy example on rank {rank}.")
device = setup_and_get_device(rank, world_size)
# create model and move it to GPU with id rank
model = ToyModel().to(device)
print(f"Initial weight: {model.net1.weight.data}")
# Freeze all the parameters except one, to ensure that the noise is the same
# (the DDP hook does not browse the layers in the same order as the naive implementation)
model.net1.bias.requires_grad = False
model.net2.bias.requires_grad = False
model.net2.weight.requires_grad = False
if dp:
ddp_model = DPDDP(model)
engine = PrivacyEngine(
ddp_model,
batch_size=batch_size,
sample_size=10 * batch_size,
alphas=PRIVACY_ALPHAS,
noise_multiplier=noise_multiplier,
max_grad_norm=[max_grad_norm],
)
engine.random_number_generator = engine._set_seed(0)
else:
ddp_model = DDP(model, device_ids=[device])
loss_fn = nn.MSELoss()
optimizer = optim.SGD(ddp_model.parameters(), lr=1)
if dp:
engine.attach(optimizer)
optimizer.zero_grad()
labels = torch.randn(batch_size, 5).to(device)
outputs = ddp_model(torch.randn(batch_size, 10).to(device))
loss_fn(outputs, labels).backward()
optimizer.step()
weight.copy_(model.net1.weight.data.cpu())
cleanup()
def demo_ddp_hook(rank, world_size, weight, dp, noise_multiplier, max_grad_norm):
torch.manual_seed(rank)
batch_size = 32
withdp = "with" + ("out " if not dp else "")
print(f"Running DDP hook {withdp} differential privacy example on rank {rank}.")
device = setup_and_get_device(rank, world_size, nonce=1)
# create model and move it to GPU with id rank
model = ToyModel().to(device)
model.net1.bias.requires_grad = False
model.net2.bias.requires_grad = False
model.net2.weight.requires_grad = False
ddp_model = DDP(model, device_ids=[device])
if dp:
engine = PrivacyEngine(
ddp_model,
batch_size=batch_size,
sample_size=10 * batch_size,
alphas=PRIVACY_ALPHAS,
noise_multiplier=noise_multiplier,
max_grad_norm=[max_grad_norm],
)
engine.random_number_generator = engine._set_seed(0)
loss_fn = nn.MSELoss()
optimizer = optim.SGD(ddp_model.parameters(), lr=1)
if dp:
engine.attach(optimizer)
optimizer.zero_grad()
labels = torch.randn(batch_size, 5).to(device)
outputs = ddp_model(torch.randn(batch_size, 10).to(device))
loss_fn(outputs, labels).backward()
optimizer.step()
weight.copy_(model.net1.weight.data.cpu())
del ddp_model
cleanup()
def add_remove_ddp_hooks(
rank, world_size, remaining_hooks, dp, noise_multiplier=0, max_grad_norm=1e8
):
device = setup_and_get_device(rank, world_size, nonce=2)
model = ToyModel().to(device)
ddp_model = nn.parallel.DistributedDataParallel(model, device_ids=[device])
engine = PrivacyEngine(
ddp_model,
batch_size=1,
sample_size=10,
alphas=PRIVACY_ALPHAS,
noise_multiplier=noise_multiplier,
max_grad_norm=[max_grad_norm],
)
optimizer = optim.SGD(ddp_model.parameters(), lr=1)
engine.attach(optimizer)
remaining_hooks["attached"] = {
p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks
}
engine.detach()
remaining_hooks["detached"] = {
p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks
}
cleanup()
def debug(rank, world_size, tensor, dp, noise_multiplier=0, max_grad_norm=1e8):
local_rank = setup_and_get_device(rank, world_size)
print(f"Rank: {rank},World size: {world_size}, local_rank: {local_rank}")
tensor = tensor.to(local_rank)
print(f"dp: {dp}")
print(tensor)
cleanup()
def run_function(local_function, tensor, dp, noise_multiplier=0, max_grad_norm=1e8):
if os.environ.get("SLURM_NTASKS") is not None:
world_size = int(os.environ.get("SLURM_NTASKS"))
rank = int(os.environ.get("SLURM_PROCID"))
print(f"Running on a Slurm cluster with {world_size} tasks.")
local_function(rank, world_size, tensor, dp, noise_multiplier, max_grad_norm)
else:
world_size = torch.cuda.device_count()
print(f"Spawning multiple processes on a local machine with {world_size} GPUs")
# The rank will be passed as the first argument
mp.spawn(
local_function,
args=(
world_size,
tensor,
dp,
noise_multiplier,
max_grad_norm,
),
nprocs=world_size,
join=True,
)
return world_size
class GradientComputationTest(unittest.TestCase):
def test_connection(self):
tensor = torch.zeros(10, 10)
world_size = run_function(debug, tensor, dp=True)
self.assertTrue(
            world_size >= 2, f"Need at least 2 GPUs but was provided only {world_size}."
)
def test_gradient_noclip_zeronoise(self):
# Tests that gradient is the same with DP or with DDP
weight_dp, weight_nodp = torch.zeros(10, 10), torch.zeros(10, 10)
run_function(demo_basic, weight_dp, dp=True)
run_function(demo_basic, weight_nodp, dp=False)
self.assertTrue(torch.norm(weight_dp - weight_nodp) < 1e-7)
def test_ddp_hook(self):
# Tests that the DDP hook does the same thing as naive aggregation with per layer clipping
weight_ddp_naive, weight_ddp_hook = torch.zeros(10, 10), torch.zeros(10, 10)
run_function(
demo_basic,
weight_ddp_naive,
dp=True,
noise_multiplier=0.1,
max_grad_norm=1.0,
)
run_function(
demo_ddp_hook,
weight_ddp_hook,
dp=True,
noise_multiplier=0.1,
max_grad_norm=1.0,
)
self.assertTrue(
torch.norm(weight_ddp_naive - weight_ddp_hook) < 1e-7,
f"DDP naive: {weight_ddp_naive}\nDDP hook: {weight_ddp_hook}",
)
def test_add_remove_ddp_hooks(self):
remaining_hooks = {
"attached": None,
"detached": None,
}
run_function(
add_remove_ddp_hooks,
remaining_hooks,
dp=True,
noise_multiplier=0.1,
max_grad_norm=1.0,
)
assert remaining_hooks["attached"], "There are no hooks."
assert not remaining_hooks[
"detached"
], f"Some hooks remain after .remove_hooks(): {remaining_hooks}"
| [
[
[
101,
103
],
[
1317,
1319
],
[
1406,
1408
],
[
1454,
1456
],
[
1525,
1527
],
[
1801,
1803
],
[
1849,
1851
],
[
1894,
1896
],
[
1933,
1935
],
[
6783,
6785
],
[
6852,
6854
],
[
6903,
6905
]
],
[
[
111,
114
],
[
779,
782
]
],
[
[
122,
130
],
[
7629,
7637
]
],
[
[
139,
144
],
[
2651,
2656
],
[
3930,
3935
],
[
3993,
3998
],
[
4245,
4250
],
[
5256,
5261
],
[
5319,
5324
],
[
7123,
7128
],
[
7697,
7702
],
[
8042,
8047
],
[
8063,
8068
],
[
8218,
8223
],
[
8435,
8440
],
[
8456,
8461
],
[
8862,
8867
]
],
[
[
152,
177
],
[
1173,
1177
],
[
1566,
1570
],
[
1984,
1988
],
[
2176,
2180
]
],
[
[
185,
212
],
[
7302,
7304
]
],
[
[
220,
234
],
[
2222,
2224
],
[
2319,
2321
],
[
2357,
2359
],
[
2387,
2389
],
[
3777,
3779
],
[
5103,
5105
],
[
5725,
5727
]
],
[
[
242,
262
],
[
3806,
3811
],
[
5132,
5137
],
[
6019,
6024
]
],
[
[
282,
295
],
[
3390,
3403
],
[
4778,
4791
],
[
5803,
5816
]
],
[
[
327,
380
],
[
3360,
3365
]
],
[
[
411,
441
],
[
3730,
3733
],
[
4717,
4720
]
],
[
[
444,
458
],
[
3523,
3537
],
[
4911,
4925
],
[
5898,
5912
]
],
[
[
527,
547
],
[
2844,
2864
],
[
4437,
4457
],
[
5626,
5646
],
[
6481,
6501
]
],
[
[
2161,
2168
],
[
4147,
4154
],
[
5491,
5498
],
[
6372,
6379
],
[
6679,
6686
]
],
[
[
2213,
2221
],
[
2272,
2280
],
[
2947,
2955
],
[
4549,
4557
],
[
5687,
5695
]
],
[
[
2487,
2497
],
[
8105,
8115
],
[
8158,
8168
],
[
8511,
8521
]
],
[
[
4163,
4176
],
[
8684,
8697
]
],
[
[
5507,
5527
],
[
9178,
9198
]
],
[
[
6388,
6393
],
[
7751,
7756
]
],
[
[
6695,
6707
],
[
7738,
7750
],
[
8092,
8104
],
[
8145,
8157
],
[
8485,
8497
],
[
8658,
8670
],
[
9152,
9164
]
],
[
[
7605,
7628
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 16 19:55:29 2017
@author: Arko Chatterjee
"""
import datetime
import imutils
import time
import cv2
camera = cv2.VideoCapture(0)
time.sleep(0.50)
print("Hello!")
firstFrame = None
# loop over the frames of the video
while True:
# grab the current frame and initialize the occupied/unoccupied text
(grabbed, frame) = camera.read()
text = "Unossssccupied"
if not grabbed:
break
# resize the frame, convert it to grayscale, and blur it
frame = imutils.resize(frame, width=500)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
if firstFrame is None:
firstFrame = gray
continue
# compute the absolute difference between the current frame and first frame
frameDelta = cv2.absdiff(firstFrame, gray)
thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
thresh = cv2.dilate(thresh, None, iterations=2)
(_, cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
# loop over the contours
for c in cnts:
# if the contour is too small, ignore it
if cv2.contourArea(c) < 500:
continue
# compute the bounding box for the contour, draw it on the frame,
# and update the text
(x, y, w, h) = cv2.boundingRect(c)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
text = "Occupied"
# draw the text and timestamp on the frame
cv2.putText(frame, "Room Status: {}".format(text), (10, 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"),
(10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
# show the frame and record if the user presses a key
cv2.imshow("Security Feed", frame)
#cv2.imshow("Thresh", thresh)
#cv2.imshow("Frame Delta", frameDelta)
key = cv2.waitKey(1) & 0xFF
    # if the `q` key is pressed, break from the loop
if key == ord("q"):
break
camera.release()
cv2.destroyAllWindows()
| [
[
[
107,
115
],
[
1608,
1616
]
],
[
[
124,
131
],
[
535,
542
]
],
[
[
140,
144
],
[
190,
194
]
],
[
[
153,
156
],
[
169,
172
],
[
577,
580
],
[
597,
600
],
[
626,
629
],
[
820,
823
],
[
861,
864
],
[
896,
899
],
[
933,
936
],
[
989,
992
],
[
1021,
1024
],
[
1039,
1042
],
[
1160,
1163
],
[
1313,
1316
],
[
1336,
1339
],
[
1476,
1479
],
[
1540,
1543
],
[
1589,
1592
],
[
1698,
1701
],
[
1810,
1813
],
[
1928,
1931
],
[
2056,
2059
]
],
[
[
160,
166
],
[
391,
397
],
[
2038,
2044
]
],
[
[
227,
237
],
[
669,
679
],
[
832,
842
]
],
[
[
373,
380
],
[
444,
451
]
],
[
[
382,
387
],
[
550,
555
]
],
[
[
407,
411
],
[
1520,
1524
]
],
[
[
527,
532
],
[
590,
595
],
[
1350,
1355
],
[
1488,
1493
],
[
1601,
1606
],
[
1676,
1681
],
[
1838,
1843
]
],
[
[
570,
574
],
[
643,
647
]
],
[
[
619,
623
],
[
705,
709
],
[
844,
848
]
],
[
[
692,
702
],
[
669,
679
],
[
832,
842
]
],
[
[
807,
817
],
[
875,
885
]
],
[
[
852,
858
],
[
944,
950
]
],
[
[
924,
930
],
[
1006,
1012
]
],
[
[
975,
976
]
],
[
[
978,
982
],
[
1104,
1108
]
],
[
[
984,
985
]
],
[
[
1099,
1100
],
[
1176,
1177
],
[
1330,
1331
]
],
[
[
1299,
1300
],
[
1358,
1359
],
[
1366,
1367
]
],
[
[
1302,
1303
],
[
1361,
1362
],
[
1373,
1374
]
],
[
[
1305,
1306
],
[
1370,
1371
]
],
[
[
1308,
1309
],
[
1377,
1378
]
],
[
[
1400,
1404
],
[
1520,
1524
]
],
[
[
1922,
1925
],
[
2007,
2010
]
]
] |
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# $Revision: #4 $
import unittest
from unittest import mock
import cgf_service_client
class UnitTest_CloudGemFramework_ServiceClient_service_client(unittest.TestCase):
def test_service_client_imports(self):
self.assertIsNotNone(cgf_service_client.Data)
self.assertIsNotNone(cgf_service_client.Path)
self.assertIsNotNone(cgf_service_client.HttpError)
self.assertIsNotNone(cgf_service_client.ClientError)
self.assertIsNotNone(cgf_service_client.NotFoundError)
self.assertIsNotNone(cgf_service_client.NotAllowedError)
self.assertIsNotNone(cgf_service_client.ServerError)
@mock.patch('cgf_service_client.Path')
def test_for_url(self, mock_Path):
client = cgf_service_client.for_url('http://example.com', A = 10, B = 20)
self.assertIs(client, mock_Path.return_value)
mock_Path.assert_called_once_with('http://example.com', A = 10, B = 20)
if __name__ == '__main__':
unittest.main()
| [
[
[
549,
557
],
[
675,
683
],
[
1491,
1499
]
],
[
[
579,
583
],
[
1163,
1167
]
],
[
[
592,
610
],
[
768,
786
],
[
822,
840
],
[
876,
894
],
[
935,
953
],
[
996,
1014
],
[
1059,
1077
],
[
1124,
1142
],
[
1258,
1276
]
],
[
[
619,
674
]
]
] |
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aspc.settings")
from django.core.wsgi import get_wsgi_application
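# WSGI servers (gunicorn, uWSGI, mod_wsgi) import this module and serve the
# callable bound to the name 'application'.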
application = get_wsgi_application()
| [
[
[
7,
9
],
[
10,
12
]
],
[
[
105,
125
],
[
140,
160
]
],
[
[
126,
137
]
]
] |
from datetime import datetime
import cv2
import re
import base64
from flask import Flask, render_template, request, jsonify
from flask_cors import CORS
import numpy as np
from io import BytesIO
from PIL import Image, ImageOps
import os,sys
import requests
from graphpipe import remote
from matplotlib import pylab as plt
app = Flask(__name__)
CORS(app)  # allow cross-origin POST requests from the Ajax front end
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
ans,t1,t2,t3 = get_answer(request)
return jsonify({'ans': ans, 't1': t1, 't2': t2, 't3': t3})
else:
return render_template('index.html')
def result(img):
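    # Flatten the 28x28 grayscale digit to the 784-feature row vector the
    # MNIST model behind graphpipe expects, with pixels scaled to [0, 1].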
img = img.reshape(1, 784)
img = img.astype(np.float32)
img = np.multiply(img, 1.0 / 255.0)
pred = remote.execute("http://localhost:9001", img)
r = np.argmax(pred, axis=1)
pp = pred*100
top1 = str(np.argsort(-pp)[0][0])+ " (" +str(int(np.sort(-pp)[0][0]*-1))+"%)"
top2 = str(np.argsort(-pp)[0][1])+ " (" +str(int(np.sort(-pp)[0][1]*-1))+"%)"
top3 = str(np.argsort(-pp)[0][2])+ " (" +str(int(np.sort(-pp)[0][2]*-1))+"%)"
# return int(r)
return r,top1,top2,top3
def get_answer(req):
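    # The browser posts a data URL: strip the 'base64,' prefix, decode the
    # image, invert it (MNIST digits are white on black), then gray-scale
    # and shrink to 28x28 before handing it to result().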
img_str = re.search(r'base64,(.*)', req.form['img']).group(1)
nparr = np.fromstring(base64.b64decode(img_str), np.uint8)
img_src = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
img_negaposi = 255 - img_src
img_gray = cv2.cvtColor(img_negaposi, cv2.COLOR_BGR2GRAY)
img_resize = cv2.resize(img_gray,(28,28))
cv2.imwrite(f"images/{datetime.now().strftime('%s')}.jpg",img_resize)
ans,t1,t2,t3 = result(img_resize)
return int(ans),t1,t2,t3
if __name__ == "__main__":
app.run(debug=False, host='0.0.0.0', port=8001)
| [
[
[
21,
29
],
[
1517,
1525
]
],
[
[
37,
40
],
[
1312,
1315
],
[
1332,
1335
],
[
1398,
1401
],
[
1425,
1428
],
[
1462,
1465
],
[
1495,
1498
]
],
[
[
48,
50
],
[
1183,
1185
]
],
[
[
58,
64
],
[
1261,
1267
]
],
[
[
83,
88
],
[
330,
335
]
],
[
[
90,
105
],
[
597,
612
]
],
[
[
107,
114
],
[
436,
443
],
[
496,
503
]
],
[
[
116,
123
],
[
520,
527
]
],
[
[
147,
151
],
[
346,
350
]
],
[
[
159,
170
],
[
696,
698
],
[
718,
720
],
[
812,
814
],
[
869,
871
],
[
907,
909
],
[
951,
953
],
[
989,
991
],
[
1033,
1035
],
[
1071,
1073
],
[
1247,
1249
],
[
1288,
1290
]
],
[
[
187,
194
]
],
[
[
211,
216
]
],
[
[
218,
226
]
],
[
[
234,
236
]
],
[
[
237,
240
]
],
[
[
248,
256
]
],
[
[
279,
285
],
[
759,
765
]
],
[
[
309,
321
]
],
[
[
324,
327
],
[
351,
354
],
[
376,
379
],
[
1664,
1667
]
],
[
[
420,
425
]
],
[
[
632,
638
],
[
1584,
1590
]
],
[
[
1152,
1162
],
[
485,
495
]
]
] |
import json
import pytest
from buildtrigger.test.githubmock import get_github_trigger
from buildtrigger.triggerutil import (SkipRequestException, ValidationRequestException,
InvalidPayloadException)
from endpoints.building import PreparedBuild
from util.morecollections import AttrDict
@pytest.fixture
def github_trigger():
return get_github_trigger()
@pytest.mark.parametrize('payload, expected_error, expected_message', [
('{"zen": true}', SkipRequestException, ""),
('{}', InvalidPayloadException, "Missing 'repository' on request"),
('{"repository": "foo"}', InvalidPayloadException, "Missing 'owner' on repository"),
# Valid payload:
('''{
"repository": {
"owner": {
"name": "someguy"
},
"name": "somerepo",
"ssh_url": "someurl"
},
"ref": "refs/tags/foo",
"head_commit": {
"id": "11d6fbc",
"url": "http://some/url",
"message": "some message",
"timestamp": "NOW"
}
}''', None, None),
# Skip message:
('''{
"repository": {
"owner": {
"name": "someguy"
},
"name": "somerepo",
"ssh_url": "someurl"
},
"ref": "refs/tags/foo",
"head_commit": {
"id": "11d6fbc",
"url": "http://some/url",
"message": "[skip build]",
"timestamp": "NOW"
}
}''', SkipRequestException, ''),
])
def test_handle_trigger_request(github_trigger, payload, expected_error, expected_message):
def get_payload():
return json.loads(payload)
request = AttrDict(dict(get_json=get_payload))
if expected_error is not None:
with pytest.raises(expected_error) as ipe:
github_trigger.handle_trigger_request(request)
assert str(ipe.value) == expected_message
else:
assert isinstance(github_trigger.handle_trigger_request(request), PreparedBuild)
@pytest.mark.parametrize('dockerfile_path, contents', [
('/Dockerfile', 'hello world'),
('somesubdir/Dockerfile', 'hi universe'),
('unknownpath', None),
])
def test_load_dockerfile_contents(dockerfile_path, contents):
trigger = get_github_trigger(dockerfile_path)
assert trigger.load_dockerfile_contents() == contents
@pytest.mark.parametrize('username, expected_response', [
('unknownuser', None),
('knownuser', {'html_url': 'https://bitbucket.org/knownuser', 'avatar_url': 'avatarurl'}),
])
def test_lookup_user(username, expected_response, github_trigger):
assert github_trigger.lookup_user(username) == expected_response
def test_list_build_subdirs(github_trigger):
assert github_trigger.list_build_subdirs() == ['Dockerfile', 'somesubdir/Dockerfile']
def test_list_build_source_namespaces(github_trigger):
namespaces_expected = [
{
'personal': True,
'score': 1,
'avatar_url': 'avatarurl',
'id': 'knownuser',
'title': 'knownuser',
'url': 'https://bitbucket.org/knownuser',
},
{
'score': 0,
'title': 'someorg',
'personal': False,
'url': '',
'avatar_url': 'avatarurl',
'id': 'someorg'
}
]
found = github_trigger.list_build_source_namespaces()
found.sort()
namespaces_expected.sort()
assert found == namespaces_expected
| [
[
[
7,
11
],
[
1511,
1515
]
],
[
[
19,
25
],
[
327,
333
],
[
397,
403
],
[
1857,
1863
],
[
2187,
2193
],
[
1624,
1630
]
],
[
[
68,
86
],
[
373,
391
],
[
2092,
2110
]
],
[
[
125,
145
],
[
488,
508
],
[
1357,
1377
]
],
[
[
147,
173
]
],
[
[
213,
236
],
[
525,
548
],
[
614,
637
]
],
[
[
269,
282
],
[
1839,
1852
]
],
[
[
316,
324
],
[
1544,
1552
]
],
[
[
346,
360
]
],
[
[
1391,
1418
]
],
[
[
2022,
2051
]
],
[
[
2369,
2385
]
],
[
[
2505,
2528
]
],
[
[
2640,
2673
]
]
] |
from QInstrument.lib import QInstrumentInterface
from QInstrument.instruments.Opus.Opus import Opus
from PyQt5.QtCore import (pyqtSlot, QTimer)
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
class QOpusWidget(QInstrumentInterface):
def __init__(self, *args, interval=None, **kwargs):
super().__init__(*args,
uiFile='OpusWidget.ui',
deviceClass=Opus,
**kwargs)
self.interval = interval or 200
self.timer = QTimer()
self.connectSignals()
self.startPolling()
def connectSignals(self):
self.timer.timeout.connect(self.poll)
self.ui.PowerDial.valueChanged.connect(self.updatePower)
self.ui.Power.editingFinished.connect(self.updatePowerDial)
self.ui.PowerDial.valueChanged.connect(self.uncheck)
self.ui.SendPower.clicked.connect(self.check)
self.device.dataReady.connect(self.updateValues)
self.ui.Disable.clicked.connect(self.disable)
def startPolling(self):
if self.isEnabled():
self.timer.start(self.interval)
return self
def stopPolling(self):
self.timer.stop()
@pyqtSlot()
def poll(self):
self.device.send('POWER?')
self.device.send('CURRENT?')
self.device.send('STATUS?')
@pyqtSlot(int)
def updatePower(self, value):
self.ui.Power.setValue(value)
@pyqtSlot(str)
def updateValues(self, data):
if 'mW' in data:
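            # The reply mixes digits with the 'mW' unit; keep the digits and
            # scale by 1/10 (the device appears to report tenths).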
numeric_filter = filter(str.isdigit, data)
p = float((int("".join(numeric_filter))/10))
if p == 0.0:
self.ui.EnableSwitch.setChecked(False)
if p != 0.0:
self.ui.EnableSwitch.setChecked(True)
self.ui.ActualPower.setValue(p)
if '%' in data:
numeric_filter = filter(str.isdigit, data)
p = float((int("".join(numeric_filter))/10))
self.ui.CurrentBox.setValue(p)
@pyqtSlot()
def check(self):
self.ui.sentCheck.setChecked(True)
a = self.ui.Power.value()
self.device.set_power(a)
@pyqtSlot()
def uncheck(self):
self.ui.sentCheck.setChecked(False)
@pyqtSlot()
def updatePowerDial(self):
value = self.ui.Power.value()
self.ui.PowerDial.setValue(int(value))
def disable(self):
self.device.send('OFF')
def main():
import sys
from PyQt5.QtWidgets import QApplication
app = QApplication(sys.argv)
widget = QOpusWidget()
widget.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| [
[
[
28,
48
],
[
270,
290
]
],
[
[
95,
99
],
[
468,
472
]
],
[
[
126,
134
],
[
1257,
1265
],
[
1403,
1411
],
[
1496,
1504
],
[
2071,
2079
],
[
2219,
2227
],
[
2303,
2311
]
],
[
[
136,
142
],
[
570,
576
]
],
[
[
151,
158
],
[
160,
167
],
[
191,
198
],
[
235,
242
]
],
[
[
182,
188
],
[
219,
225
]
],
[
[
258,
269
],
[
2608,
2619
]
],
[
[
2492,
2496
],
[
2699,
2703
]
]
] |
import FWCore.ParameterSet.Config as cms
from DQM.EcalPreshowerMonitorModule.ESRawDataTask_cfi import *
from DQM.EcalPreshowerMonitorModule.ESIntegrityTask_cfi import *
ecalPreshowerIntegrityTask.DoLumiAnalysis = True
from DQM.EcalPreshowerMonitorModule.ESFEDIntegrityTask_cfi import *
from DQM.EcalPreshowerMonitorModule.ESOccupancyTask_cfi import *
from DQM.EcalPreshowerMonitorModule.ESTrendTask_cfi import *
dqmInfoES = cms.EDAnalyzer("DQMEventInfo",
subSystemFolder = cms.untracked.string('EcalPreshower')
)
es_dqm_source_offline = cms.Sequence(ecalPreshowerRawDataTask*ecalPreshowerFEDIntegrityTask*ecalPreshowerIntegrityTask*ecalPreshowerOccupancyTask*ecalPreshowerTrendTask)
| [
[
[
7,
40
],
[
426,
429
],
[
479,
482
],
[
544,
547
]
],
[
[
103,
104
]
],
[
[
168,
169
],
[
170,
196
]
],
[
[
285,
286
]
],
[
[
350,
351
]
],
[
[
411,
412
],
[
557,
581
],
[
582,
611
],
[
612,
638
],
[
639,
665
],
[
666,
688
]
],
[
[
414,
423
]
],
[
[
520,
541
]
]
] |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/16_callback.progress.ipynb (unless otherwise specified).
__all__ = ['ProgressCallback', 'no_bar', 'ShowGraphCallback', 'CSVLogger']
# Cell
from ..basics import *
# Cell
@docs
class ProgressCallback(Callback):
"A `Callback` to handle the display of progress bars"
run_after=Recorder
def begin_fit(self):
assert hasattr(self.learn, 'recorder')
if self.create_mbar: self.mbar = master_bar(list(range(self.n_epoch)))
if self.learn.logger != noop:
self.old_logger,self.learn.logger = self.logger,self._write_stats
self._write_stats(self.recorder.metric_names)
else: self.old_logger = noop
def begin_epoch(self):
if getattr(self, 'mbar', False): self.mbar.update(self.epoch)
def begin_train(self): self._launch_pbar()
def begin_validate(self): self._launch_pbar()
def after_train(self): self.pbar.on_iter_end()
def after_validate(self): self.pbar.on_iter_end()
def after_batch(self):
self.pbar.update(self.iter+1)
if hasattr(self, 'smooth_loss'): self.pbar.comment = f'{self.smooth_loss:.4f}'
def _launch_pbar(self):
self.pbar = progress_bar(self.dl, parent=getattr(self, 'mbar', None), leave=False)
self.pbar.update(0)
def after_fit(self):
if getattr(self, 'mbar', False):
self.mbar.on_iter_end()
delattr(self, 'mbar')
self.learn.logger = self.old_logger
def _write_stats(self, log):
if getattr(self, 'mbar', False): self.mbar.write([f'{l:.6f}' if isinstance(l, float) else str(l) for l in log], table=True)
_docs = dict(begin_fit="Setup the master bar over the epochs",
begin_epoch="Update the master bar",
begin_train="Launch a progress bar over the training dataloader",
begin_validate="Launch a progress bar over the validation dataloader",
after_train="Close the progress bar over the training dataloader",
after_validate="Close the progress bar over the validation dataloader",
after_batch="Update the current progress bar",
after_fit="Close the master bar")
defaults.callbacks = [TrainEvalCallback, Recorder, ProgressCallback]
# Cell
@patch
@contextmanager
def no_bar(self:Learner):
"Context manager that deactivates the use of progress bars"
has_progress = hasattr(self, 'progress')
if has_progress: self.remove_cb(self.progress)
yield self
if has_progress: self.add_cb(ProgressCallback())
# Cell
class ShowGraphCallback(Callback):
"Update a graph of training and validation loss"
run_after=ProgressCallback
def begin_fit(self):
self.run = not hasattr(self.learn, 'lr_finder') and not hasattr(self, "gather_preds")
self.nb_batches = []
assert hasattr(self.learn, 'progress')
def after_train(self): self.nb_batches.append(self.train_iter)
def after_epoch(self):
"Plot validation loss in the pbar graph"
rec = self.learn.recorder
iters = range_of(rec.losses)
val_losses = [v[1] for v in rec.values]
x_bounds = (0, (self.n_epoch - len(self.nb_batches)) * self.nb_batches[0] + len(rec.losses))
y_bounds = (0, max((max(Tensor(rec.losses)), max(Tensor(val_losses)))))
self.progress.mbar.update_graph([(iters, rec.losses), (self.nb_batches, val_losses)], x_bounds, y_bounds)
# Cell
class CSVLogger(Callback):
run_after=Recorder
"Log the results displayed in `learn.path/fname`"
def __init__(self, fname='history.csv', append=False):
self.fname,self.append = Path(fname),append
def read_log(self):
"Convenience method to quickly access the log."
return pd.read_csv(self.path/self.fname)
def begin_fit(self):
"Prepare file with metric names."
self.path.parent.mkdir(parents=True, exist_ok=True)
self.file = (self.path/self.fname).open('a' if self.append else 'w')
self.file.write(','.join(self.recorder.metric_names) + '\n')
self.old_logger,self.learn.logger = self.logger,self._write_line
def _write_line(self, log):
"Write a line with `log` and call the old logger."
self.file.write(','.join([str(t) for t in log]) + '\n')
self.old_logger(log)
def after_fit(self):
"Close the file and clean up."
self.file.close()
self.learn.logger = self.old_logger | [
[
[
106,
113
]
],
[
[
210,
211
],
[
249,
257
],
[
221,
225
],
[
332,
340
],
[
2262,
2279
],
[
2281,
2289
],
[
2240,
2248
],
[
2318,
2323
],
[
2325,
2339
],
[
2626,
2634
],
[
3500,
3508
],
[
3525,
3533
],
[
455,
465
],
[
525,
529
],
[
699,
703
],
[
1212,
1224
],
[
2356,
2363
],
[
3112,
3120
],
[
3314,
3320
],
[
3339,
3345
],
[
3680,
3684
],
[
3795,
3797
]
],
[
[
232,
248
],
[
2291,
2307
],
[
2704,
2720
],
[
2574,
2590
]
],
[
[
2344,
2350
]
],
[
[
2608,
2625
]
],
[
[
3490,
3499
]
]
] |
import simplejson
import binascii
import sys
import pdb
from pprint import pprint
import sys
import os
sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib')))
import gentariumlib
# ============================================================================
usage = "%s <hex>" % sys.argv[0]
obj = None
if len(sys.argv) < 2:
print(usage)
sys.exit(1)
else:
obj = gentariumlib.deserialise(sys.argv[1])
pdb.set_trace()
1
| [
[
[
7,
17
]
],
[
[
25,
33
]
],
[
[
41,
44
]
],
[
[
52,
55
],
[
442,
445
]
],
[
[
75,
81
]
],
[
[
89,
92
],
[
103,
106
],
[
308,
311
],
[
339,
342
],
[
375,
378
],
[
428,
431
]
],
[
[
100,
102
],
[
119,
121
],
[
136,
138
],
[
149,
151
]
],
[
[
195,
207
],
[
403,
415
]
],
[
[
287,
292
],
[
364,
369
]
],
[
[
321,
324
]
],
[
[
397,
400
]
]
] |
from bouncingball import BouncingBall, BouncingBox
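# Processing.py sketch: size(), random() and background() are supplied by the
# Processing runtime, not imported.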
balls = []
boxes = []
def setup():
size(600, 600)
for _ in range(60):
if random(10) < 5:
balls.append(BouncingBall())
else:
boxes.append(BouncingBox())
def draw():
background("#2b3e50")
for ball in balls:
ball.move()
ball.display()
for box in boxes:
box.move()
box.display()
| [
[
[
25,
37
],
[
183,
195
]
],
[
[
39,
50
],
[
238,
249
]
],
[
[
52,
57
],
[
170,
175
],
[
308,
313
]
],
[
[
63,
68
],
[
225,
230
],
[
373,
378
]
],
[
[
79,
84
]
],
[
[
258,
262
]
]
] |
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2020 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import pyglet
def debug_print(enabled_or_option='debug'):
"""Get a debug printer that is enabled based on a boolean input or a pyglet option.
The debug print function returned should be used in an assert. This way it can be
optimized out when running python with the -O flag.
Usage example::
from pyglet.debug import debug_print
_debug_media = debug_print('debug_media')
def some_func():
assert _debug_media('My debug statement')
:parameters:
        `enabled_or_option` : bool or str
If a bool is passed, debug printing is enabled if it is True. If str is passed
debug printing is enabled if the pyglet option with that name is True.
:returns: Function for debug printing.
"""
if isinstance(enabled_or_option, bool):
enabled = enabled_or_option
else:
enabled = pyglet.options.get(enabled_or_option, False)
if enabled:
def _debug_print(*args, **kwargs):
print(*args, **kwargs)
return True
else:
def _debug_print(*args, **kwargs):
return True
return _debug_print
| [
[
[
1769,
1775
],
[
2644,
2650
]
],
[
[
1782,
1793
]
]
] |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import InvalidNonce
class cobinhood (Exchange):
def describe(self):
return self.deep_extend(super(cobinhood, self).describe(), {
'id': 'cobinhood',
'name': 'COBINHOOD',
'countries': ['TW'],
'rateLimit': 1000 / 10,
'version': 'v1',
'has': {
'fetchCurrencies': True,
'fetchTickers': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOrderTrades': True,
'fetchOrder': True,
'fetchDepositAddress': True,
'createDepositAddress': True,
'fetchDeposits': True,
'fetchWithdrawals': True,
'withdraw': True,
'fetchMyTrades': True,
'editOrder': True,
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
},
'timeframes': {
# the first two don't seem to work at all
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'3h': '3h',
'6h': '6h',
'12h': '12h',
'1d': '1D',
'1w': '7D',
'2w': '14D',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/35755576-dee02e5c-0878-11e8-989f-1595d80ba47f.jpg',
'api': 'https://api.cobinhood.com',
'www': 'https://cobinhood.com',
'doc': 'https://cobinhood.github.io/api-public',
},
'api': {
'system': {
'get': [
'info',
'time',
'messages',
'messages/{message_id}',
],
},
'admin': {
'get': [
'system/messages',
'system/messages/{message_id}',
],
'post': [
'system/messages',
],
'patch': [
'system/messages/{message_id}',
],
'delete': [
'system/messages/{message_id}',
],
},
'public': {
'get': [
'market/fundingbook/precisions/{currency_id}',
'market/fundingbooks/{currency_id}',
'market/tickers',
'market/currencies',
'market/quote_currencies',
'market/trading_pairs',
'market/orderbook/precisions/{trading_pair_id}',
'market/orderbooks/{trading_pair_id}',
'market/stats',
'market/tickers', # fetchTickers
'market/tickers/{trading_pair_id}',
'market/trades/{trading_pair_id}',
'market/trades_history/{trading_pair_id}',
'market/trading_pairs',
'chart/candles/{trading_pair_id}',
'system/time',
],
},
'private': {
'get': [
'funding/auto_offerings',
'funding/auto_offerings/{currency_id}',
'funding/funding_history',
'funding/fundings',
'funding/loans',
'funding/loans/{loan_id}',
'trading/orders/{order_id}',
'trading/orders/{order_id}/trades',
'trading/orders',
'trading/order_history',
'trading/positions',
'trading/positions/{trading_pair_id}',
'trading/positions/{trading_pair_id}/claimable_size',
'trading/trades',
'trading/trades/{trade_id}',
'trading/volume',
'wallet/balances',
'wallet/ledger',
'wallet/limits/withdrawal',
'wallet/generic_deposits',
'wallet/generic_deposits/{generic_deposit_id}',
'wallet/generic_withdrawals',
'wallet/generic_withdrawals/{generic_withdrawal_id}',
# older endpoints
'wallet/deposit_addresses',
'wallet/deposit_addresses/iota',
'wallet/withdrawal_addresses',
'wallet/withdrawal_frozen',
'wallet/withdrawals/{withdrawal_id}',
'wallet/withdrawals',
'wallet/deposits/{deposit_id}',
'wallet/deposits',
],
'patch': [
'trading/positions/{trading_pair_id}',
],
'post': [
'funding/auto_offerings',
'funding/fundings',
'trading/check_order',
'trading/orders',
# older endpoints
'wallet/deposit_addresses',
'wallet/transfer',
'wallet/withdrawal_addresses',
'wallet/withdrawals',
'wallet/withdrawals/fee',
],
'put': [
'funding/fundings/{funding_id}',
'trading/orders/{order_id}',
],
'delete': [
'funding/auto_offerings/{currency_id}',
'funding/fundings/{funding_id}',
'funding/loans/{loan_id}',
'trading/orders/{order_id}',
'trading/positions/{trading_pair_id}',
'wallet/generic_withdrawals/{generic_withdrawal_id}',
'wallet/withdrawal_addresses/{wallet_id}',
],
},
},
'fees': {
'trading': {
'maker': 0.0,
'taker': 0.0,
},
},
'precision': {
'amount': 8,
'price': 8,
},
'exceptions': {
'insufficient_balance': InsufficientFunds,
'invalid_order_size': InvalidOrder,
'invalid_nonce': InvalidNonce,
'unauthorized_scope': PermissionDenied,
'invalid_address': InvalidAddress,
},
'commonCurrencies': {
'SMT': 'SocialMedia.Market',
'MTN': 'Motion Token',
},
})
def fetch_currencies(self, params={}):
response = self.publicGetMarketCurrencies(params)
currencies = response['result']['currencies']
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
id = currency['currency']
code = self.common_currency_code(id)
minUnit = self.safe_float(currency, 'min_unit')
result[code] = {
'id': id,
'code': code,
'name': currency['name'],
'active': True,
'fiat': False,
'precision': self.precision_from_string(currency['min_unit']),
'limits': {
'amount': {
'min': minUnit,
'max': None,
},
'price': {
'min': minUnit,
'max': None,
},
'deposit': {
'min': minUnit,
'max': None,
},
'withdraw': {
'min': minUnit,
'max': None,
},
},
'funding': {
'withdraw': {
'fee': self.safe_float(currency, 'withdrawal_fee'),
},
'deposit': {
'fee': self.safe_float(currency, 'deposit_fee'),
},
},
'info': currency,
}
return result
def fetch_markets(self, params={}):
response = self.publicGetMarketTradingPairs()
markets = response['result']['trading_pairs']
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['id']
baseId, quoteId = id.split('-')
base = self.common_currency_code(baseId)
quote = self.common_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': 8,
'price': self.precision_from_string(market['quote_increment']),
}
active = self.safe_value(market, 'is_active', True)
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(market, 'base_min_size'),
'max': self.safe_float(market, 'base_max_size'),
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
symbol = None
if market is None:
marketId = self.safe_string(ticker, 'trading_pair_id')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
else:
baseId, quoteId = marketId.split('-')
base = self.common_currency_code(baseId)
quote = self.common_currency_code(quoteId)
symbol = base + '/' + quote
if market is not None:
symbol = market['symbol']
timestamp = self.safe_integer(ticker, 'timestamp')
last = self.safe_float(ticker, 'last_trade_price')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, '24h_high'),
'low': self.safe_float(ticker, '24h_low'),
'bid': self.safe_float(ticker, 'highest_bid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'lowest_ask'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': self.safe_float(ticker, 'percentChanged24hr'),
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, '24h_volume'),
'quoteVolume': self.safe_float(ticker, 'quote_volume'),
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetMarketTickersTradingPairId(self.extend({
'trading_pair_id': market['id'],
}, params))
ticker = response['result']['ticker']
return self.parse_ticker(ticker, market)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
response = self.publicGetMarketTickers(params)
tickers = response['result']['tickers']
result = []
for i in range(0, len(tickers)):
result.append(self.parse_ticker(tickers[i]))
return self.index_by(result, 'symbol')
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'trading_pair_id': self.market_id(symbol),
}
if limit is not None:
request['limit'] = limit # 100
response = self.publicGetMarketOrderbooksTradingPairId(self.extend(request, params))
return self.parse_order_book(response['result']['orderbook'], None, 'bids', 'asks', 0, 2)
def parse_trade(self, trade, market=None):
symbol = None
if market:
symbol = market['symbol']
timestamp = trade['timestamp']
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'size')
cost = price * amount
# you can't determine your side from maker/taker side and vice versa
# you can't determine if your order/trade was a maker or a taker based
# on just the side of your order/trade
# https://github.com/ccxt/ccxt/issues/4300
# side = 'sell' if (trade['maker_side'] == 'bid') else 'buy'
side = None
return {
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': trade['id'],
'order': None,
'type': None,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
def fetch_trades(self, symbol, since=None, limit=50, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetMarketTradesTradingPairId(self.extend({
'trading_pair_id': market['id'],
'limit': limit, # default 20, but that seems too little
}, params))
trades = response['result']['trades']
return self.parse_trades(trades, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='5m', since=None, limit=None):
return [
# they say that timestamps are Unix Timestamps in seconds, but in fact those are milliseconds
ohlcv['timestamp'],
float(ohlcv['open']),
float(ohlcv['high']),
float(ohlcv['low']),
float(ohlcv['close']),
float(ohlcv['volume']),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
#
# they say in their docs that end_time defaults to current server time
# but if you don't specify it, their range limits does not allow you to query anything
#
# they also say that start_time defaults to 0,
# but most calls fail if you do not specify any of end_time
#
# to make things worse, their docs say it should be a Unix Timestamp
        # but with seconds it fails, so we set milliseconds (somehow it works that way)
#
endTime = self.milliseconds()
request = {
'trading_pair_id': market['id'],
'timeframe': self.timeframes[timeframe],
'end_time': endTime,
}
if since is not None:
request['start_time'] = since
response = self.publicGetChartCandlesTradingPairId(self.extend(request, params))
ohlcv = response['result']['candles']
return self.parse_ohlcvs(ohlcv, market, timeframe, since, limit)
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetWalletBalances(params)
result = {'info': response}
balances = response['result']['balances']
for i in range(0, len(balances)):
balance = balances[i]
currency = balance['currency']
if currency in self.currencies_by_id:
currency = self.currencies_by_id[currency]['code']
account = {
'used': float(balance['on_order']),
'total': float(balance['total']),
}
account['free'] = float(account['total'] - account['used'])
result[currency] = account
return self.parse_balance(result)
def parse_order_status(self, status):
statuses = {
'filled': 'closed',
'rejected': 'closed',
'partially_filled': 'open',
'pending_cancellation': 'open',
'pending_modification': 'open',
'open': 'open',
'new': 'open',
'queued': 'open',
'cancelled': 'canceled',
'triggered': 'triggered',
}
if status in statuses:
return statuses[status]
return status
def parse_order(self, order, market=None):
#
# {
# 'completed_at': None,
# 'eq_price': '0',
# 'filled': '0',
# 'id': '88426800-beae-4407-b4a1-f65cef693542',
# 'price': '0.00000507',
# 'side': 'bid',
# 'size': '3503.6489',
# 'source': 'exchange',
# 'state': 'open',
# 'timestamp': 1535258403597,
# 'trading_pair_id': 'ACT-BTC',
# 'type': 'limit',
# }
#
symbol = None
if market is None:
marketId = self.safe_string_2(order, 'trading_pair', 'trading_pair_id')
market = self.safe_value(self.markets_by_id, marketId)
if market is not None:
symbol = market['symbol']
timestamp = self.safe_integer(order, 'timestamp')
price = self.safe_float(order, 'price')
average = self.safe_float(order, 'eq_price')
amount = self.safe_float(order, 'size')
filled = self.safe_float(order, 'filled')
remaining = None
cost = None
if filled is not None and average is not None:
cost = average * filled
elif average is not None:
cost = average * amount
if amount is not None:
if filled is not None:
remaining = amount - filled
status = self.parse_order_status(self.safe_string(order, 'state'))
side = self.safe_string(order, 'side')
if side == 'bid':
side = 'buy'
elif side == 'ask':
side = 'sell'
return {
'id': self.safe_string(order, 'id'),
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': self.safe_string(order, 'type'), # market, limit, stop, stop_limit, trailing_stop, fill_or_kill
'side': side,
'price': price,
'cost': cost,
'average': average,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': None,
'info': order,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
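        # Cobinhood's API says ask/bid where ccxt says sell/buy (see parse_order)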
side = 'ask' if (side == 'sell') else 'bid'
request = {
'trading_pair_id': market['id'],
'type': type, # market, limit, stop, stop_limit
'side': side,
'size': self.amount_to_precision(symbol, amount),
}
if type != 'market':
request['price'] = self.price_to_precision(symbol, price)
response = self.privatePostTradingOrders(self.extend(request, params))
order = self.parse_order(response['result']['order'], market)
id = order['id']
self.orders[id] = order
return order
def edit_order(self, id, symbol, type, side, amount, price, params={}):
self.load_markets()
response = self.privatePutTradingOrdersOrderId(self.extend({
'order_id': id,
'price': self.price_to_precision(symbol, price),
'size': self.amount_to_precision(symbol, amount),
}, params))
return self.parse_order(self.extend(response, {
'id': id,
}))
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
response = self.privateDeleteTradingOrdersOrderId(self.extend({
'order_id': id,
}, params))
return self.parse_order(self.extend(response, {
'id': id,
}))
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
response = self.privateGetTradingOrdersOrderId(self.extend({
'order_id': str(id),
}, params))
return self.parse_order(response['result']['order'])
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
result = self.privateGetTradingOrders(params)
orders = self.parse_orders(result['result']['orders'], None, since, limit)
if symbol is not None:
return self.filter_by_symbol_since_limit(orders, symbol, since, limit)
return self.filter_by_since_limit(orders, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
result = self.privateGetTradingOrderHistory(params)
orders = self.parse_orders(result['result']['orders'], None, since, limit)
if symbol is not None:
return self.filter_by_symbol_since_limit(orders, symbol, since, limit)
return self.filter_by_since_limit(orders, since, limit)
def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
self.load_markets()
response = self.privateGetTradingOrdersOrderIdTrades(self.extend({
'order_id': id,
}, params))
market = None if (symbol is None) else self.market(symbol)
return self.parse_trades(response['result']['trades'], market)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
        market = None
        request = {}
        if symbol is not None:
            market = self.market(symbol)
            request['trading_pair_id'] = market['id']
response = self.privateGetTradingTrades(self.extend(request, params))
return self.parse_trades(response['result']['trades'], market, since, limit)
def create_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
# 'ledger_type' is required, see: https://cobinhood.github.io/api-public/#create-new-deposit-address
ledgerType = self.safe_string(params, 'ledger_type', 'exchange')
request = {
'currency': currency['id'],
'ledger_type': ledgerType,
}
response = self.privatePostWalletDepositAddresses(self.extend(request, params))
address = self.safe_string(response['result']['deposit_address'], 'address')
tag = self.safe_string(response['result']['deposit_address'], 'memo')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': response,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
response = self.privateGetWalletDepositAddresses(self.extend({
'currency': currency['id'],
}, params))
#
# {success: True,
# result: {deposit_addresses: [{ address: "abcdefg",
# blockchain_id: "eosio",
# created_at: 1536768050235,
# currency: "EOS",
# memo: "12345678",
# type: "exchange" }]} }
#
addresses = self.safe_value(response['result'], 'deposit_addresses', [])
address = None
tag = None
if len(addresses) > 0:
address = self.safe_string(addresses[0], 'address')
tag = self.safe_string_2(addresses[0], 'memo', 'tag')
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': amount,
'address': address,
}
if tag is not None:
request['memo'] = tag
response = self.privatePostWalletWithdrawals(self.extend(request, params))
return {
'id': None,
'info': response,
}
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
self.load_markets()
if code is None:
            raise ExchangeError(self.id + ' fetchDeposits() requires a currency code argument')
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = self.privateGetWalletDeposits(self.extend(request, params))
return self.parseTransactions(response['result']['deposits'], currency)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
self.load_markets()
if code is None:
            raise ExchangeError(self.id + ' fetchWithdrawals() requires a currency code argument')
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = self.privateGetWalletWithdrawals(self.extend(request, params))
return self.parseTransactions(response['result']['withdrawals'], currency)
def parse_transaction_status(self, status):
statuses = {
'tx_pending_two_factor_auth': 'pending',
'tx_pending_email_auth': 'pending',
'tx_pending_approval': 'pending',
'tx_approved': 'pending',
'tx_processing': 'pending',
'tx_pending': 'pending',
'tx_sent': 'pending',
'tx_cancelled': 'canceled',
'tx_timeout': 'failed',
'tx_invalid': 'failed',
'tx_rejected': 'failed',
'tx_confirmed': 'ok',
}
return statuses[status] if (status in list(statuses.keys())) else status
def parse_transaction(self, transaction, currency=None):
timestamp = self.safe_integer(transaction, 'created_at')
code = None
if currency is None:
currencyId = self.safe_string(transaction, 'currency')
if currencyId in self.currencies_by_id:
currency = self.currencies_by_id[currencyId]
else:
code = self.common_currency_code(currencyId)
if currency is not None:
code = currency['code']
id = None
withdrawalId = self.safe_string(transaction, 'withdrawal_id')
depositId = self.safe_string(transaction, 'deposit_id')
type = None
address = None
if withdrawalId is not None:
type = 'withdrawal'
id = withdrawalId
address = self.safe_string(transaction, 'to_address')
elif depositId is not None:
type = 'deposit'
id = depositId
address = self.safe_string(transaction, 'from_address')
additionalInfo = self.safe_value(transaction, 'additional_info', {})
tag = self.safe_string(additionalInfo, 'memo')
return {
'info': transaction,
'id': id,
'txid': self.safe_string(transaction, 'txhash'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
            'tag': tag,  # FIXME: fix it properly
'type': type,
'amount': self.safe_float(transaction, 'amount'),
'currency': code,
'status': self.parse_transaction_status(transaction['status']),
'updated': None,
'fee': {
'cost': self.safe_float(transaction, 'fee'),
'rate': None,
},
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.version + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
headers = {}
if api == 'private':
self.check_required_credentials()
# headers['device_id'] = self.apiKey
headers['nonce'] = str(self.nonce())
headers['Authorization'] = self.apiKey
if method == 'GET':
query = self.urlencode(query)
if len(query):
url += '?' + query
else:
                headers['Content-type'] = 'application/json; charset=UTF-8'
body = self.json(query)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
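    # A minimal sketch of what sign() returns for a private GET call, assuming a
    # hypothetical apiKey of "my-token" and a nonce of 1535258403597:
    #   sign('trading/orders', api='private', method='GET')
    #   # -> {'url': '<api-base>/<version>/trading/orders', 'method': 'GET',
    #   #     'body': None,
    #   #     'headers': {'nonce': '1535258403597', 'Authorization': 'my-token'}}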
def handle_errors(self, code, reason, url, method, headers, body, response):
if code < 400 or code >= 600:
return
if body[0] != '{':
raise ExchangeError(self.id + ' ' + body)
response = json.loads(body)
feedback = self.id + ' ' + self.json(response)
errorCode = self.safe_value(response['error'], 'error_code')
if method == 'DELETE' or method == 'GET':
if errorCode == 'parameter_error':
if url.find('trading/orders/') >= 0:
# Cobinhood returns vague "parameter_error" on fetchOrder() and cancelOrder() calls
# for invalid order IDs as well as orders that are not "open"
raise InvalidOrder(feedback)
exceptions = self.exceptions
if errorCode in exceptions:
raise exceptions[errorCode](feedback)
raise ExchangeError(feedback)
def nonce(self):
return self.milliseconds()
| [
[
[
212,
220
],
[
516,
524
]
],
[
[
228,
232
],
[
30779,
30783
]
],
[
[
262,
275
],
[
26413,
26426
],
[
26910,
26923
],
[
30724,
30737
],
[
31442,
31455
]
],
[
[
305,
321
],
[
7598,
7614
]
],
[
[
351,
368
],
[
7442,
7459
]
],
[
[
398,
412
],
[
7651,
7665
]
],
[
[
442,
454
],
[
7499,
7511
],
[
31282,
31294
]
],
[
[
484,
496
],
[
7546,
7558
]
],
[
[
505,
514
],
[
590,
599
]
]
] |
import abc
from backend.model.SentenceTokenise import SentenceTokenise
from backend.service.ExtractSentences import extract_sentences
from backend.service.ReadCorpus import read_corpus
class Corpus:
def __init__(self):
self.receive_text = ""
self.input_file = "t1_biology_0_0.txt"
self.base_train_folder = "../data/source_txt/train/"
sentences = SentenceTokenise()
@abc.abstractmethod
def getInputText(self):
        # The cleaned corpus
        self.receive_text = read_corpus(self.base_train_folder, self.input_file)
        return self.receive_text
def getSentences(self, text):
        # The list of sentences
self.sentences.listOfSentence = extract_sentences(text)
return self.sentences.listOfSentence
def setInputText(self, text):
pass
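# A minimal usage sketch, relying on the default input file and train folder set
# in __init__ (both paths must exist for read_corpus to succeed):
#   corpus = Corpus()
#   text = corpus.getInputText()
#   sentences = corpus.getSentences(text)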
| [
[
[
7,
10
],
[
420,
423
]
],
[
[
55,
71
],
[
395,
411
]
],
[
[
117,
134
],
[
715,
732
]
],
[
[
174,
185
],
[
522,
533
]
],
[
[
194,
200
],
[
500,
506
],
[
590,
596
]
]
] |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by github's download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "v"
cfg.parentdir_prefix = "psyplot-ci-release-test-"
cfg.versionfile_source = "release_test/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
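# A worked example of render_pep440() with an illustrative pieces dict:
#   pieces = {"closest-tag": "1.2.3", "distance": 4,
#             "short": "abc1234", "dirty": True}
#   render_pep440(pieces)  # -> "1.2.3+4.gabc1234.dirty"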
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
| [
[
[
524,
529
],
[
2786,
2791
]
],
[
[
537,
539
],
[
3823,
3825
],
[
4136,
4138
],
[
17472,
17474
],
[
17768,
17770
]
],
[
[
547,
549
],
[
4947,
4949
],
[
5131,
5133
],
[
5311,
5313
],
[
7341,
7343
],
[
10358,
10360
]
],
[
[
557,
567
],
[
2446,
2456
],
[
2533,
2543
],
[
2591,
2601
]
],
[
[
575,
578
],
[
2742,
2745
],
[
3127,
3130
],
[
8712,
8715
]
],
[
[
585,
597
],
[
17325,
17337
]
],
[
[
1104,
1120
],
[
1369,
1385
]
],
[
[
1189,
1199
],
[
17234,
17244
]
],
[
[
1615,
1628
],
[
4319,
4332
],
[
5690,
5703
],
[
6422,
6435
],
[
9003,
9016
],
[
9536,
9549
],
[
9724,
9737
],
[
17419,
17432
],
[
18131,
18144
],
[
18293,
18306
]
],
[
[
1721,
1736
]
],
[
[
1742,
1750
],
[
1975,
1983
],
[
1997,
2005
],
[
2024,
2032
]
],
[
[
1762,
1782
],
[
4383,
4403
],
[
5499,
5519
],
[
8268,
8288
]
],
[
[
2093,
2104
],
[
8378,
8389
]
],
[
[
3404,
3427
],
[
18221,
18244
]
],
[
[
4431,
4447
]
],
[
[
5543,
5569
],
[
17298,
17324
]
],
[
[
8319,
8338
],
[
18028,
18047
]
],
[
[
11813,
11824
],
[
12465,
12476
],
[
13873,
13884
]
],
[
[
11986,
11999
],
[
16206,
16219
]
],
[
[
12868,
12885
],
[
16279,
16296
]
],
[
[
13274,
13292
],
[
16357,
16375
]
],
[
[
14156,
14173
],
[
16435,
16452
]
],
[
[
14726,
14745
],
[
16514,
16533
]
],
[
[
15240,
15264
],
[
16600,
16624
]
],
[
[
15768,
15774
],
[
18094,
18100
]
],
[
[
16866,
16878
]
]
] |
import sys
sys.stdout = open("6-num.txt", "w")
data = "1234567890"
for a in data:
for b in data:
for c in data:
for d in data:
for e in data:
for f in data:
print(a+b+c+d+e+f)
sys.stdout.close()
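# An equivalent, more idiomatic sketch using itertools.product in place of the
# six nested loops; the output file name "6-num-alt.txt" is illustrative:
import itertools
with open("6-num-alt.txt", "w") as out:
    for combo in itertools.product("1234567890", repeat=6):
        out.write("".join(combo) + "\n")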
| [
[
[
36,
40
],
[
65,
69
],
[
81,
85
],
[
98,
102
],
[
116,
120
],
[
135,
139
],
[
155,
159
]
],
[
[
60,
61
],
[
174,
175
]
],
[
[
76,
77
],
[
176,
177
]
],
[
[
93,
94
],
[
178,
179
]
],
[
[
111,
112
],
[
180,
181
]
],
[
[
130,
131
],
[
182,
183
]
],
[
[
150,
151
],
[
184,
185
]
]
] |
import os
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
# Set the hosting environment; if not set, default to production for security
HOSTING_ENV = os.getenv("HOSTING_ENV", "production")
if HOSTING_ENV == "dev":
from .dev import *
else:
from .production import *
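# A minimal sketch of the .env file this loader reads (the value is
# illustrative):
#   HOSTING_ENV=dev
# Without a HOSTING_ENV entry, the os.getenv fallback above selects the
# production settings module.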
| [
[
[
7,
9
],
[
175,
177
]
],
[
[
30,
41
],
[
72,
83
]
],
[
[
161,
172
],
[
218,
229
]
],
[
[
261,
262
]
],
[
[
297,
298
]
]
] |
import abc
import enum
import itertools
import logging
import uuid
from copy import deepcopy
from typing import Any, Dict, List, MutableMapping, Optional, Union
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.compat import StringIO
import great_expectations.exceptions as ge_exceptions
from great_expectations.core.util import convert_to_json_serializable, nested_update
from great_expectations.marshmallow__shade import (
INCLUDE,
Schema,
ValidationError,
fields,
post_dump,
post_load,
pre_load,
validates_schema,
)
from great_expectations.marshmallow__shade.validate import OneOf
from great_expectations.types import DictDot, SerializableDictDot
from great_expectations.types.configurations import ClassConfigSchema
yaml = YAML()
yaml.indent(mapping=2, sequence=4, offset=2)
logger = logging.getLogger(__name__)
CURRENT_GE_CONFIG_VERSION = 3
FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE = 3
CURRENT_CHECKPOINT_CONFIG_VERSION = 1
MINIMUM_SUPPORTED_CONFIG_VERSION = 2
DEFAULT_USAGE_STATISTICS_URL = (
"https://stats.greatexpectations.io/great_expectations/v1/usage_statistics"
)
def object_to_yaml_str(obj):
output_str: str
with StringIO() as string_stream:
yaml.dump(obj, string_stream)
output_str = string_stream.getvalue()
return output_str
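# A small usage sketch (the mapping below is illustrative):
#   object_to_yaml_str({"config_version": 3})  # -> "config_version: 3\n"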
class BaseYamlConfig(SerializableDictDot):
_config_schema_class = None
def __init__(self, commented_map: CommentedMap = None):
if commented_map is None:
commented_map = CommentedMap()
self._commented_map = commented_map
@classmethod
def _get_schema_instance(cls) -> Schema:
if not issubclass(cls.get_schema_class(), Schema):
raise ge_exceptions.InvalidConfigError(
"Invalid type: A configuration schema class needs to inherit from the Marshmallow Schema class."
)
if not issubclass(cls.get_config_class(), BaseYamlConfig):
raise ge_exceptions.InvalidConfigError(
"Invalid type: A configuration class needs to inherit from the BaseYamlConfig class."
)
if hasattr(cls.get_config_class(), "_schema_instance"):
# noinspection PyProtectedMember
schema_instance: Schema = cls.get_config_class()._schema_instance
if schema_instance is None:
cls.get_config_class()._schema_instance = (cls.get_schema_class())()
else:
return schema_instance
else:
cls.get_config_class().schema_instance = (cls.get_schema_class())()
return cls.get_config_class().schema_instance
@classmethod
def from_commented_map(cls, commented_map: CommentedMap):
try:
config: Union[dict, BaseYamlConfig]
config = cls._get_schema_instance().load(commented_map)
if isinstance(config, dict):
return cls.get_config_class()(commented_map=commented_map, **config)
return config
except ValidationError:
logger.error(
"Encountered errors during loading config. See ValidationError for more details."
)
raise
def _get_schema_validated_updated_commented_map(self) -> CommentedMap:
commented_map: CommentedMap = deepcopy(self._commented_map)
commented_map.update(self._get_schema_instance().dump(self))
return commented_map
def to_yaml(self, outfile):
"""
:returns None (but writes a YAML file containing the project configuration)
"""
yaml.dump(self.commented_map, outfile)
def to_yaml_str(self) -> str:
"""
:returns a YAML string containing the project configuration
"""
return object_to_yaml_str(self.commented_map)
def to_json_dict(self) -> dict:
"""
:returns a JSON-serialiable dict containing the project configuration
"""
commented_map: CommentedMap = self.commented_map
return convert_to_json_serializable(data=commented_map)
@property
def commented_map(self) -> CommentedMap:
return self._get_schema_validated_updated_commented_map()
@classmethod
def get_config_class(cls):
raise NotImplementedError
@classmethod
def get_schema_class(cls):
raise NotImplementedError
class AssetConfig(DictDot):
def __init__(
self,
name=None,
class_name=None,
module_name=None,
bucket=None,
prefix=None,
delimiter=None,
max_keys=None,
batch_spec_passthrough=None,
**kwargs,
):
if name is not None:
self.name = name
self._class_name = class_name
self._module_name = module_name
if bucket is not None:
self.bucket = bucket
if prefix is not None:
self.prefix = prefix
if delimiter is not None:
self.delimiter = delimiter
if max_keys is not None:
self.max_keys = max_keys
if batch_spec_passthrough is not None:
self.batch_spec_passthrough = batch_spec_passthrough
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class AssetConfigSchema(Schema):
class Meta:
unknown = INCLUDE
name = fields.String(required=False, allow_none=True)
class_name = fields.String(required=False, allow_none=True, missing="Asset")
module_name = fields.String(
required=False,
        allow_none=True,
missing="great_expectations.datasource.data_connector.asset",
)
base_directory = fields.String(required=False, allow_none=True)
glob_directive = fields.String(required=False, allow_none=True)
pattern = fields.String(required=False, allow_none=True)
group_names = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
bucket = fields.String(required=False, allow_none=True)
prefix = fields.String(required=False, allow_none=True)
delimiter = fields.String(required=False, allow_none=True)
max_keys = fields.Integer(required=False, allow_none=True)
batch_spec_passthrough = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
pass
# noinspection PyUnusedLocal
@post_load
def make_asset_config(self, data, **kwargs):
return AssetConfig(**data)
class SorterConfig(DictDot):
def __init__(
self,
name,
class_name=None,
module_name=None,
orderby="asc",
reference_list=None,
datetime_format=None,
**kwargs,
):
self._name = name
self._class_name = class_name
self._module_name = module_name
self._orderby = orderby
for k, v in kwargs.items():
setattr(self, k, v)
if reference_list is not None:
self._reference_list = reference_list
if datetime_format is not None:
self._datetime_format = datetime_format
@property
def name(self):
return self._name
@property
def module_name(self):
return self._module_name
@property
def class_name(self):
return self._class_name
@property
def orderby(self):
return self._orderby
@property
def reference_list(self):
return self._reference_list
@property
def datetime_format(self):
return self._datetime_format
class SorterConfigSchema(Schema):
class Meta:
unknown = INCLUDE
name = fields.String(required=True)
class_name = fields.String(required=True)
module_name = fields.String(
missing="great_expectations.datasource.data_connector.sorter"
)
orderby = fields.String(required=False, missing="asc", allow_none=False)
# allow_none = True because it is only used by some Sorters
reference_list = fields.List(
cls_or_instance=fields.Str(), required=False, missing=None, allow_none=True
)
datetime_format = fields.String(required=False, missing=None, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
pass
# noinspection PyUnusedLocal
@post_load
def make_sorter_config(self, data, **kwargs):
return SorterConfig(**data)
class DataConnectorConfig(DictDot):
def __init__(
self,
class_name,
module_name=None,
credentials=None,
assets=None,
base_directory=None,
glob_directive=None,
default_regex=None,
batch_identifiers=None,
sorters=None,
batch_spec_passthrough=None,
# S3
boto3_options=None,
bucket=None,
max_keys=None,
# Azure
azure_options=None,
container=None,
name_starts_with=None,
# GCS
bucket_or_name=None,
max_results=None,
# Both S3/GCS
prefix=None,
# Both S3/Azure
delimiter=None,
**kwargs,
):
self._class_name = class_name
self._module_name = module_name
if credentials is not None:
self.credentials = credentials
if assets is not None:
self.assets = assets
if base_directory is not None:
self.base_directory = base_directory
if glob_directive is not None:
self.glob_directive = glob_directive
if default_regex is not None:
self.default_regex = default_regex
if batch_identifiers is not None:
self.batch_identifiers = batch_identifiers
if sorters is not None:
self.sorters = sorters
if batch_spec_passthrough is not None:
self.batch_spec_passthrough = batch_spec_passthrough
# S3
if boto3_options is not None:
self.boto3_options = boto3_options
if bucket is not None:
self.bucket = bucket
if max_keys is not None:
self.max_keys = max_keys
# Azure
if azure_options is not None:
self.azure_options = azure_options
if container is not None:
self.container = container
if name_starts_with is not None:
self.name_starts_with = name_starts_with
# GCS
if bucket_or_name is not None:
self.bucket_or_name = bucket_or_name
if max_results is not None:
self.max_results = max_results
# Both S3/GCS
if prefix is not None:
self.prefix = prefix
# Both S3/Azure
if delimiter is not None:
self.delimiter = delimiter
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class DataConnectorConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(required=True)
module_name = fields.String(missing="great_expectations.datasource.data_connector")
assets = fields.Dict(
keys=fields.Str(),
values=fields.Nested(AssetConfigSchema, required=False, allow_none=True),
required=False,
allow_none=True,
)
base_directory = fields.String(required=False, allow_none=True)
glob_directive = fields.String(required=False, allow_none=True)
sorters = fields.List(
fields.Nested(SorterConfigSchema, required=False, allow_none=True),
required=False,
allow_none=True,
)
default_regex = fields.Dict(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
batch_identifiers = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
# S3
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
bucket = fields.String(required=False, allow_none=True)
max_keys = fields.Integer(required=False, allow_none=True)
# Azure
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
container = fields.String(required=False, allow_none=True)
name_starts_with = fields.String(required=False, allow_none=True)
# GCS
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
bucket_or_name = fields.String(required=False, allow_none=True)
max_results = fields.String(required=False, allow_none=True)
# Both S3/GCS
prefix = fields.String(required=False, allow_none=True)
# Both S3/Azure
delimiter = fields.String(required=False, allow_none=True)
data_asset_name_prefix = fields.String(required=False, allow_none=True)
data_asset_name_suffix = fields.String(required=False, allow_none=True)
include_schema_name = fields.Boolean(required=False, allow_none=True)
splitter_method = fields.String(required=False, allow_none=True)
splitter_kwargs = fields.Dict(required=False, allow_none=True)
sampling_method = fields.String(required=False, allow_none=True)
sampling_kwargs = fields.Dict(required=False, allow_none=True)
excluded_tables = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
included_tables = fields.List(
cls_or_instance=fields.Str(), required=False, allow_none=True
)
skip_inapplicable_tables = fields.Boolean(required=False, allow_none=True)
batch_spec_passthrough = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if ("default_regex" in data) and not (
data["class_name"]
in [
"InferredAssetFilesystemDataConnector",
"ConfiguredAssetFilesystemDataConnector",
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
subclass of the FilePathDataConnector class (your data connector is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if ("glob_directive" in data) and not (
data["class_name"]
in [
"InferredAssetFilesystemDataConnector",
"ConfiguredAssetFilesystemDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
filesystem type of the data connector (your data connector is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if ("delimiter" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3/Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if ("prefix" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3/GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if ("bucket" in data or "max_keys" in data) and not (
data["class_name"]
in [
"InferredAssetS3DataConnector",
"ConfiguredAssetS3DataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
S3 type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if (
"azure_options" in data or "container" in data or "name_starts_with" in data
) and not (
data["class_name"]
in [
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if "azure_options" in data and data["class_name"] in [
"InferredAssetAzureDataConnector",
"ConfiguredAssetAzureDataConnector",
]:
azure_options = data["azure_options"]
if not (("conn_str" in azure_options) ^ ("account_url" in azure_options)):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration is either missing methods of authentication or is using too many for the Azure type of data connector.
You must only select one between `conn_str` or `account_url`. Please update your configuration to continue.
"""
)
if (
"gcs_options" in data or "bucket_or_name" in data or "max_results" in data
) and not (
data["class_name"]
in [
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by a
GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
if "gcs_options" in data and data["class_name"] in [
"InferredAssetGCSDataConnector",
"ConfiguredAssetGCSDataConnector",
]:
gcs_options = data["gcs_options"]
if "filename" in gcs_options and "info" in gcs_options:
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration can only use a single method of authentication for the GCS type of data connector.
You must only select one between `filename` (from_service_account_file) and `info` (from_service_account_info). Please update your configuration to continue.
"""
)
if (
"data_asset_name_prefix" in data
or "data_asset_name_suffix" in data
or "include_schema_name" in data
or "splitter_method" in data
or "splitter_kwargs" in data
or "sampling_method" in data
or "sampling_kwargs" in data
or "excluded_tables" in data
or "included_tables" in data
or "skip_inapplicable_tables" in data
) and not (
data["class_name"]
in [
"InferredAssetSqlDataConnector",
"ConfiguredAssetSqlDataConnector",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data connector that are required only by an
SQL type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to
continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_data_connector_config(self, data, **kwargs):
return DataConnectorConfig(**data)
class ExecutionEngineConfig(DictDot):
def __init__(
self,
class_name,
module_name=None,
caching=None,
batch_spec_defaults=None,
connection_string=None,
credentials=None,
spark_config=None,
boto3_options=None,
azure_options=None,
gcs_options=None,
**kwargs,
):
self._class_name = class_name
self._module_name = module_name
if caching is not None:
self.caching = caching
if batch_spec_defaults is not None:
self._batch_spec_defaults = batch_spec_defaults
if connection_string is not None:
self.connection_string = connection_string
if credentials is not None:
self.credentials = credentials
if spark_config is not None:
self.spark_config = spark_config
if boto3_options is not None:
self.boto3_options = boto3_options
if azure_options is not None:
self.azure_options = azure_options
if gcs_options is not None:
self.gcs_options = gcs_options
for k, v in kwargs.items():
setattr(self, k, v)
@property
def module_name(self):
return self._module_name
@property
def class_name(self):
return self._class_name
@property
def batch_spec_defaults(self):
return self._batch_spec_defaults
class ExecutionEngineConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(required=True)
module_name = fields.String(missing="great_expectations.execution_engine")
connection_string = fields.String(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
spark_config = fields.Raw(required=False, allow_none=True)
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
caching = fields.Boolean(required=False, allow_none=True)
batch_spec_defaults = fields.Dict(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if ("connection_string" in data or "credentials" in data) and not (
data["class_name"] == "SqlAlchemyExecutionEngine"
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses the "connection_string" key in an execution engine, but only
SqlAlchemyExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
if "spark_config" in data and not (
data["class_name"] == "SparkDFExecutionEngine"
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses the "spark_config" key in an execution engine, but only
SparkDFExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your
configuration to continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_execution_engine_config(self, data, **kwargs):
return ExecutionEngineConfig(**data)
class DatasourceConfig(DictDot):
def __init__(
self,
class_name=None,
module_name: Optional[str] = "great_expectations.datasource",
execution_engine=None,
data_connectors=None,
data_asset_type=None,
batch_kwargs_generators=None,
connection_string=None,
credentials=None,
introspection=None,
tables=None,
boto3_options=None,
azure_options=None,
gcs_options=None,
reader_method=None,
reader_options=None,
limit=None,
**kwargs,
):
# NOTE - JPC - 20200316: Currently, we are mostly inconsistent with respect to this type...
self._class_name = class_name
self._module_name = module_name
if execution_engine is not None:
self.execution_engine = execution_engine
if data_connectors is not None and isinstance(data_connectors, dict):
self.data_connectors = data_connectors
# NOTE - AJB - 20201202: This should use the datasource class build_configuration method as in DataContext.add_datasource()
if data_asset_type is None:
if class_name == "PandasDatasource":
data_asset_type = {
"class_name": "PandasDataset",
"module_name": "great_expectations.dataset",
}
elif class_name == "SqlAlchemyDatasource":
data_asset_type = {
"class_name": "SqlAlchemyDataset",
"module_name": "great_expectations.dataset",
}
elif class_name == "SparkDFDatasource":
data_asset_type = {
"class_name": "SparkDFDataset",
"module_name": "great_expectations.dataset",
}
if data_asset_type is not None:
self.data_asset_type = data_asset_type
if batch_kwargs_generators is not None:
self.batch_kwargs_generators = batch_kwargs_generators
if connection_string is not None:
self.connection_string = connection_string
if credentials is not None:
self.credentials = credentials
if introspection is not None:
self.introspection = introspection
if tables is not None:
self.tables = tables
if boto3_options is not None:
self.boto3_options = boto3_options
if azure_options is not None:
self.azure_options = azure_options
if gcs_options is not None:
self.gcs_options = gcs_options
if reader_method is not None:
self.reader_method = reader_method
if reader_options is not None:
self.reader_options = reader_options
if limit is not None:
self.limit = limit
for k, v in kwargs.items():
setattr(self, k, v)
@property
def class_name(self):
return self._class_name
@property
def module_name(self):
return self._module_name
class DatasourceConfigSchema(Schema):
class Meta:
unknown = INCLUDE
class_name = fields.String(missing="Datasource")
module_name = fields.String(missing="great_expectations.datasource")
force_reuse_spark_context = fields.Bool(required=False, allow_none=True)
spark_config = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
execution_engine = fields.Nested(
ExecutionEngineConfigSchema, required=False, allow_none=True
)
data_connectors = fields.Dict(
keys=fields.Str(),
values=fields.Nested(DataConnectorConfigSchema),
required=False,
allow_none=True,
)
data_asset_type = fields.Nested(ClassConfigSchema, required=False, allow_none=True)
# TODO: Update to generator-specific
# batch_kwargs_generators = fields.Mapping(keys=fields.Str(), values=fields.Nested(fields.GeneratorSchema))
batch_kwargs_generators = fields.Dict(
keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True
)
connection_string = fields.String(required=False, allow_none=True)
credentials = fields.Raw(required=False, allow_none=True)
introspection = fields.Dict(required=False, allow_none=True)
tables = fields.Dict(required=False, allow_none=True)
boto3_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
azure_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
gcs_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
reader_method = fields.String(required=False, allow_none=True)
reader_options = fields.Dict(
keys=fields.Str(), values=fields.Str(), required=False, allow_none=True
)
limit = fields.Integer(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
if "generators" in data:
raise ge_exceptions.InvalidConfigError(
'Your current configuration uses the "generators" key in a datasource, but in version 0.10 of '
'GE that key is renamed to "batch_kwargs_generators". Please update your configuration to continue.'
)
# If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted.
if data["class_name"][0] == "$":
return
if (
"connection_string" in data
or "credentials" in data
or "introspection" in data
or "tables" in data
) and not (
data["class_name"]
in [
"SqlAlchemyDatasource",
"SimpleSqlalchemyDatasource",
]
):
raise ge_exceptions.InvalidConfigError(
f"""Your current configuration uses one or more keys in a data source that are required only by a
sqlalchemy data source (your data source is "{data['class_name']}"). Please update your configuration to continue.
"""
)
# noinspection PyUnusedLocal
@post_load
def make_datasource_config(self, data, **kwargs):
return DatasourceConfig(**data)
class AnonymizedUsageStatisticsConfig(DictDot):
def __init__(self, enabled=True, data_context_id=None, usage_statistics_url=None):
self._enabled = enabled
if data_context_id is None:
data_context_id = str(uuid.uuid4())
self._explicit_id = False
else:
self._explicit_id = True
self._data_context_id = data_context_id
if usage_statistics_url is None:
usage_statistics_url = DEFAULT_USAGE_STATISTICS_URL
self._explicit_url = False
else:
self._explicit_url = True
self._usage_statistics_url = usage_statistics_url
@property
def enabled(self):
return self._enabled
@enabled.setter
def enabled(self, enabled):
if not isinstance(enabled, bool):
raise ValueError("usage statistics enabled property must be boolean")
self._enabled = enabled
@property
def data_context_id(self):
return self._data_context_id
@data_context_id.setter
def data_context_id(self, data_context_id):
try:
uuid.UUID(data_context_id)
except ValueError:
raise ge_exceptions.InvalidConfigError(
"data_context_id must be a valid uuid"
)
self._data_context_id = data_context_id
self._explicit_id = True
@property
def explicit_id(self):
return self._explicit_id
@property
def usage_statistics_url(self):
return self._usage_statistics_url
@usage_statistics_url.setter
def usage_statistics_url(self, usage_statistics_url):
self._usage_statistics_url = usage_statistics_url
self._explicit_url = True
class AnonymizedUsageStatisticsConfigSchema(Schema):
data_context_id = fields.UUID()
enabled = fields.Boolean(default=True)
usage_statistics_url = fields.URL(allow_none=True)
_explicit_url = fields.Boolean(required=False)
# noinspection PyUnusedLocal
@post_load()
def make_usage_statistics_config(self, data, **kwargs):
if "data_context_id" in data:
data["data_context_id"] = str(data["data_context_id"])
return AnonymizedUsageStatisticsConfig(**data)
# noinspection PyUnusedLocal
@post_dump()
def filter_implicit(self, data, **kwargs):
if not data.get("_explicit_url") and "usage_statistics_url" in data:
del data["usage_statistics_url"]
if "_explicit_url" in data:
del data["_explicit_url"]
return data
class NotebookTemplateConfig(DictDot):
def __init__(self, file_name, template_kwargs=None):
self.file_name = file_name
if template_kwargs:
self.template_kwargs = template_kwargs
else:
self.template_kwargs = {}
class NotebookTemplateConfigSchema(Schema):
file_name = fields.String()
template_kwargs = fields.Dict(
keys=fields.Str(), values=fields.Str(), allow_none=True
)
# noinspection PyUnusedLocal
@post_load
def make_notebook_template_config(self, data, **kwargs):
return NotebookTemplateConfig(**data)
class NotebookConfig(DictDot):
def __init__(
self,
class_name,
module_name,
custom_templates_module,
header_markdown=None,
footer_markdown=None,
table_expectations_header_markdown=None,
column_expectations_header_markdown=None,
table_expectations_not_found_markdown=None,
column_expectations_not_found_markdown=None,
authoring_intro_markdown=None,
column_expectations_markdown=None,
header_code=None,
footer_code=None,
table_expectation_code=None,
column_expectation_code=None,
):
self.class_name = class_name
self.module_name = module_name
self.custom_templates_module = custom_templates_module
self.header_markdown = header_markdown
self.footer_markdown = footer_markdown
self.table_expectations_header_markdown = table_expectations_header_markdown
self.column_expectations_header_markdown = column_expectations_header_markdown
self.table_expectations_not_found_markdown = (
table_expectations_not_found_markdown
)
self.column_expectations_not_found_markdown = (
column_expectations_not_found_markdown
)
self.authoring_intro_markdown = authoring_intro_markdown
self.column_expectations_markdown = column_expectations_markdown
self.header_code = header_code
self.footer_code = footer_code
self.table_expectation_code = table_expectation_code
self.column_expectation_code = column_expectation_code
class NotebookConfigSchema(Schema):
class_name = fields.String(missing="SuiteEditNotebookRenderer")
module_name = fields.String(
missing="great_expectations.render.renderer.suite_edit_notebook_renderer"
)
custom_templates_module = fields.String()
header_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
footer_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
table_expectations_header_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_header_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
table_expectations_not_found_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_not_found_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
authoring_intro_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectations_markdown = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
header_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
footer_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True)
table_expectation_code = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
column_expectation_code = fields.Nested(
NotebookTemplateConfigSchema, allow_none=True
)
# noinspection PyUnusedLocal
@post_load
def make_notebook_config(self, data, **kwargs):
return NotebookConfig(**data)
class NotebooksConfig(DictDot):
def __init__(self, suite_edit):
self.suite_edit = suite_edit
class NotebooksConfigSchema(Schema):
# for now only suite_edit, could have other customization options for
# notebooks in the future
suite_edit = fields.Nested(NotebookConfigSchema)
# noinspection PyUnusedLocal
@post_load
def make_notebooks_config(self, data, **kwargs):
return NotebooksConfig(**data)
class ConcurrencyConfig(DictDot):
"""WARNING: This class is experimental."""
def __init__(self, enabled: Optional[bool] = False):
"""Initialize a concurrency configuration to control multithreaded execution.
Args:
enabled: Whether or not multithreading is enabled.
"""
self._enabled = enabled
@property
def enabled(self):
"""Whether or not multithreading is enabled."""
return self._enabled
@property
def max_database_query_concurrency(self) -> int:
"""Max number of concurrent database queries to execute with mulithreading."""
        # BigQuery has a limit of 100 for "Concurrent rate limit for interactive queries"
        # (see https://cloud.google.com/bigquery/quotas#query_jobs). If necessary, this can later be tuned for other
        # databases and/or be made user configurable.
return 100
def add_sqlalchemy_create_engine_parameters(
self, parameters: MutableMapping[str, Any]
):
"""Update SqlAlchemy parameters to prevent concurrency errors (e.g. http://sqlalche.me/e/14/3o7r) and
bottlenecks.
Args:
parameters: SqlAlchemy create_engine parameters to which we add concurrency appropriate parameters. If the
concurrency parameters are already set, those parameters are left unchanged.
"""
if not self._enabled:
return
if "pool_size" not in parameters:
# https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.pool_size
parameters["pool_size"] = 0
if "max_overflow" not in parameters:
# https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.max_overflow
parameters["max_overflow"] = -1
class ConcurrencyConfigSchema(Schema):
"""WARNING: This class is experimental."""
enabled = fields.Boolean(default=False)
class GeCloudConfig(DictDot):
def __init__(self, base_url: str, account_id: str, access_token: str):
self.base_url = base_url
self.account_id = account_id
self.access_token = access_token
def to_json_dict(self):
return {
"base_url": self.base_url,
"account_id": self.account_id,
"access_token": self.access_token,
}
class DataContextConfigSchema(Schema):
config_version = fields.Number(
validate=lambda x: 0 < x < 100,
error_messages={"invalid": "config version must " "be a number."},
)
datasources = fields.Dict(
keys=fields.Str(),
values=fields.Nested(DatasourceConfigSchema),
required=False,
allow_none=True,
)
expectations_store_name = fields.Str()
validations_store_name = fields.Str()
evaluation_parameter_store_name = fields.Str()
checkpoint_store_name = fields.Str(required=False, allow_none=True)
plugins_directory = fields.Str(allow_none=True)
validation_operators = fields.Dict(
keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True
)
stores = fields.Dict(keys=fields.Str(), values=fields.Dict())
notebooks = fields.Nested(NotebooksConfigSchema, allow_none=True)
data_docs_sites = fields.Dict(
keys=fields.Str(), values=fields.Dict(), allow_none=True
)
config_variables_file_path = fields.Str(allow_none=True)
anonymous_usage_statistics = fields.Nested(AnonymizedUsageStatisticsConfigSchema)
concurrency = fields.Nested(ConcurrencyConfigSchema)
# noinspection PyMethodMayBeStatic
# noinspection PyUnusedLocal
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
if (
exc
and exc.messages
and isinstance(exc.messages, dict)
and all([key is None for key in exc.messages.keys()])
):
exc.messages = list(itertools.chain.from_iterable(exc.messages.values()))
message: str = (
f"Error while processing DataContextConfig: {' '.join(exc.messages)}"
)
logger.error(message)
raise ge_exceptions.InvalidDataContextConfigError(
message=message,
)
@validates_schema
def validate_schema(self, data, **kwargs):
if "config_version" not in data:
raise ge_exceptions.InvalidDataContextConfigError(
"The key `config_version` is missing; please check your config file.",
validation_error=ValidationError(message="no config_version key"),
)
if not isinstance(data["config_version"], (int, float)):
raise ge_exceptions.InvalidDataContextConfigError(
"The key `config_version` must be a number. Please check your config file.",
validation_error=ValidationError(message="config version not a number"),
)
# When migrating from 0.7.x to 0.8.0
if data["config_version"] == 0 and any(
[
store_config["class_name"] == "ValidationsStore"
for store_config in data["stores"].values()
]
):
raise ge_exceptions.UnsupportedConfigVersionError(
"You appear to be using a config version from the 0.7.x series. This version is no longer supported."
)
if data["config_version"] < MINIMUM_SUPPORTED_CONFIG_VERSION:
raise ge_exceptions.UnsupportedConfigVersionError(
"You appear to have an invalid config version ({}).\n The version number must be at least {}. "
"Please see the migration guide at https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html".format(
data["config_version"], MINIMUM_SUPPORTED_CONFIG_VERSION
),
)
if data["config_version"] > CURRENT_GE_CONFIG_VERSION:
raise ge_exceptions.InvalidDataContextConfigError(
"You appear to have an invalid config version ({}).\n The maximum valid version is {}.".format(
data["config_version"], CURRENT_GE_CONFIG_VERSION
),
validation_error=ValidationError(message="config version too high"),
)
if data["config_version"] < CURRENT_GE_CONFIG_VERSION and (
"checkpoint_store_name" in data
or any(
[
store_config["class_name"] == "CheckpointStore"
for store_config in data["stores"].values()
]
)
):
raise ge_exceptions.InvalidDataContextConfigError(
"You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format(
data["config_version"], float(CURRENT_GE_CONFIG_VERSION)
),
validation_error=ValidationError(
message="You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format(
data["config_version"], float(CURRENT_GE_CONFIG_VERSION)
)
),
)
if (
data["config_version"] >= FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE
and "validation_operators" in data
and data["validation_operators"] is not None
):
# TODO: <Alex>Add a URL to the migration guide with instructions for how to replace validation_operators with appropriate actions.</Alex>
logger.warning(
"You appear to be using a legacy capability with the latest config version ({}).\n Your data context with this configuration version uses validation_operators, which are being deprecated. Please update your configuration to be compatible with the version number {}.".format(
data["config_version"], CURRENT_GE_CONFIG_VERSION
),
)
class DataContextConfigDefaults(enum.Enum):
DEFAULT_CONFIG_VERSION = CURRENT_GE_CONFIG_VERSION
DEFAULT_EXPECTATIONS_STORE_NAME = "expectations_store"
EXPECTATIONS_BASE_DIRECTORY = "expectations"
DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"{EXPECTATIONS_BASE_DIRECTORY}/"
)
DEFAULT_VALIDATIONS_STORE_NAME = "validations_store"
VALIDATIONS_BASE_DIRECTORY = "validations"
DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"uncommitted/{VALIDATIONS_BASE_DIRECTORY}/"
)
DEFAULT_EVALUATION_PARAMETER_STORE_NAME = "evaluation_parameter_store"
DEFAULT_EVALUATION_PARAMETER_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
"evaluation_parameters/"
)
DEFAULT_CHECKPOINT_STORE_NAME = "checkpoint_store"
CHECKPOINTS_BASE_DIRECTORY = "checkpoints"
DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME = (
f"{CHECKPOINTS_BASE_DIRECTORY}/"
)
DEFAULT_DATA_DOCS_SITE_NAME = "local_site"
DEFAULT_CONFIG_VARIABLES_FILEPATH = "uncommitted/config_variables.yml"
PLUGINS_BASE_DIRECTORY = "plugins"
DEFAULT_PLUGINS_DIRECTORY = f"{PLUGINS_BASE_DIRECTORY}/"
NOTEBOOKS_BASE_DIRECTORY = "notebooks"
DEFAULT_VALIDATION_OPERATORS = {
"action_list_operator": {
"class_name": "ActionListValidationOperator",
"action_list": [
{
"name": "store_validation_result",
"action": {"class_name": "StoreValidationResultAction"},
},
{
"name": "store_evaluation_params",
"action": {"class_name": "StoreEvaluationParametersAction"},
},
{
"name": "update_data_docs",
"action": {"class_name": "UpdateDataDocsAction"},
},
],
}
}
DEFAULT_STORES = {
DEFAULT_EXPECTATIONS_STORE_NAME: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
DEFAULT_VALIDATIONS_STORE_NAME: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
DEFAULT_EVALUATION_PARAMETER_STORE_NAME: {
"class_name": "EvaluationParameterStore"
},
DEFAULT_CHECKPOINT_STORE_NAME: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"suppress_store_backend_id": True,
"base_directory": DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME,
},
},
}
DEFAULT_DATA_DOCS_SITES = {
DEFAULT_DATA_DOCS_SITE_NAME: {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleFilesystemStoreBackend",
"base_directory": "uncommitted/data_docs/local_site/",
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
class CheckpointConfigDefaults(enum.Enum):
DEFAULT_CONFIG_VERSION = CURRENT_CHECKPOINT_CONFIG_VERSION
class BaseStoreBackendDefaults(DictDot):
"""
Define base defaults for platform specific StoreBackendDefaults.
StoreBackendDefaults define defaults for specific cases of often used configurations.
    For example, if you plan to store expectations, validations, and data_docs in S3, use S3StoreBackendDefaults and you may be able to specify fewer parameters.
"""
def __init__(
self,
expectations_store_name: str = DataContextConfigDefaults.DEFAULT_EXPECTATIONS_STORE_NAME.value,
validations_store_name: str = DataContextConfigDefaults.DEFAULT_VALIDATIONS_STORE_NAME.value,
evaluation_parameter_store_name: str = DataContextConfigDefaults.DEFAULT_EVALUATION_PARAMETER_STORE_NAME.value,
checkpoint_store_name: str = DataContextConfigDefaults.DEFAULT_CHECKPOINT_STORE_NAME.value,
data_docs_site_name: str = DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITE_NAME.value,
        validation_operators: Optional[dict] = None,
        stores: Optional[dict] = None,
        data_docs_sites: Optional[dict] = None,
):
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.validation_operators = validation_operators
if stores is None:
stores = deepcopy(DataContextConfigDefaults.DEFAULT_STORES.value)
self.stores = stores
if data_docs_sites is None:
data_docs_sites = deepcopy(
DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITES.value
)
self.data_docs_sites = data_docs_sites
self.data_docs_site_name = data_docs_site_name
class S3StoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for s3 backends, with some accessible parameters
Args:
default_bucket_name: Use this bucket name for stores that do not have a bucket name provided
expectations_store_bucket_name: Overrides default_bucket_name if supplied
validations_store_bucket_name: Overrides default_bucket_name if supplied
data_docs_bucket_name: Overrides default_bucket_name if supplied
checkpoint_store_bucket_name: Overrides default_bucket_name if supplied
expectations_store_prefix: Overrides default if supplied
validations_store_prefix: Overrides default if supplied
data_docs_prefix: Overrides default if supplied
checkpoint_store_prefix: Overrides default if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_bucket_name: Optional[str] = None,
expectations_store_bucket_name: Optional[str] = None,
validations_store_bucket_name: Optional[str] = None,
data_docs_bucket_name: Optional[str] = None,
checkpoint_store_bucket_name: Optional[str] = None,
expectations_store_prefix: str = "expectations",
validations_store_prefix: str = "validations",
data_docs_prefix: str = "data_docs",
checkpoint_store_prefix: str = "checkpoints",
expectations_store_name: str = "expectations_S3_store",
validations_store_name: str = "validations_S3_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_S3_store",
):
# Initialize base defaults
super().__init__()
# Use default_bucket_name if separate store buckets are not provided
if expectations_store_bucket_name is None:
expectations_store_bucket_name = default_bucket_name
if validations_store_bucket_name is None:
validations_store_bucket_name = default_bucket_name
if data_docs_bucket_name is None:
data_docs_bucket_name = default_bucket_name
if checkpoint_store_bucket_name is None:
checkpoint_store_bucket_name = default_bucket_name
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": expectations_store_bucket_name,
"prefix": expectations_store_prefix,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": validations_store_bucket_name,
"prefix": validations_store_prefix,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": checkpoint_store_bucket_name,
"prefix": checkpoint_store_prefix,
},
},
}
self.data_docs_sites = {
"s3_site": {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleS3StoreBackend",
"bucket": data_docs_bucket_name,
"prefix": data_docs_prefix,
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
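# Usage sketch (bucket name hypothetical): point every store at a single S3
# bucket and hand the defaults to a DataContextConfig (defined further below):
#
#   store_defaults = S3StoreBackendDefaults(default_bucket_name="my-ge-bucket")
#   project_config = DataContextConfig(store_backend_defaults=store_defaults)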
class FilesystemStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for filesystem backends, with some accessible parameters
Args:
root_directory: Absolute directory prepended to the base_directory for each store
plugins_directory: Overrides default if supplied
"""
def __init__(
self,
root_directory: Optional[str] = None,
plugins_directory: Optional[str] = None,
):
# Initialize base defaults
super().__init__()
if plugins_directory is None:
plugins_directory = (
DataContextConfigDefaults.DEFAULT_PLUGINS_DIRECTORY.value
)
self.plugins_directory = plugins_directory
if root_directory is not None:
self.stores[self.expectations_store_name]["store_backend"][
"root_directory"
] = root_directory
self.stores[self.validations_store_name]["store_backend"][
"root_directory"
] = root_directory
self.stores[self.checkpoint_store_name]["store_backend"][
"root_directory"
] = root_directory
self.data_docs_sites[self.data_docs_site_name]["store_backend"][
"root_directory"
] = root_directory
class InMemoryStoreBackendDefaults(BaseStoreBackendDefaults):
"""
    Default store configs for in-memory backends.
This is useful for testing without persistence.
"""
def __init__(
self,
):
# Initialize base defaults
super().__init__()
self.stores = {
self.expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
self.validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
self.evaluation_parameter_store_name: {
"class_name": "EvaluationParameterStore"
},
self.checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "InMemoryStoreBackend",
},
},
}
self.data_docs_sites = {}
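# Sketch for ephemeral test contexts (nothing is persisted to disk):
#
#   test_config = DataContextConfig(
#       store_backend_defaults=InMemoryStoreBackendDefaults()
#   )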
class GCSStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for Google Cloud Storage (GCS) backends, with some accessible parameters
Args:
default_bucket_name: Use this bucket name for stores that do not have a bucket name provided
default_project_name: Use this project name for stores that do not have a project name provided
expectations_store_bucket_name: Overrides default_bucket_name if supplied
validations_store_bucket_name: Overrides default_bucket_name if supplied
data_docs_bucket_name: Overrides default_bucket_name if supplied
checkpoint_store_bucket_name: Overrides default_bucket_name if supplied
expectations_store_project_name: Overrides default_project_name if supplied
validations_store_project_name: Overrides default_project_name if supplied
data_docs_project_name: Overrides default_project_name if supplied
checkpoint_store_project_name: Overrides default_project_name if supplied
expectations_store_prefix: Overrides default if supplied
validations_store_prefix: Overrides default if supplied
data_docs_prefix: Overrides default if supplied
checkpoint_store_prefix: Overrides default if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_bucket_name: Optional[str] = None,
default_project_name: Optional[str] = None,
expectations_store_bucket_name: Optional[str] = None,
validations_store_bucket_name: Optional[str] = None,
data_docs_bucket_name: Optional[str] = None,
checkpoint_store_bucket_name: Optional[str] = None,
expectations_store_project_name: Optional[str] = None,
validations_store_project_name: Optional[str] = None,
data_docs_project_name: Optional[str] = None,
checkpoint_store_project_name: Optional[str] = None,
expectations_store_prefix: str = "expectations",
validations_store_prefix: str = "validations",
data_docs_prefix: str = "data_docs",
checkpoint_store_prefix: str = "checkpoints",
expectations_store_name: str = "expectations_GCS_store",
validations_store_name: str = "validations_GCS_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_GCS_store",
):
# Initialize base defaults
super().__init__()
# Use default_bucket_name if separate store buckets are not provided
if expectations_store_bucket_name is None:
expectations_store_bucket_name = default_bucket_name
if validations_store_bucket_name is None:
validations_store_bucket_name = default_bucket_name
if data_docs_bucket_name is None:
data_docs_bucket_name = default_bucket_name
if checkpoint_store_bucket_name is None:
checkpoint_store_bucket_name = default_bucket_name
# Use default_project_name if separate store projects are not provided
if expectations_store_project_name is None:
expectations_store_project_name = default_project_name
if validations_store_project_name is None:
validations_store_project_name = default_project_name
if data_docs_project_name is None:
data_docs_project_name = default_project_name
if checkpoint_store_project_name is None:
checkpoint_store_project_name = default_project_name
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": expectations_store_project_name,
"bucket": expectations_store_bucket_name,
"prefix": expectations_store_prefix,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": validations_store_project_name,
"bucket": validations_store_bucket_name,
"prefix": validations_store_prefix,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": checkpoint_store_project_name,
"bucket": checkpoint_store_bucket_name,
"prefix": checkpoint_store_prefix,
},
},
}
self.data_docs_sites = {
"gcs_site": {
"class_name": "SiteBuilder",
"show_how_to_buttons": True,
"store_backend": {
"class_name": "TupleGCSStoreBackend",
"project": data_docs_project_name,
"bucket": data_docs_bucket_name,
"prefix": data_docs_prefix,
},
"site_index_builder": {
"class_name": "DefaultSiteIndexBuilder",
},
}
}
class DatabaseStoreBackendDefaults(BaseStoreBackendDefaults):
"""
Default store configs for database backends, with some accessible parameters
Args:
default_credentials: Use these credentials for all stores that do not have credentials provided
expectations_store_credentials: Overrides default_credentials if supplied
validations_store_credentials: Overrides default_credentials if supplied
checkpoint_store_credentials: Overrides default_credentials if supplied
expectations_store_name: Overrides default if supplied
validations_store_name: Overrides default if supplied
evaluation_parameter_store_name: Overrides default if supplied
checkpoint_store_name: Overrides default if supplied
"""
def __init__(
self,
default_credentials: Optional[Dict] = None,
expectations_store_credentials: Optional[Dict] = None,
validations_store_credentials: Optional[Dict] = None,
checkpoint_store_credentials: Optional[Dict] = None,
expectations_store_name: str = "expectations_database_store",
validations_store_name: str = "validations_database_store",
evaluation_parameter_store_name: str = "evaluation_parameter_store",
checkpoint_store_name: str = "checkpoint_database_store",
):
# Initialize base defaults
super().__init__()
# Use default credentials if separate credentials not supplied for expectations_store and validations_store
if expectations_store_credentials is None:
expectations_store_credentials = default_credentials
if validations_store_credentials is None:
validations_store_credentials = default_credentials
if checkpoint_store_credentials is None:
checkpoint_store_credentials = default_credentials
# Overwrite defaults
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
self.checkpoint_store_name = checkpoint_store_name
self.stores = {
expectations_store_name: {
"class_name": "ExpectationsStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": expectations_store_credentials,
},
},
validations_store_name: {
"class_name": "ValidationsStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": validations_store_credentials,
},
},
evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"},
checkpoint_store_name: {
"class_name": "CheckpointStore",
"store_backend": {
"class_name": "DatabaseStoreBackend",
"credentials": checkpoint_store_credentials,
},
},
}
class DataContextConfig(BaseYamlConfig):
# TODO: <Alex>ALEX (does not work yet)</Alex>
# _config_schema_class = DataContextConfigSchema
def __init__(
self,
config_version: Optional[float] = None,
datasources: Optional[
Union[
Dict[str, DatasourceConfig],
Dict[str, Dict[str, Union[Dict[str, str], str, dict]]],
]
] = None,
expectations_store_name: Optional[str] = None,
validations_store_name: Optional[str] = None,
evaluation_parameter_store_name: Optional[str] = None,
checkpoint_store_name: Optional[str] = None,
plugins_directory: Optional[str] = None,
validation_operators=None,
stores: Optional[Dict] = None,
data_docs_sites: Optional[Dict] = None,
notebooks=None,
config_variables_file_path: Optional[str] = None,
anonymous_usage_statistics=None,
store_backend_defaults: Optional[BaseStoreBackendDefaults] = None,
commented_map: Optional[CommentedMap] = None,
concurrency: Optional[Union[ConcurrencyConfig, Dict]] = None,
):
# Set defaults
if config_version is None:
config_version = DataContextConfigDefaults.DEFAULT_CONFIG_VERSION.value
# Set defaults via store_backend_defaults if one is passed in
# Override attributes from store_backend_defaults with any items passed into the constructor:
if store_backend_defaults is not None:
if stores is None:
stores = store_backend_defaults.stores
if expectations_store_name is None:
expectations_store_name = store_backend_defaults.expectations_store_name
if validations_store_name is None:
validations_store_name = store_backend_defaults.validations_store_name
if evaluation_parameter_store_name is None:
evaluation_parameter_store_name = (
store_backend_defaults.evaluation_parameter_store_name
)
if data_docs_sites is None:
data_docs_sites = store_backend_defaults.data_docs_sites
if checkpoint_store_name is None:
checkpoint_store_name = store_backend_defaults.checkpoint_store_name
self._config_version = config_version
if datasources is None:
datasources = {}
self.datasources = datasources
self.expectations_store_name = expectations_store_name
self.validations_store_name = validations_store_name
self.evaluation_parameter_store_name = evaluation_parameter_store_name
if checkpoint_store_name is not None:
self.checkpoint_store_name = checkpoint_store_name
self.plugins_directory = plugins_directory
if validation_operators is not None:
self.validation_operators = validation_operators
self.stores = stores
self.notebooks = notebooks
self.data_docs_sites = data_docs_sites
self.config_variables_file_path = config_variables_file_path
if anonymous_usage_statistics is None:
anonymous_usage_statistics = AnonymizedUsageStatisticsConfig()
elif isinstance(anonymous_usage_statistics, dict):
anonymous_usage_statistics = AnonymizedUsageStatisticsConfig(
**anonymous_usage_statistics
)
self.anonymous_usage_statistics = anonymous_usage_statistics
if concurrency is None:
concurrency = ConcurrencyConfig()
elif isinstance(concurrency, dict):
concurrency = ConcurrencyConfig(**concurrency)
self.concurrency: ConcurrencyConfig = concurrency
super().__init__(commented_map=commented_map)
# TODO: <Alex>ALEX (we still need the next two properties)</Alex>
@classmethod
def get_config_class(cls):
return cls # DataContextConfig
@classmethod
def get_schema_class(cls):
return DataContextConfigSchema
@property
def config_version(self):
return self._config_version
class CheckpointConfigSchema(Schema):
class Meta:
unknown = INCLUDE
fields = (
"name",
"config_version",
"template_name",
"module_name",
"class_name",
"run_name_template",
"expectation_suite_name",
"batch_request",
"action_list",
"evaluation_parameters",
"runtime_configuration",
"validations",
"profilers",
# Next two fields are for LegacyCheckpoint configuration
"validation_operator_name",
"batches",
# Next fields are used by configurators
"site_names",
"slack_webhook",
"notify_on",
"notify_with",
"ge_cloud_id",
"expectation_suite_ge_cloud_id",
)
ordered = True
# if keys have None value, remove in post_dump
REMOVE_KEYS_IF_NONE = [
"site_names",
"slack_webhook",
"notify_on",
"notify_with",
]
ge_cloud_id = fields.UUID(required=False, allow_none=True)
name = fields.String(required=False, allow_none=True)
config_version = fields.Number(
validate=lambda x: (0 < x < 100) or x is None,
error_messages={"invalid": "config version must " "be a number or None."},
required=False,
allow_none=True,
)
template_name = fields.String(required=False, allow_none=True)
module_name = fields.String(required=False, missing="great_expectations.checkpoint")
class_name = fields.Str(required=False, allow_none=True)
run_name_template = fields.String(required=False, allow_none=True)
expectation_suite_name = fields.String(required=False, allow_none=True)
expectation_suite_ge_cloud_id = fields.UUID(required=False, allow_none=True)
batch_request = fields.Dict(required=False, allow_none=True)
action_list = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
evaluation_parameters = fields.Dict(required=False, allow_none=True)
runtime_configuration = fields.Dict(required=False, allow_none=True)
validations = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
profilers = fields.List(
cls_or_instance=fields.Dict(), required=False, allow_none=True
)
# Next two fields are for LegacyCheckpoint configuration
validation_operator_name = fields.Str(required=False, allow_none=True)
batches = fields.List(
cls_or_instance=fields.Dict(
keys=fields.Str(
validate=OneOf(["batch_kwargs", "expectation_suite_names"]),
required=False,
allow_none=True,
)
),
required=False,
allow_none=True,
)
# Next fields are used by configurators
site_names = fields.Raw(required=False, allow_none=True)
slack_webhook = fields.String(required=False, allow_none=True)
notify_on = fields.String(required=False, allow_none=True)
notify_with = fields.String(required=False, allow_none=True)
@validates_schema
def validate_schema(self, data, **kwargs):
if not (
"name" in data or "validation_operator_name" in data or "batches" in data
):
raise ge_exceptions.InvalidConfigError(
f"""Your current Checkpoint configuration is incomplete. Please update your Checkpoint configuration to
continue.
"""
)
if data.get("config_version"):
if "name" not in data:
raise ge_exceptions.InvalidConfigError(
f"""Your Checkpoint configuration requires the "name" field. Please update your current Checkpoint
configuration to continue.
"""
)
@post_dump
def remove_keys_if_none(self, data, **kwargs):
data = deepcopy(data)
for key in self.REMOVE_KEYS_IF_NONE:
if key in data and data[key] is None:
data.pop(key)
return data
class CheckpointConfig(BaseYamlConfig):
# TODO: <Alex>ALEX (does not work yet)</Alex>
# _config_schema_class = CheckpointConfigSchema
def __init__(
self,
name: Optional[str] = None,
config_version: Optional[Union[int, float]] = None,
template_name: Optional[str] = None,
module_name: Optional[str] = None,
class_name: Optional[str] = None,
run_name_template: Optional[str] = None,
expectation_suite_name: Optional[str] = None,
batch_request: Optional[dict] = None,
action_list: Optional[List[dict]] = None,
evaluation_parameters: Optional[dict] = None,
runtime_configuration: Optional[dict] = None,
validations: Optional[List[dict]] = None,
profilers: Optional[List[dict]] = None,
validation_operator_name: Optional[str] = None,
batches: Optional[List[dict]] = None,
commented_map: Optional[CommentedMap] = None,
ge_cloud_id: Optional[str] = None,
# the following four args are used by SimpleCheckpoint
site_names: Optional[Union[list, str]] = None,
slack_webhook: Optional[str] = None,
notify_on: Optional[str] = None,
notify_with: Optional[str] = None,
expectation_suite_ge_cloud_id: Optional[str] = None,
):
self._name = name
self._config_version = config_version
if self.config_version is None:
class_name = class_name or "LegacyCheckpoint"
self.validation_operator_name = validation_operator_name
if batches is not None and isinstance(batches, list):
self.batches = batches
else:
class_name = class_name or "Checkpoint"
self._template_name = template_name
self._run_name_template = run_name_template
self._expectation_suite_name = expectation_suite_name
self._expectation_suite_ge_cloud_id = expectation_suite_ge_cloud_id
self._batch_request = batch_request
self._action_list = action_list or []
self._evaluation_parameters = evaluation_parameters or {}
self._runtime_configuration = runtime_configuration or {}
self._validations = validations or []
self._profilers = profilers or []
self._ge_cloud_id = ge_cloud_id
# the following attributes are used by SimpleCheckpoint
self._site_names = site_names
self._slack_webhook = slack_webhook
self._notify_on = notify_on
self._notify_with = notify_with
self._module_name = module_name or "great_expectations.checkpoint"
self._class_name = class_name
super().__init__(commented_map=commented_map)
def update(
self,
other_config: Optional["CheckpointConfig"] = None,
runtime_kwargs: Optional[dict] = None,
):
assert other_config is not None or runtime_kwargs is not None, (
"other_config and runtime_kwargs cannot both " "be None"
)
if other_config is not None:
# replace
if other_config.name is not None:
self.name = other_config.name
if other_config.module_name is not None:
self.module_name = other_config.module_name
if other_config.class_name is not None:
self.class_name = other_config.class_name
if other_config.run_name_template is not None:
self.run_name_template = other_config.run_name_template
if other_config.expectation_suite_name is not None:
self.expectation_suite_name = other_config.expectation_suite_name
if other_config.expectation_suite_ge_cloud_id is not None:
self.expectation_suite_ge_cloud_id = (
other_config.expectation_suite_ge_cloud_id
)
# update
if other_config.batch_request is not None:
if self.batch_request is None:
batch_request = {}
else:
batch_request = self.batch_request
other_batch_request = other_config.batch_request
updated_batch_request = nested_update(
batch_request,
other_batch_request,
)
self._batch_request = updated_batch_request
if other_config.action_list is not None:
self.action_list = self.get_updated_action_list(
base_action_list=self.action_list,
other_action_list=other_config.action_list,
)
if other_config.evaluation_parameters is not None:
nested_update(
self.evaluation_parameters,
other_config.evaluation_parameters,
)
if other_config.runtime_configuration is not None:
nested_update(
self.runtime_configuration,
other_config.runtime_configuration,
)
if other_config.validations is not None:
self.validations.extend(
filter(
lambda v: v not in self.validations, other_config.validations
)
)
if other_config.profilers is not None:
self.profilers.extend(other_config.profilers)
if runtime_kwargs is not None and any(runtime_kwargs.values()):
# replace
if runtime_kwargs.get("run_name_template") is not None:
self.run_name_template = runtime_kwargs.get("run_name_template")
if runtime_kwargs.get("expectation_suite_name") is not None:
self.expectation_suite_name = runtime_kwargs.get(
"expectation_suite_name"
)
if runtime_kwargs.get("expectation_suite_ge_cloud_id") is not None:
self.expectation_suite_ge_cloud_id = runtime_kwargs.get(
"expectation_suite_ge_cloud_id"
)
# update
if runtime_kwargs.get("batch_request") is not None:
batch_request = self.batch_request
batch_request = batch_request or {}
runtime_batch_request = runtime_kwargs.get("batch_request")
batch_request = nested_update(batch_request, runtime_batch_request)
self._batch_request = batch_request
if runtime_kwargs.get("action_list") is not None:
self.action_list = self.get_updated_action_list(
base_action_list=self.action_list,
other_action_list=runtime_kwargs.get("action_list"),
)
if runtime_kwargs.get("evaluation_parameters") is not None:
nested_update(
self.evaluation_parameters,
runtime_kwargs.get("evaluation_parameters"),
)
if runtime_kwargs.get("runtime_configuration") is not None:
nested_update(
self.runtime_configuration,
runtime_kwargs.get("runtime_configuration"),
)
if runtime_kwargs.get("validations") is not None:
self.validations.extend(
filter(
lambda v: v not in self.validations,
runtime_kwargs.get("validations"),
)
)
if runtime_kwargs.get("profilers") is not None:
self.profilers.extend(runtime_kwargs.get("profilers"))
# TODO: <Alex>ALEX (we still need the next two properties)</Alex>
@classmethod
def get_config_class(cls):
return cls # CheckpointConfig
@classmethod
def get_schema_class(cls):
return CheckpointConfigSchema
@property
def ge_cloud_id(self):
return self._ge_cloud_id
@ge_cloud_id.setter
def ge_cloud_id(self, value: str):
self._ge_cloud_id = value
@property
def expectation_suite_ge_cloud_id(self):
return self._expectation_suite_ge_cloud_id
@expectation_suite_ge_cloud_id.setter
def expectation_suite_ge_cloud_id(self, value: str):
self._expectation_suite_ge_cloud_id = value
@property
def name(self):
return self._name
@name.setter
def name(self, value: str):
self._name = value
@property
def template_name(self):
return self._template_name
@template_name.setter
def template_name(self, value: str):
self._template_name = value
@property
def config_version(self):
return self._config_version
@property
def validations(self):
return self._validations
@property
def profilers(self):
return self._profilers
@property
def module_name(self):
return self._module_name
@module_name.setter
def module_name(self, value: str):
self._module_name = value
@property
def class_name(self):
return self._class_name
@class_name.setter
def class_name(self, value: str):
self._class_name = value
@property
def run_name_template(self):
return self._run_name_template
@run_name_template.setter
def run_name_template(self, value: str):
self._run_name_template = value
@property
def batch_request(self):
return self._batch_request
@batch_request.setter
def batch_request(self, value: dict):
self._batch_request = value
@property
def expectation_suite_name(self):
return self._expectation_suite_name
@expectation_suite_name.setter
def expectation_suite_name(self, value: str):
self._expectation_suite_name = value
@property
def action_list(self):
return self._action_list
@action_list.setter
def action_list(self, value: List[dict]):
self._action_list = value
@property
def site_names(self):
return self._site_names
@property
def slack_webhook(self):
return self._slack_webhook
@property
def notify_on(self):
return self._notify_on
@property
def notify_with(self):
return self._notify_with
@classmethod
def get_updated_action_list(
cls,
base_action_list: list,
other_action_list: list,
) -> List[dict]:
base_action_list_dict = {action["name"]: action for action in base_action_list}
for other_action in other_action_list:
other_action_name = other_action["name"]
if other_action_name in base_action_list_dict:
if other_action["action"] is None:
base_action_list_dict.pop(other_action_name)
else:
nested_update(
base_action_list_dict[other_action_name],
other_action,
dedup=True,
)
else:
base_action_list_dict[other_action_name] = other_action
return list(base_action_list_dict.values())
@property
def evaluation_parameters(self):
return self._evaluation_parameters
@property
def runtime_configuration(self):
return self._runtime_configuration
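# Behavior sketch for CheckpointConfig.get_updated_action_list (action names
# illustrative): entries are keyed by "name"; a None "action" removes the
# entry, a matching name is nested-updated, and unseen names are appended.
#
#   base = [{"name": "store_validation_result",
#            "action": {"class_name": "StoreValidationResultAction"}}]
#   other = [{"name": "store_validation_result", "action": None},
#            {"name": "update_data_docs",
#             "action": {"class_name": "UpdateDataDocsAction"}}]
#   CheckpointConfig.get_updated_action_list(base, other)
#   # -> [{"name": "update_data_docs", "action": {"class_name": "UpdateDataDocsAction"}}]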
class CheckpointValidationConfig(DictDot):
pass
class CheckpointValidationConfigSchema(Schema):
pass
dataContextConfigSchema = DataContextConfigSchema()
datasourceConfigSchema = DatasourceConfigSchema()
dataConnectorConfigSchema = DataConnectorConfigSchema()
assetConfigSchema = AssetConfigSchema()
sorterConfigSchema = SorterConfigSchema()
anonymizedUsageStatisticsSchema = AnonymizedUsageStatisticsConfigSchema()
notebookConfigSchema = NotebookConfigSchema()
checkpointConfigSchema = CheckpointConfigSchema()
concurrencyConfigSchema = ConcurrencyConfigSchema()
| [def_use_chains elided: nested [start, end] character-offset pairs for the code above] |
# -*- coding: utf-8 -*-
"""
This is the initialization module of openwarp.
"""
__author__ = "caoweiquan322"
__copyright__ = "Copyright (C) 2014 TopCoder Inc. All rights reserved."
__version__ = "1.0"
#import logging
# Create a base logger for the whole module.
#logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)
#ch = logging.StreamHandler()
#ch.setLevel(logging.DEBUG)
#formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#ch.setFormatter(formatter)
#logger.addHandler(ch)
| [[[81, 91]], [[111, 124]], [[184, 195]]] |
# https://github.com/albermax/innvestigate
# http://heatmapping.org/ | [] |
import tensorflow as tf
import common
def inference(x, num_output, wd, dropout_rate, is_training, transfer_mode=False, model_type='A'):
    # Create tables describing VGG configurations A, B, D, E
    if model_type == 'A':
        config = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
    elif model_type == 'B':
        config = [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
    elif model_type == 'D':
        config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']
    elif model_type == 'E':
        config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M']
    else:
        # Fail fast instead of continuing with an undefined `config`.
        raise ValueError('Unknown model type: ' + model_type + ' | Please specify a model type A, B, D or E')
    network = x
    for k, v in enumerate(config):
        if v == 'M':
            network = common.maxPool(network, 2, 2)
        else:
            with tf.variable_scope('conv' + str(k)):
                network = common.spatialConvolution(network, 3, 1, v, wd=wd)
                network = tf.nn.relu(network)
    network = common.flatten(network)
    with tf.variable_scope('fc1'):
        network = common.fullyConnected(network, 4096, wd=wd)
        network = tf.nn.relu(network)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.dropout(network, dropout_rate)
    with tf.variable_scope('fc2'):
        network = common.fullyConnected(network, 4096, wd=wd)
        network = tf.nn.relu(network)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.dropout(network, dropout_rate)
    if not transfer_mode:
        with tf.variable_scope('output'):
            network = common.fullyConnected(network, num_output, wd=wd)
    else:
        with tf.variable_scope('transfer_output'):
            network = common.fullyConnected(network, num_output, wd=wd)
    return network
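# Minimal usage sketch (assumes TF1-style graph mode and the `common` helpers
# imported above; input resolution and class count are illustrative):
#
#   images = tf.placeholder(tf.float32, [None, 224, 224, 3])
#   logits = inference(images, num_output=1000, wd=5e-4, dropout_rate=0.5,
#                      is_training=True, model_type='D')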
| [def_use_chains elided: nested [start, end] character-offset pairs for the code above] |
#!/usr/bin/env python3
import argparse
import io
import sys
PY2 = sys.version_info[0] == 2
if PY2:
from itertools import izip_longest as zip_longest
else:
from itertools import zip_longest
def get_parser():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Mixing wav.scp files into a multi-channel wav.scp " "using sox.",
)
parser.add_argument("scp", type=str, nargs="+", help="Give wav.scp")
parser.add_argument(
"out",
nargs="?",
type=argparse.FileType("w"),
default=sys.stdout,
help="The output filename. " "If omitted, then output to sys.stdout",
)
return parser
def main():
parser = get_parser()
args = parser.parse_args()
fscps = [io.open(scp, "r", encoding="utf-8") for scp in args.scp]
for linenum, lines in enumerate(zip_longest(*fscps)):
keys = []
wavs = []
for line, scp in zip(lines, args.scp):
if line is None:
raise RuntimeError("Numbers of line mismatch")
sps = line.split(" ", 1)
if len(sps) != 2:
raise RuntimeError(
'Invalid line is found: {}, line {}: "{}" '.format(
scp, linenum, line
)
)
key, wav = sps
keys.append(key)
wavs.append(wav.strip())
if not all(k == keys[0] for k in keys):
raise RuntimeError(
"The ids mismatch. Hint; the input files must be "
"sorted and must have same ids: {}".format(keys)
)
args.out.write(
"{} sox -M {} -c {} -t wav - |\n".format(
keys[0], " ".join("{}".format(w) for w in wavs), len(fscps)
)
)
if __name__ == "__main__":
main()
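# Example invocation (script/file names hypothetical): merge two wav.scp
# files with matching utterance ids into a 2-channel wav.scp via sox:
#
#   python mix_wav_scp.py ch1/wav.scp ch2/wav.scp > wav_2ch.scp
#
# Each output line then has the form:
#   utt1 sox -M /path/a.wav /path/b.wav -c 2 -t wav - |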
| [def_use_chains elided: nested [start, end] character-offset pairs for the code above] |
import numpy as np
import random
import matplotlib.pyplot as plt
points = np.loadtxt('points.txt')
herring_r = np.loadtxt('distribution.txt')
herring = np.zeros((802, 350))
for i in range(350):
    for j in range(802):
        herring[j, 349 - i] = herring_r[i, j]
# s=np.zeros(10)
#
# for i in range(10):
# x=int(round(points[i,0]))-1
# y=int(round(points[i,1]))
#
# for xx in range(x-11,x+12):
# for yy in range(y-11,y+12):
# if herring[xx,yy]>0:
# s[i]+=herring[xx,yy]
#
# f = open('fish_count.txt', 'w')
# for i in range(10):
# f.write(str(s[i])+'\n')
# f.close()
s = 0
for i in range(802):
    for j in range(350):
        if herring[i, j] > 0:
            s += herring[i, j]
print(s)
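# The loops above can be written directly in NumPy (an equivalent sketch,
# assuming distribution.txt loads as a (350, 802) array):
#
#   herring = herring_r[::-1, :].T   # flip rows, then transpose -> (802, 350)
#   s = herring[herring > 0].sum()   # total over positive cells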
| [def_use_chains elided: nested [start, end] character-offset pairs for the code above] |
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from cached_property import cached_property
from chainer import functions as F
from chainerrl.action_value import ActionValue
class BranchedActionValue(ActionValue):
"""Q-function output for a branched action space.
Args:
branches (list):
Each element of the list is a Q-function for an action dimension
"""
def __init__(self, branches, q_values_formatter=lambda x: x):
self.branches = branches
self.q_values_formatter = q_values_formatter
@cached_property
def greedy_actions(self):
actions = []
for branch in self.branches:
actions.append(branch.q_values.array.argmax(axis=1).reshape(-1, 1))
return F.hstack(actions)
@cached_property
def max(self):
chosen_q_values = []
for branch in self.branches:
chosen_q_values.append(branch.max.reshape(-1, 1))
return F.hstack(chosen_q_values)
def evaluate_actions(self, actions):
branch_q_values = []
for i, branch in enumerate(self.branches):
branch_actions = actions[:, i]
branch_q_values.append(branch.evaluate_actions(
branch_actions).reshape(-1, 1))
return F.hstack(branch_q_values)
@property
def params(self):
branch_params = []
for branch in self.branches:
branch_params.extend(list(branch.params))
return tuple(branch_params)
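# Minimal usage sketch (assumes chainerrl's DiscreteActionValue; batch size
# and per-branch action counts are illustrative):
#
#   import numpy as np
#   import chainer
#   from chainerrl.action_value import DiscreteActionValue
#
#   q1 = DiscreteActionValue(chainer.Variable(np.zeros((4, 3), dtype=np.float32)))
#   q2 = DiscreteActionValue(chainer.Variable(np.zeros((4, 5), dtype=np.float32)))
#   qout = BranchedActionValue([q1, q2])
#   qout.greedy_actions.shape  # -> (4, 2): one greedy action per branch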
| [def_use_chains elided: nested [start, end] character-offset pairs for the code above] |
from vlasisku.utils import compound2affixes
class Entry(object):
"""Container for jbovlaste entry data."""
#: The word (or compound) this entry describes.
word = None
#: The type of the word, such as ``'gismu'``.
type = None
#: A list of three-letter affix forms for the word.
affixes = None
#: A list of affixes including four and five-letter versions.
searchaffixes = None
#: The grammatical class if the word is a particle.
grammarclass = None
    #: The grammatical class of this word's terminator, if any.
terminator = None
#: A list of grammatical classes this word terminates, for terminators.
terminates = None
#: A list of two-tuples such as ``('<chapter>.<section>', 'http://...')``.
cll = None
#: HTML for the entry definition, such as a place structure.
definition = None
#: HTML for notes about the entry.
notes = None
#: Plain text definition.
textdefinition = None
#: Plain text notes.
textnotes = None
#: The :class:`~vlasisku.database.Root` instance this entry is in.
db = None
# We need new lists for every instance.
def __init__(self, db):
self.affixes = []
self.searchaffixes = []
self.terminates = []
self.cll = []
self.db = db
def __str__(self):
return self.word
def __repr__(self):
return '<Entry %s>' % self.word
def components(self):
"""Build HTML that links the affixes in a compound
to their corresponding words, with definitions in the link tooltips.
"""
if self.type == 'lujvo':
components = ''
for a in compound2affixes(self.word):
if len(a) == 1:
components += a
else:
word = [e for e in self.db.entries.values()
if a in e.searchaffixes]
if word:
components += '<a href="%s" ' % word[0]
components += 'title="<strong>%s:</strong> ' % word[0]
components += '%s">%s</a>' % (word[0].definition, a)
else:
components += a
return components
class Gloss(object):
"""Container for jbovlaste gloss data."""
#: The actual gloss word.
gloss = None
#: The :class:`Entry` this glosses to.
entry = None
#: The sense in which this gloss word relates to the entry, or ``None``.
sense = None
#: The specific place of the entry this glosses to, if any.
place = None
def __str__(self):
return self.entry.word
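# Hypothetical quick check (illustrative only; no real database is wired up):
#
#   entry = Entry(db=None)
#   entry.word = 'klama'
#   entry.type = 'gismu'
#   repr(entry)  # '<Entry klama>'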
| [
[
[
27,
43
],
[
1688,
1704
]
],
[
[
52,
57
]
],
[
[
2279,
2284
]
]
] |
from data import DataHandler
from models import ACRegNet
import tensorflow as tf
from utils import get_random_batch, read_config_file, create_dir
RUN_IN_GPU = False
def train_acregnet_model(config):
tf.reset_default_graph()
tf_config = tf.ConfigProto()
if RUN_IN_GPU:
tf_config.gpu_options.allow_growth = True
sess = tf.Session(config=tf_config)
train_ims, _ = DataHandler.load_images(config['train_ims_file'])
train_lbs, _ = DataHandler.load_labels(config['train_lbs_file'])
print('Loading training data...done')
acregnet = ACRegNet(sess, config, 'ACRegNet', is_train=True)
print('Building AC-RegNet model...done')
print('Training...')
for i in range(config['iterations']):
batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y = get_random_batch(
train_ims, config['batch_size'], train_lbs)
cur_loss = acregnet.fit(
batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y)
print('Iteration {:>8d}/{}: Loss: {}'.format(
i + 1, config['iterations'], cur_loss))
acregnet.save(config['ckpt_dir'])
print('Saving current AC-RegNet model...done')
print('Training...done')
tf.reset_default_graph()
sess.close()
if __name__ == "__main__":
config = read_config_file('./config/JSRT/ACRegNet.cfg')
create_dir(config['ckpt_dir'])
train_acregnet_model(config)
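# The script reads at least these keys from the parsed .cfg file (inferred
# from the usage above; the paths and values shown here are hypothetical):
#
#   train_ims_file = ./data/JSRT/train_images.txt
#   train_lbs_file = ./data/JSRT/train_labels.txt
#   batch_size     = 32
#   iterations     = 10000
#   ckpt_dir       = ./checkpoints/acregnet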
| [
[
[
17,
28
],
[
396,
407
],
[
465,
476
]
],
[
[
48,
56
],
[
573,
581
]
],
[
[
64,
80
],
[
207,
209
],
[
248,
250
],
[
347,
349
],
[
1199,
1201
]
],
[
[
99,
115
],
[
797,
813
]
],
[
[
117,
133
],
[
1283,
1299
]
],
[
[
135,
145
],
[
1334,
1344
]
],
[
[
148,
158
],
[
273,
283
]
],
[
[
173,
193
],
[
1369,
1389
]
],
[
[
1274,
1280
],
[
1345,
1351
],
[
1390,
1396
]
]
] |
import unittest
from gr_nlp_toolkit.labels.dp_labels import dp_labels
from gr_nlp_toolkit.labels.ner_labels import ner_labels
from gr_nlp_toolkit.labels.pos_labels import pos_labels, pos_properties
from gr_nlp_toolkit.pipeline.pipeline import Pipeline
class TestPipeline(unittest.TestCase):
def test_using_all_processors(self):
nlp = Pipeline('dp,pos,ner')
sentences = ["Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021",
"Το ποιηματάκι το έγραψε ο διάσημος ποιητής, Νίκος Νικολαϊδης"]
for sent in sentences:
doc = nlp(sent)
for token in doc.tokens:
print(token.text, token.ner, token.upos, token.feats, token.head, token.deprel)
self.assertIsNotNone(token.ner)
self.assertTrue(token.ner in ner_labels)
self.assertIsNotNone(token.head)
self.assertIsNotNone(token.deprel)
                # We have to add one, because the cls token is removed
self.assertTrue(token.head in range(0, len(doc.tokens) + 1))
self.assertTrue(token.deprel in dp_labels)
self.assertIsNotNone(token.upos)
self.assertTrue(token.upos in pos_labels['upos'])
self.assertIsNotNone(token.feats)
self.assertEqual(len(list(token.feats.keys())), len(pos_properties[token.upos]))
for feat, value in token.feats.items():
self.assertTrue(feat in pos_properties[token.upos])
self.assertTrue(value in pos_labels[feat])
def test_annotations_are_same_with_multiple_configurations(self):
nlp = Pipeline('dp,pos,ner')
doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")
deprels_preds = []
upos_preds = []
ner_preds = []
for token in doc.tokens:
deprels_preds.append(token.deprel)
upos_preds.append(token.upos)
ner_preds.append(token.ner)
nlp = Pipeline('dp')
doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")
new_deprels_preds = []
for token in doc.tokens:
new_deprels_preds.append(token.deprel)
nlp = Pipeline('pos')
doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")
        new_upos_preds = []
for token in doc.tokens:
new_upos_preds.append(token.upos)
nlp = Pipeline('ner')
doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")
        new_ner_preds = []
for token in doc.tokens:
new_ner_preds.append(token.ner)
self.assertEqual(new_deprels_preds, deprels_preds)
self.assertEqual(new_upos_preds, upos_preds)
self.assertEqual(new_ner_preds, ner_preds)
def test_using_only_one_processor(self):
nlp = Pipeline('ner')
doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")
for token in doc.tokens:
self.assertIsNotNone(token.ner)
self.assertTrue(token.ner in ner_labels)
self.assertIsNone(token.head)
self.assertIsNone(token.deprel)
self.assertFalse(token.head in range(0, len(doc.tokens)))
self.assertFalse(token.deprel in dp_labels)
self.assertIsNone(token.upos)
self.assertFalse(token.upos in pos_labels['upos'])
for feat, value in token.feats.items():
self.assertFalse(feat in pos_properties[token.upos])
self.assertFalse(value in pos_labels[feat])
if __name__ == '__main__':
unittest.main()
| [
[
[
7,
15
],
[
274,
282
],
[
4330,
4338
]
],
[
[
61,
70
],
[
1138,
1147
],
[
2138,
2147
],
[
3999,
4008
]
],
[
[
116,
126
],
[
825,
835
],
[
1805,
1815
],
[
3786,
3796
]
],
[
[
172,
182
],
[
1244,
1254
],
[
1586,
1596
],
[
2252,
2262
],
[
4095,
4105
],
[
4279,
4289
]
],
[
[
184,
198
],
[
1383,
1397
],
[
1513,
1527
],
[
4209,
4223
]
],
[
[
244,
252
],
[
349,
357
],
[
2357,
2365
],
[
2710,
2718
],
[
2934,
2942
],
[
3150,
3158
],
[
3573,
3581
]
],
[
[
261,
273
]
]
] |
"""This module contains the general information for InitiatorFcInitiatorEp ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class InitiatorFcInitiatorEpConsts:
PREF_ALTERNATE = "alternate"
PREF_PREFERRED = "preferred"
PROT_DERIVED = "derived"
PROT_FC = "fc"
PROT_ISCSI = "iscsi"
class InitiatorFcInitiatorEp(ManagedObject):
"""This is InitiatorFcInitiatorEp class."""
consts = InitiatorFcInitiatorEpConsts()
naming_props = set([u'name'])
mo_meta = MoMeta("InitiatorFcInitiatorEp", "initiatorFcInitiatorEp", "fc-ini-[name]", VersionMeta.Version211a, "InputOutput", 0x3f, [], ["read-only"], [u'initiatorGroupEp'], [u'storageEpUser'], [None])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"ep_dn": MoPropertyMeta("ep_dn", "epDn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"id": MoPropertyMeta("id", "id", "ulong", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version211a, MoPropertyMeta.NAMING, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []),
"pref": MoPropertyMeta("pref", "pref", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["alternate", "preferred"], []),
"prot": MoPropertyMeta("prot", "prot", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["derived", "fc", "iscsi"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"wwpn": MoPropertyMeta("wwpn", "wwpn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, r"""(([A-Fa-f0-9][A-Fa-f0-9]:){7}[A-Fa-f0-9][A-Fa-f0-9])|0""", [], []),
}
prop_map = {
"childAction": "child_action",
"dn": "dn",
"epDn": "ep_dn",
"id": "id",
"name": "name",
"pref": "pref",
"prot": "prot",
"rn": "rn",
"sacl": "sacl",
"status": "status",
"wwpn": "wwpn",
}
def __init__(self, parent_mo_or_dn, name, **kwargs):
self._dirty_mask = 0
self.name = name
self.child_action = None
self.ep_dn = None
self.id = None
self.pref = None
self.prot = None
self.sacl = None
self.status = None
self.wwpn = None
ManagedObject.__init__(self, "InitiatorFcInitiatorEp", parent_mo_or_dn, **kwargs)
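# Hypothetical instantiation sketch (the parent DN below is illustrative):
#
#   mo = InitiatorFcInitiatorEp(parent_mo_or_dn="org-root/ini-group-g1", name="ini1")
#   mo.name  # 'ini1'; per mo_meta above, the RN renders as "fc-ini-ini1"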
| [
[
[
115,
128
],
[
422,
435
],
[
3365,
3378
]
],
[
[
156,
170
],
[
815,
829
],
[
896,
910
],
[
1054,
1068
],
[
1116,
1130
],
[
1188,
1202
],
[
1255,
1269
],
[
1325,
1339
],
[
1386,
1400
],
[
1462,
1476
],
[
1528,
1542
],
[
1626,
1640
],
[
1692,
1706
],
[
1792,
1806
],
[
1858,
1872
],
[
1956,
1970
],
[
2018,
2032
],
[
2090,
2104
],
[
2156,
2170
],
[
2312,
2326
],
[
2382,
2396
],
[
2543,
2557
],
[
2609,
2623
]
],
[
[
172,
178
],
[
580,
586
]
],
[
[
202,
213
],
[
656,
667
],
[
871,
882
],
[
1091,
1102
],
[
1230,
1241
],
[
1361,
1372
],
[
1503,
1514
],
[
1667,
1678
],
[
1833,
1844
],
[
1993,
2004
],
[
2131,
2142
],
[
2357,
2368
],
[
2584,
2595
]
],
[
[
222,
250
],
[
500,
528
]
],
[
[
399,
421
]
]
] |
# Copyright (c) 2020 NVIDIA CORPORATION.
# Copyright (c) 2018-2020 Chris Choy ([email protected]).
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part
# of the code.
import argparse
import sklearn.metrics as metrics
import numpy as np
import torch
import torch.nn as nn
import torch.utils.data
from torch.utils.data import DataLoader
import torch.optim as optim
import torch.nn.functional as F
import MinkowskiEngine as ME
from examples.pointnet import (
PointNet,
MinkowskiPointNet,
CoordinateTransformation,
ModelNet40H5,
stack_collate_fn,
minkowski_collate_fn,
)
from examples.common import seed_all
parser = argparse.ArgumentParser()
parser.add_argument("--voxel_size", type=float, default=0.05)
parser.add_argument("--max_steps", type=int, default=100000)
parser.add_argument("--val_freq", type=int, default=1000)
parser.add_argument("--batch_size", default=32, type=int)
parser.add_argument("--lr", default=1e-1, type=float)
parser.add_argument("--weight_decay", type=float, default=1e-4)
parser.add_argument("--num_workers", type=int, default=2)
parser.add_argument("--stat_freq", type=int, default=100)
parser.add_argument("--weights", type=str, default="modelnet.pth")
parser.add_argument("--seed", type=int, default=777)
parser.add_argument("--translation", type=float, default=0.2)
parser.add_argument("--test_translation", type=float, default=0.0)
parser.add_argument(
"--network",
type=str,
choices=["pointnet", "minkpointnet", "minkfcnn", "minksplatfcnn"],
default="minkfcnn",
)
class MinkowskiFCNN(ME.MinkowskiNetwork):
def __init__(
self,
in_channel,
out_channel,
embedding_channel=1024,
channels=(32, 48, 64, 96, 128),
D=3,
):
ME.MinkowskiNetwork.__init__(self, D)
self.network_initialization(
in_channel,
out_channel,
channels=channels,
embedding_channel=embedding_channel,
kernel_size=3,
D=D,
)
self.weight_initialization()
def get_mlp_block(self, in_channel, out_channel):
return nn.Sequential(
ME.MinkowskiLinear(in_channel, out_channel, bias=False),
ME.MinkowskiBatchNorm(out_channel),
ME.MinkowskiLeakyReLU(),
)
def get_conv_block(self, in_channel, out_channel, kernel_size, stride):
return nn.Sequential(
ME.MinkowskiConvolution(
in_channel,
out_channel,
kernel_size=kernel_size,
stride=stride,
dimension=self.D,
),
ME.MinkowskiBatchNorm(out_channel),
ME.MinkowskiLeakyReLU(),
)
def network_initialization(
self,
in_channel,
out_channel,
channels,
embedding_channel,
kernel_size,
D=3,
):
self.mlp1 = self.get_mlp_block(in_channel, channels[0])
self.conv1 = self.get_conv_block(
channels[0],
channels[1],
kernel_size=kernel_size,
stride=1,
)
self.conv2 = self.get_conv_block(
channels[1],
channels[2],
kernel_size=kernel_size,
stride=2,
)
self.conv3 = self.get_conv_block(
channels[2],
channels[3],
kernel_size=kernel_size,
stride=2,
)
self.conv4 = self.get_conv_block(
channels[3],
channels[4],
kernel_size=kernel_size,
stride=2,
)
self.conv5 = nn.Sequential(
self.get_conv_block(
channels[1] + channels[2] + channels[3] + channels[4],
embedding_channel // 4,
kernel_size=3,
stride=2,
),
self.get_conv_block(
embedding_channel // 4,
embedding_channel // 2,
kernel_size=3,
stride=2,
),
self.get_conv_block(
embedding_channel // 2,
embedding_channel,
kernel_size=3,
stride=2,
),
)
self.pool = ME.MinkowskiMaxPooling(kernel_size=3, stride=2, dimension=D)
self.global_max_pool = ME.MinkowskiGlobalMaxPooling()
self.global_avg_pool = ME.MinkowskiGlobalAvgPooling()
self.final = nn.Sequential(
self.get_mlp_block(embedding_channel * 2, 512),
ME.MinkowskiDropout(),
self.get_mlp_block(512, 512),
ME.MinkowskiLinear(512, out_channel, bias=True),
)
        # Experiment note: no dropout, a final 256-unit linear layer, and average pooling reached 92%.
def weight_initialization(self):
for m in self.modules():
if isinstance(m, ME.MinkowskiConvolution):
ME.utils.kaiming_normal_(m.kernel, mode="fan_out", nonlinearity="relu")
if isinstance(m, ME.MinkowskiBatchNorm):
nn.init.constant_(m.bn.weight, 1)
nn.init.constant_(m.bn.bias, 0)
def forward(self, x: ME.TensorField):
x = self.mlp1(x)
y = x.sparse()
y = self.conv1(y)
y1 = self.pool(y)
y = self.conv2(y1)
y2 = self.pool(y)
y = self.conv3(y2)
y3 = self.pool(y)
y = self.conv4(y3)
y4 = self.pool(y)
x1 = y1.slice(x)
x2 = y2.slice(x)
x3 = y3.slice(x)
x4 = y4.slice(x)
x = ME.cat(x1, x2, x3, x4)
y = self.conv5(x.sparse())
x1 = self.global_max_pool(y)
x2 = self.global_avg_pool(y)
return self.final(ME.cat(x1, x2)).F
class GlobalMaxAvgPool(torch.nn.Module):
def __init__(self):
torch.nn.Module.__init__(self)
self.global_max_pool = ME.MinkowskiGlobalMaxPooling()
self.global_avg_pool = ME.MinkowskiGlobalAvgPooling()
def forward(self, tensor):
x = self.global_max_pool(tensor)
y = self.global_avg_pool(tensor)
return ME.cat(x, y)
class MinkowskiSplatFCNN(MinkowskiFCNN):
def __init__(
self,
in_channel,
out_channel,
embedding_channel=1024,
channels=(32, 48, 64, 96, 128),
D=3,
):
MinkowskiFCNN.__init__(
self, in_channel, out_channel, embedding_channel, channels, D
)
def forward(self, x: ME.TensorField):
x = self.mlp1(x)
y = x.splat()
y = self.conv1(y)
y1 = self.pool(y)
y = self.conv2(y1)
y2 = self.pool(y)
y = self.conv3(y2)
y3 = self.pool(y)
y = self.conv4(y3)
y4 = self.pool(y)
x1 = y1.interpolate(x)
x2 = y2.interpolate(x)
x3 = y3.interpolate(x)
x4 = y4.interpolate(x)
x = ME.cat(x1, x2, x3, x4)
y = self.conv5(x.sparse())
x1 = self.global_max_pool(y)
x2 = self.global_avg_pool(y)
return self.final(ME.cat(x1, x2)).F
STR2NETWORK = dict(
pointnet=PointNet,
minkpointnet=MinkowskiPointNet,
minkfcnn=MinkowskiFCNN,
minksplatfcnn=MinkowskiSplatFCNN,
)
def create_input_batch(batch, is_minknet, device="cuda", quantization_size=0.05):
if is_minknet:
batch["coordinates"][:, 1:] = batch["coordinates"][:, 1:] / quantization_size
return ME.TensorField(
coordinates=batch["coordinates"],
features=batch["features"],
device=device,
)
else:
return batch["coordinates"].permute(0, 2, 1).to(device)
class CoordinateTranslation:
def __init__(self, translation):
self.trans = translation
def __call__(self, coords):
if self.trans > 0:
coords += np.random.uniform(low=-self.trans, high=self.trans, size=[1, 3])
return coords
def make_data_loader(phase, is_minknet, config):
assert phase in ["train", "val", "test"]
is_train = phase == "train"
dataset = ModelNet40H5(
phase=phase,
transform=CoordinateTransformation(trans=config.translation)
if is_train
else CoordinateTranslation(config.test_translation),
data_root="modelnet40_ply_hdf5_2048",
)
return DataLoader(
dataset,
num_workers=config.num_workers,
shuffle=is_train,
collate_fn=minkowski_collate_fn if is_minknet else stack_collate_fn,
batch_size=config.batch_size,
)
def test(net, device, config, phase="val"):
is_minknet = isinstance(net, ME.MinkowskiNetwork)
data_loader = make_data_loader(
"test",
is_minknet,
config=config,
)
net.eval()
labels, preds = [], []
with torch.no_grad():
for batch in data_loader:
input = create_input_batch(
batch,
is_minknet,
device=device,
quantization_size=config.voxel_size,
)
logit = net(input)
pred = torch.argmax(logit, 1)
labels.append(batch["labels"].cpu().numpy())
preds.append(pred.cpu().numpy())
torch.cuda.empty_cache()
return metrics.accuracy_score(np.concatenate(labels), np.concatenate(preds))
def criterion(pred, labels, smoothing=True):
"""Calculate cross entropy loss, apply label smoothing if needed."""
labels = labels.contiguous().view(-1)
if smoothing:
eps = 0.2
n_class = pred.size(1)
one_hot = torch.zeros_like(pred).scatter(1, labels.view(-1, 1), 1)
one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)
log_prb = F.log_softmax(pred, dim=1)
loss = -(one_hot * log_prb).sum(dim=1).mean()
else:
loss = F.cross_entropy(pred, labels, reduction="mean")
return loss
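# Numeric sanity check of the smoothed target built above (shapes are assumed
# for illustration): with n_class = 4, eps = 0.2 and true label 2, the
# off-label mass eps is spread evenly over the n_class - 1 wrong classes.
#
#   t = torch.zeros(1, 4).scatter(1, torch.tensor([[2]]), 1)
#   t * (1 - 0.2) + (1 - t) * 0.2 / 3  # tensor([[0.0667, 0.0667, 0.8000, 0.0667]])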
def train(net, device, config):
is_minknet = isinstance(net, ME.MinkowskiNetwork)
optimizer = optim.SGD(
net.parameters(),
lr=config.lr,
momentum=0.9,
weight_decay=config.weight_decay,
)
scheduler = optim.lr_scheduler.CosineAnnealingLR(
optimizer,
T_max=config.max_steps,
)
print(optimizer)
print(scheduler)
train_iter = iter(make_data_loader("train", is_minknet, config))
best_metric = 0
net.train()
for i in range(config.max_steps):
optimizer.zero_grad()
try:
            data_dict = next(train_iter)  # Python 3: use the next() builtin
except StopIteration:
train_iter = iter(make_data_loader("train", is_minknet, config))
            data_dict = next(train_iter)
input = create_input_batch(
data_dict, is_minknet, device=device, quantization_size=config.voxel_size
)
logit = net(input)
loss = criterion(logit, data_dict["labels"].to(device))
loss.backward()
optimizer.step()
scheduler.step()
torch.cuda.empty_cache()
if i % config.stat_freq == 0:
print(f"Iter: {i}, Loss: {loss.item():.3e}")
if i % config.val_freq == 0 and i > 0:
torch.save(
{
"state_dict": net.state_dict(),
"optimizer": optimizer.state_dict(),
"scheduler": scheduler.state_dict(),
"curr_iter": i,
},
config.weights,
)
accuracy = test(net, device, config, phase="val")
if best_metric < accuracy:
best_metric = accuracy
print(f"Validation accuracy: {accuracy}. Best accuracy: {best_metric}")
net.train()
if __name__ == "__main__":
config = parser.parse_args()
seed_all(config.seed)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("===================ModelNet40 Dataset===================")
print(f"Training with translation {config.translation}")
print(f"Evaluating with translation {config.test_translation}")
print("=============================================\n\n")
net = STR2NETWORK[config.network](
in_channel=3, out_channel=40, embedding_channel=1024
).to(device)
print("===================Network===================")
print(net)
print("=============================================\n\n")
train(net, device, config)
accuracy = test(net, device, config, phase="test")
print(f"Test accuracy: {accuracy}")
| [
[
[
1339,
1347
],
[
1805,
1813
]
],
[
[
1355,
1381
],
[
10340,
10347
]
],
[
[
1389,
1400
],
[
8931,
8933
],
[
10363,
10365
],
[
10387,
10389
]
],
[
[
1409,
1414
]
],
[
[
1422,
1436
],
[
3285,
3287
],
[
3556,
3558
],
[
4786,
4788
],
[
5621,
5623
],
[
6185,
6187
],
[
6235,
6237
]
],
[
[
1444,
1460
],
[
6890,
6895
],
[
12878,
12883
],
[
12901,
12906
],
[
6940,
6945
],
[
9877,
9882
],
[
10167,
10172
],
[
10304,
10309
],
[
10659,
10664
],
[
12051,
12056
],
[
12232,
12237
]
],
[
[
1490,
1500
],
[
9408,
9418
]
],
[
[
1508,
1528
],
[
11086,
11091
],
[
11231,
11236
]
],
[
[
1536,
1560
],
[
10810,
10811
],
[
10917,
10918
]
],
[
[
1569,
1590
],
[
2724,
2726
],
[
2919,
2921
],
[
3312,
3314
],
[
3381,
3383
],
[
3429,
3431
],
[
3583,
3585
],
[
3798,
3800
],
[
3846,
3848
],
[
5413,
5415
],
[
5506,
5508
],
[
5568,
5570
],
[
5708,
5710
],
[
5785,
5787
],
[
6001,
6003
],
[
6043,
6045
],
[
6145,
6147
],
[
6293,
6295
],
[
6687,
6689
],
[
6847,
6849
],
[
7002,
7004
],
[
7064,
7066
],
[
7224,
7226
],
[
7587,
7589
],
[
8004,
8006
],
[
8164,
8166
],
[
8535,
8537
],
[
9703,
9705
],
[
11049,
11051
]
],
[
[
1627,
1635
],
[
8217,
8225
]
],
[
[
1641,
1658
],
[
8244,
8261
]
],
[
[
1664,
1688
],
[
9213,
9237
]
],
[
[
1694,
1706
],
[
9160,
9172
]
],
[
[
1712,
1728
],
[
9562,
9578
]
],
[
[
1734,
1754
],
[
9522,
9542
]
],
[
[
1786,
1794
],
[
12843,
12851
]
],
[
[
1796,
1802
],
[
1831,
1837
],
[
1893,
1899
],
[
1954,
1960
],
[
2012,
2018
],
[
2070,
2076
],
[
2124,
2130
],
[
2188,
2194
],
[
2246,
2252
],
[
2304,
2310
],
[
2371,
2377
],
[
2424,
2430
],
[
2486,
2492
],
[
2553,
2559
],
[
12819,
12825
]
],
[
[
2710,
2723
],
[
7264,
7277
],
[
8276,
8289
],
[
7453,
7466
]
],
[
[
6873,
6889
]
],
[
[
7245,
7263
],
[
8309,
8327
]
],
[
[
8184,
8195
],
[
13212,
13223
]
],
[
[
8337,
8355
],
[
9948,
9966
],
[
11762,
11780
]
],
[
[
8756,
8777
],
[
9297,
9318
]
],
[
[
9024,
9040
],
[
9742,
9758
],
[
11391,
11407
],
[
11657,
11673
]
],
[
[
9630,
9634
],
[
13503,
13507
],
[
12552,
12556
]
],
[
[
10416,
10425
],
[
11920,
11929
]
],
[
[
10988,
10993
],
[
13461,
13466
]
],
[
[
12810,
12816
],
[
12852,
12858
],
[
13048,
13054
],
[
13111,
13117
],
[
13224,
13230
],
[
13480,
13486
],
[
13521,
13527
]
],
[
[
12869,
12875
],
[
13311,
13317
],
[
13472,
13478
],
[
13513,
13519
]
],
[
[
13206,
13209
],
[
13388,
13391
],
[
13467,
13470
],
[
13508,
13511
]
],
[
[
13492,
13500
],
[
13571,
13579
]
]
] |
#
# Copyright 2014 Google Inc. All rights reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
__version__ = "2.4.5-dev"
from googlemaps.client import Client
import googlemaps.exceptions
# Allow sphinx to pick up these symbols for the documentation.
__all__ = ["Client"]
| [
[
[
603,
614
]
],
[
[
660,
666
]
],
[
[
674,
695
]
],
[
[
760,
767
]
]
] |
from typing import Any
from tortoise import fields
from tortoise.models import Model
from crimsobot.models import DiscordUser
from crimsobot.models.user import User
class WordleResults(Model):
uuid = fields.UUIDField(pk=True)
name = fields.TextField(default='wordle result')
user = fields.ForeignKeyField('models.User', related_name='wordle_results', index=True)
guesses = fields.IntField() # guesses to solve word (0 for quit)
word = fields.TextField() # word guessed
created_at = fields.DatetimeField(null=True, auto_now_add=True)
@classmethod
async def create_result(cls, discord_user: DiscordUser, guesses: int, word: str) -> None:
user = await User.get_by_discord_user(discord_user)
result = WordleResults(user=user, guesses=guesses, word=word)
await result.save()
@classmethod
async def fetch_all_by_user(cls, discord_user: DiscordUser) -> Any:
user = await User.get_by_discord_user(discord_user)
stat = await WordleResults.filter(user=user)
return stat
class Meta:
table = 'wordle_results'
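# Hypothetical async usage (``discord_user`` would come from the bot runtime):
#
#   await WordleResults.create_result(discord_user, guesses=4, word='crane')
#   results = await WordleResults.fetch_all_by_user(discord_user)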
| [
[
[
19,
22
],
[
922,
925
]
],
[
[
45,
51
],
[
208,
214
],
[
245,
251
],
[
299,
305
],
[
394,
400
],
[
461,
467
],
[
514,
520
]
],
[
[
80,
85
],
[
189,
194
]
],
[
[
116,
127
],
[
630,
641
],
[
906,
917
]
],
[
[
162,
166
],
[
698,
702
],
[
948,
952
]
],
[
[
175,
188
],
[
755,
768
],
[
1008,
1021
]
]
] |
import csv
import json
import os
import random
import shutil
import string
import tempfile
import warnings
from io import BytesIO
from logging import getLogger
from pathlib import Path
from string import ascii_letters, digits
from unittest import mock
from urllib.parse import urljoin, urlparse, quote
from urllib.request import pathname2url
import lxml.etree
from testfixtures import LogCapture
from twisted.internet import defer
from twisted.trial import unittest
from w3lib.url import file_uri_to_path, path_to_file_uri
from zope.interface import implementer
from zope.interface.verify import verifyObject
import scrapy
from scrapy.crawler import CrawlerRunner
from scrapy.exporters import CsvItemExporter
from scrapy.extensions.feedexport import (BlockingFeedStorage, FileFeedStorage, FTPFeedStorage,
IFeedStorage, S3FeedStorage, StdoutFeedStorage)
from scrapy.settings import Settings
from scrapy.utils.python import to_unicode
from scrapy.utils.test import assert_aws_environ, get_crawler, get_s3_content_and_delete
from tests.mockserver import MockServer
class FileFeedStorageTest(unittest.TestCase):
def test_store_file_uri(self):
path = os.path.abspath(self.mktemp())
uri = path_to_file_uri(path)
return self._assert_stores(FileFeedStorage(uri), path)
def test_store_file_uri_makedirs(self):
path = os.path.abspath(self.mktemp())
path = os.path.join(path, 'more', 'paths', 'file.txt')
uri = path_to_file_uri(path)
return self._assert_stores(FileFeedStorage(uri), path)
def test_store_direct_path(self):
path = os.path.abspath(self.mktemp())
return self._assert_stores(FileFeedStorage(path), path)
def test_store_direct_path_relative(self):
path = self.mktemp()
return self._assert_stores(FileFeedStorage(path), path)
def test_interface(self):
path = self.mktemp()
st = FileFeedStorage(path)
verifyObject(IFeedStorage, st)
@defer.inlineCallbacks
def _assert_stores(self, storage, path):
spider = scrapy.Spider("default")
file = storage.open(spider)
file.write(b"content")
yield storage.store(file)
self.assertTrue(os.path.exists(path))
try:
with open(path, 'rb') as fp:
self.assertEqual(fp.read(), b"content")
finally:
os.unlink(path)
class FTPFeedStorageTest(unittest.TestCase):
def get_test_spider(self, settings=None):
class TestSpider(scrapy.Spider):
name = 'test_spider'
crawler = get_crawler(settings_dict=settings)
spider = TestSpider.from_crawler(crawler)
return spider
def test_store(self):
uri = os.environ.get('FEEDTEST_FTP_URI')
path = os.environ.get('FEEDTEST_FTP_PATH')
if not (uri and path):
raise unittest.SkipTest("No FTP server available for testing")
st = FTPFeedStorage(uri)
verifyObject(IFeedStorage, st)
return self._assert_stores(st, path)
def test_store_active_mode(self):
uri = os.environ.get('FEEDTEST_FTP_URI')
path = os.environ.get('FEEDTEST_FTP_PATH')
if not (uri and path):
raise unittest.SkipTest("No FTP server available for testing")
use_active_mode = {'FEED_STORAGE_FTP_ACTIVE': True}
crawler = get_crawler(settings_dict=use_active_mode)
st = FTPFeedStorage.from_crawler(crawler, uri)
verifyObject(IFeedStorage, st)
return self._assert_stores(st, path)
def test_uri_auth_quote(self):
# RFC3986: 3.2.1. User Information
pw_quoted = quote(string.punctuation, safe='')
st = FTPFeedStorage('ftp://foo:%[email protected]/some_path' % pw_quoted)
self.assertEqual(st.password, string.punctuation)
@defer.inlineCallbacks
def _assert_stores(self, storage, path):
spider = self.get_test_spider()
file = storage.open(spider)
file.write(b"content")
yield storage.store(file)
self.assertTrue(os.path.exists(path))
try:
with open(path, 'rb') as fp:
self.assertEqual(fp.read(), b"content")
# again, to check s3 objects are overwritten
yield storage.store(BytesIO(b"new content"))
with open(path, 'rb') as fp:
self.assertEqual(fp.read(), b"new content")
finally:
os.unlink(path)
class BlockingFeedStorageTest(unittest.TestCase):
def get_test_spider(self, settings=None):
class TestSpider(scrapy.Spider):
name = 'test_spider'
crawler = get_crawler(settings_dict=settings)
spider = TestSpider.from_crawler(crawler)
return spider
def test_default_temp_dir(self):
b = BlockingFeedStorage()
tmp = b.open(self.get_test_spider())
tmp_path = os.path.dirname(tmp.name)
self.assertEqual(tmp_path, tempfile.gettempdir())
def test_temp_file(self):
b = BlockingFeedStorage()
tests_path = os.path.dirname(os.path.abspath(__file__))
spider = self.get_test_spider({'FEED_TEMPDIR': tests_path})
tmp = b.open(spider)
tmp_path = os.path.dirname(tmp.name)
self.assertEqual(tmp_path, tests_path)
def test_invalid_folder(self):
b = BlockingFeedStorage()
tests_path = os.path.dirname(os.path.abspath(__file__))
invalid_path = os.path.join(tests_path, 'invalid_path')
spider = self.get_test_spider({'FEED_TEMPDIR': invalid_path})
self.assertRaises(OSError, b.open, spider=spider)
class S3FeedStorageTest(unittest.TestCase):
@mock.patch('scrapy.utils.project.get_project_settings',
new=mock.MagicMock(return_value={'AWS_ACCESS_KEY_ID': 'conf_key',
'AWS_SECRET_ACCESS_KEY': 'conf_secret'}),
create=True)
def test_parse_credentials(self):
try:
import boto # noqa: F401
except ImportError:
raise unittest.SkipTest("S3FeedStorage requires boto")
aws_credentials = {'AWS_ACCESS_KEY_ID': 'settings_key',
'AWS_SECRET_ACCESS_KEY': 'settings_secret'}
crawler = get_crawler(settings_dict=aws_credentials)
# Instantiate with crawler
storage = S3FeedStorage.from_crawler(crawler,
's3://mybucket/export.csv')
self.assertEqual(storage.access_key, 'settings_key')
self.assertEqual(storage.secret_key, 'settings_secret')
# Instantiate directly
storage = S3FeedStorage('s3://mybucket/export.csv',
aws_credentials['AWS_ACCESS_KEY_ID'],
aws_credentials['AWS_SECRET_ACCESS_KEY'])
self.assertEqual(storage.access_key, 'settings_key')
self.assertEqual(storage.secret_key, 'settings_secret')
# URI priority > settings priority
storage = S3FeedStorage('s3://uri_key:uri_secret@mybucket/export.csv',
aws_credentials['AWS_ACCESS_KEY_ID'],
aws_credentials['AWS_SECRET_ACCESS_KEY'])
self.assertEqual(storage.access_key, 'uri_key')
self.assertEqual(storage.secret_key, 'uri_secret')
# Backward compatibility for initialising without settings
with warnings.catch_warnings(record=True) as w:
storage = S3FeedStorage('s3://mybucket/export.csv')
self.assertEqual(storage.access_key, 'conf_key')
self.assertEqual(storage.secret_key, 'conf_secret')
self.assertTrue('without AWS keys' in str(w[-1].message))
@defer.inlineCallbacks
def test_store(self):
assert_aws_environ()
uri = os.environ.get('S3_TEST_FILE_URI')
if not uri:
raise unittest.SkipTest("No S3 URI available for testing")
access_key = os.environ.get('AWS_ACCESS_KEY_ID')
secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
storage = S3FeedStorage(uri, access_key, secret_key)
verifyObject(IFeedStorage, storage)
file = storage.open(scrapy.Spider("default"))
expected_content = b"content: \xe2\x98\x83"
file.write(expected_content)
yield storage.store(file)
u = urlparse(uri)
content = get_s3_content_and_delete(u.hostname, u.path[1:])
self.assertEqual(content, expected_content)
def test_init_without_acl(self):
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, None)
def test_init_with_acl(self):
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key',
'custom-acl'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, 'custom-acl')
def test_from_crawler_without_acl(self):
settings = {
'AWS_ACCESS_KEY_ID': 'access_key',
'AWS_SECRET_ACCESS_KEY': 'secret_key',
}
crawler = get_crawler(settings_dict=settings)
storage = S3FeedStorage.from_crawler(
crawler,
's3://mybucket/export.csv'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, None)
def test_from_crawler_with_acl(self):
settings = {
'AWS_ACCESS_KEY_ID': 'access_key',
'AWS_SECRET_ACCESS_KEY': 'secret_key',
'FEED_STORAGE_S3_ACL': 'custom-acl',
}
crawler = get_crawler(settings_dict=settings)
storage = S3FeedStorage.from_crawler(
crawler,
's3://mybucket/export.csv'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, 'custom-acl')
@defer.inlineCallbacks
def test_store_botocore_without_acl(self):
try:
import botocore # noqa: F401
except ImportError:
raise unittest.SkipTest('botocore is required')
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key',
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, None)
storage.s3_client = mock.MagicMock()
yield storage.store(BytesIO(b'test file'))
self.assertNotIn('ACL', storage.s3_client.put_object.call_args[1])
@defer.inlineCallbacks
def test_store_botocore_with_acl(self):
try:
import botocore # noqa: F401
except ImportError:
raise unittest.SkipTest('botocore is required')
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key',
'custom-acl'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, 'custom-acl')
storage.s3_client = mock.MagicMock()
yield storage.store(BytesIO(b'test file'))
self.assertEqual(
storage.s3_client.put_object.call_args[1].get('ACL'),
'custom-acl'
)
@defer.inlineCallbacks
def test_store_not_botocore_without_acl(self):
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key',
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, None)
storage.is_botocore = False
storage.connect_s3 = mock.MagicMock()
self.assertFalse(storage.is_botocore)
yield storage.store(BytesIO(b'test file'))
conn = storage.connect_s3(*storage.connect_s3.call_args)
bucket = conn.get_bucket(*conn.get_bucket.call_args)
key = bucket.new_key(*bucket.new_key.call_args)
self.assertNotIn(
dict(policy='custom-acl'),
key.set_contents_from_file.call_args
)
@defer.inlineCallbacks
def test_store_not_botocore_with_acl(self):
storage = S3FeedStorage(
's3://mybucket/export.csv',
'access_key',
'secret_key',
'custom-acl'
)
self.assertEqual(storage.access_key, 'access_key')
self.assertEqual(storage.secret_key, 'secret_key')
self.assertEqual(storage.acl, 'custom-acl')
storage.is_botocore = False
storage.connect_s3 = mock.MagicMock()
self.assertFalse(storage.is_botocore)
yield storage.store(BytesIO(b'test file'))
conn = storage.connect_s3(*storage.connect_s3.call_args)
bucket = conn.get_bucket(*conn.get_bucket.call_args)
key = bucket.new_key(*bucket.new_key.call_args)
self.assertIn(
dict(policy='custom-acl'),
key.set_contents_from_file.call_args
)
class StdoutFeedStorageTest(unittest.TestCase):
@defer.inlineCallbacks
def test_store(self):
out = BytesIO()
storage = StdoutFeedStorage('stdout:', _stdout=out)
file = storage.open(scrapy.Spider("default"))
file.write(b"content")
yield storage.store(file)
self.assertEqual(out.getvalue(), b"content")
class FromCrawlerMixin:
init_with_crawler = False
@classmethod
def from_crawler(cls, crawler, *args, **kwargs):
cls.init_with_crawler = True
return cls(*args, **kwargs)
class FromCrawlerCsvItemExporter(CsvItemExporter, FromCrawlerMixin):
pass
class FromCrawlerFileFeedStorage(FileFeedStorage, FromCrawlerMixin):
pass
@implementer(IFeedStorage)
class LogOnStoreFileStorage:
"""
    This storage logs inside the `store` method.
    It can be used to make sure the `store` method is invoked.
"""
def __init__(self, uri):
self.path = file_uri_to_path(uri)
self.logger = getLogger()
def open(self, spider):
return tempfile.NamedTemporaryFile(prefix='feed-')
def store(self, file):
self.logger.info('Storage.store is called')
file.close()
class FeedExportTest(unittest.TestCase):
class MyItem(scrapy.Item):
foo = scrapy.Field()
egg = scrapy.Field()
baz = scrapy.Field()
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.temp_dir, ignore_errors=True)
def _random_temp_filename(self):
chars = [random.choice(ascii_letters + digits) for _ in range(15)]
filename = ''.join(chars)
return os.path.join(self.temp_dir, filename)
@defer.inlineCallbacks
def run_and_export(self, spider_cls, settings):
""" Run spider with specified settings; return exported data. """
FEEDS = settings.get('FEEDS') or {}
settings['FEEDS'] = {
urljoin('file:', pathname2url(str(file_path))): feed
for file_path, feed in FEEDS.items()
}
content = {}
try:
with MockServer() as s:
runner = CrawlerRunner(Settings(settings))
spider_cls.start_urls = [s.url('/')]
yield runner.crawl(spider_cls)
for file_path, feed in FEEDS.items():
if not os.path.exists(str(file_path)):
continue
with open(str(file_path), 'rb') as f:
content[feed['format']] = f.read()
finally:
for file_path in FEEDS.keys():
if not os.path.exists(str(file_path)):
continue
os.remove(str(file_path))
return content
@defer.inlineCallbacks
def exported_data(self, items, settings):
"""
Return exported data which a spider yielding ``items`` would return.
"""
class TestSpider(scrapy.Spider):
name = 'testspider'
def parse(self, response):
for item in items:
yield item
data = yield self.run_and_export(TestSpider, settings)
return data
@defer.inlineCallbacks
def exported_no_data(self, settings):
"""
Return exported data which a spider yielding no ``items`` would return.
"""
class TestSpider(scrapy.Spider):
name = 'testspider'
def parse(self, response):
pass
data = yield self.run_and_export(TestSpider, settings)
return data
@defer.inlineCallbacks
def assertExportedCsv(self, items, header, rows, settings=None, ordered=True):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'csv'},
},
})
data = yield self.exported_data(items, settings)
reader = csv.DictReader(to_unicode(data['csv']).splitlines())
got_rows = list(reader)
if ordered:
self.assertEqual(reader.fieldnames, header)
else:
self.assertEqual(set(reader.fieldnames), set(header))
self.assertEqual(rows, got_rows)
@defer.inlineCallbacks
def assertExportedJsonLines(self, items, rows, settings=None):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'jl'},
},
})
data = yield self.exported_data(items, settings)
parsed = [json.loads(to_unicode(line)) for line in data['jl'].splitlines()]
rows = [{k: v for k, v in row.items() if v} for row in rows]
self.assertEqual(rows, parsed)
@defer.inlineCallbacks
def assertExportedXml(self, items, rows, settings=None):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'xml'},
},
})
data = yield self.exported_data(items, settings)
rows = [{k: v for k, v in row.items() if v} for row in rows]
root = lxml.etree.fromstring(data['xml'])
got_rows = [{e.tag: e.text for e in it} for it in root.findall('item')]
self.assertEqual(rows, got_rows)
@defer.inlineCallbacks
def assertExportedMultiple(self, items, rows, settings=None):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'xml'},
self._random_temp_filename(): {'format': 'json'},
},
})
data = yield self.exported_data(items, settings)
rows = [{k: v for k, v in row.items() if v} for row in rows]
# XML
root = lxml.etree.fromstring(data['xml'])
xml_rows = [{e.tag: e.text for e in it} for it in root.findall('item')]
self.assertEqual(rows, xml_rows)
# JSON
json_rows = json.loads(to_unicode(data['json']))
self.assertEqual(rows, json_rows)
def _load_until_eof(self, data, load_func):
result = []
with tempfile.TemporaryFile() as temp:
temp.write(data)
temp.seek(0)
while True:
try:
result.append(load_func(temp))
except EOFError:
break
return result
@defer.inlineCallbacks
def assertExportedPickle(self, items, rows, settings=None):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'pickle'},
},
})
data = yield self.exported_data(items, settings)
expected = [{k: v for k, v in row.items() if v} for row in rows]
import pickle
result = self._load_until_eof(data['pickle'], load_func=pickle.load)
self.assertEqual(expected, result)
@defer.inlineCallbacks
def assertExportedMarshal(self, items, rows, settings=None):
settings = settings or {}
settings.update({
'FEEDS': {
self._random_temp_filename(): {'format': 'marshal'},
},
})
data = yield self.exported_data(items, settings)
expected = [{k: v for k, v in row.items() if v} for row in rows]
import marshal
result = self._load_until_eof(data['marshal'], load_func=marshal.load)
self.assertEqual(expected, result)
@defer.inlineCallbacks
def assertExported(self, items, header, rows, settings=None, ordered=True):
yield self.assertExportedCsv(items, header, rows, settings, ordered)
yield self.assertExportedJsonLines(items, rows, settings)
yield self.assertExportedXml(items, rows, settings)
yield self.assertExportedPickle(items, rows, settings)
yield self.assertExportedMarshal(items, rows, settings)
yield self.assertExportedMultiple(items, rows, settings)
@defer.inlineCallbacks
def test_export_items(self):
# feed exporters use field names from Item
items = [
self.MyItem({'foo': 'bar1', 'egg': 'spam1'}),
self.MyItem({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}),
]
rows = [
{'egg': 'spam1', 'foo': 'bar1', 'baz': ''},
{'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'}
]
header = self.MyItem.fields.keys()
yield self.assertExported(items, header, rows, ordered=False)
@defer.inlineCallbacks
def test_export_no_items_not_store_empty(self):
for fmt in ('json', 'jsonlines', 'xml', 'csv'):
settings = {
'FEEDS': {
self._random_temp_filename(): {'format': fmt},
},
}
data = yield self.exported_no_data(settings)
self.assertEqual(data[fmt], b'')
@defer.inlineCallbacks
def test_export_no_items_store_empty(self):
formats = (
('json', b'[]'),
('jsonlines', b''),
('xml', b'<?xml version="1.0" encoding="utf-8"?>\n<items></items>'),
('csv', b''),
)
for fmt, expctd in formats:
settings = {
'FEEDS': {
self._random_temp_filename(): {'format': fmt},
},
'FEED_STORE_EMPTY': True,
'FEED_EXPORT_INDENT': None,
}
data = yield self.exported_no_data(settings)
self.assertEqual(data[fmt], expctd)
@defer.inlineCallbacks
def test_export_no_items_multiple_feeds(self):
""" Make sure that `storage.store` is called for every feed. """
settings = {
'FEEDS': {
self._random_temp_filename(): {'format': 'json'},
self._random_temp_filename(): {'format': 'xml'},
self._random_temp_filename(): {'format': 'csv'},
},
'FEED_STORAGES': {'file': 'tests.test_feedexport.LogOnStoreFileStorage'},
'FEED_STORE_EMPTY': False
}
with LogCapture() as log:
yield self.exported_no_data(settings)
print(log)
self.assertEqual(str(log).count('Storage.store is called'), 3)
@defer.inlineCallbacks
def test_export_multiple_item_classes(self):
class MyItem2(scrapy.Item):
foo = scrapy.Field()
hello = scrapy.Field()
items = [
self.MyItem({'foo': 'bar1', 'egg': 'spam1'}),
MyItem2({'hello': 'world2', 'foo': 'bar2'}),
self.MyItem({'foo': 'bar3', 'egg': 'spam3', 'baz': 'quux3'}),
{'hello': 'world4', 'egg': 'spam4'},
]
# by default, Scrapy uses fields of the first Item for CSV and
# all fields for JSON Lines
header = self.MyItem.fields.keys()
rows_csv = [
{'egg': 'spam1', 'foo': 'bar1', 'baz': ''},
{'egg': '', 'foo': 'bar2', 'baz': ''},
{'egg': 'spam3', 'foo': 'bar3', 'baz': 'quux3'},
{'egg': 'spam4', 'foo': '', 'baz': ''},
]
rows_jl = [dict(row) for row in items]
yield self.assertExportedCsv(items, header, rows_csv, ordered=False)
yield self.assertExportedJsonLines(items, rows_jl)
# edge case: FEED_EXPORT_FIELDS==[] means the same as default None
settings = {'FEED_EXPORT_FIELDS': []}
yield self.assertExportedCsv(items, header, rows_csv, ordered=False)
yield self.assertExportedJsonLines(items, rows_jl, settings)
# it is possible to override fields using FEED_EXPORT_FIELDS
header = ["foo", "baz", "hello"]
settings = {'FEED_EXPORT_FIELDS': header}
rows = [
{'foo': 'bar1', 'baz': '', 'hello': ''},
{'foo': 'bar2', 'baz': '', 'hello': 'world2'},
{'foo': 'bar3', 'baz': 'quux3', 'hello': ''},
{'foo': '', 'baz': '', 'hello': 'world4'},
]
yield self.assertExported(items, header, rows,
settings=settings, ordered=True)
@defer.inlineCallbacks
def test_export_dicts(self):
# When dicts are used, only keys from the first row are used as
# a header for CSV, and all fields are used for JSON Lines.
items = [
{'foo': 'bar', 'egg': 'spam'},
{'foo': 'bar', 'egg': 'spam', 'baz': 'quux'},
]
rows_csv = [
{'egg': 'spam', 'foo': 'bar'},
{'egg': 'spam', 'foo': 'bar'}
]
rows_jl = items
yield self.assertExportedCsv(items, ['egg', 'foo'], rows_csv, ordered=False)
yield self.assertExportedJsonLines(items, rows_jl)
@defer.inlineCallbacks
def test_export_feed_export_fields(self):
        # The FEED_EXPORT_FIELDS option allows ordering the exported fields
        # and selecting a subset of fields to export, both for Items and dicts.
for item_cls in [self.MyItem, dict]:
items = [
item_cls({'foo': 'bar1', 'egg': 'spam1'}),
item_cls({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}),
]
# export all columns
settings = {'FEED_EXPORT_FIELDS': 'foo,baz,egg'}
rows = [
{'egg': 'spam1', 'foo': 'bar1', 'baz': ''},
{'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'}
]
yield self.assertExported(items, ['foo', 'baz', 'egg'], rows,
settings=settings, ordered=True)
# export a subset of columns
settings = {'FEED_EXPORT_FIELDS': 'egg,baz'}
rows = [
{'egg': 'spam1', 'baz': ''},
{'egg': 'spam2', 'baz': 'quux2'}
]
yield self.assertExported(items, ['egg', 'baz'], rows,
settings=settings, ordered=True)
@defer.inlineCallbacks
def test_export_encoding(self):
items = [dict({'foo': u'Test\xd6'})]
formats = {
'json': '[{"foo": "Test\\u00d6"}]'.encode('utf-8'),
'jsonlines': '{"foo": "Test\\u00d6"}\n'.encode('utf-8'),
'xml': (
'<?xml version="1.0" encoding="utf-8"?>\n'
'<items><item><foo>Test\xd6</foo></item></items>'
).encode('utf-8'),
'csv': 'foo\r\nTest\xd6\r\n'.encode('utf-8'),
}
for fmt, expected in formats.items():
settings = {
'FEEDS': {
self._random_temp_filename(): {'format': fmt},
},
'FEED_EXPORT_INDENT': None,
}
data = yield self.exported_data(items, settings)
self.assertEqual(expected, data[fmt])
formats = {
'json': '[{"foo": "Test\xd6"}]'.encode('latin-1'),
'jsonlines': '{"foo": "Test\xd6"}\n'.encode('latin-1'),
'xml': (
'<?xml version="1.0" encoding="latin-1"?>\n'
'<items><item><foo>Test\xd6</foo></item></items>'
).encode('latin-1'),
'csv': 'foo\r\nTest\xd6\r\n'.encode('latin-1'),
}
for fmt, expected in formats.items():
settings = {
'FEEDS': {
self._random_temp_filename(): {'format': fmt},
},
'FEED_EXPORT_INDENT': None,
'FEED_EXPORT_ENCODING': 'latin-1',
}
data = yield self.exported_data(items, settings)
self.assertEqual(expected, data[fmt])
@defer.inlineCallbacks
def test_export_multiple_configs(self):
items = [dict({'foo': u'FOO', 'bar': u'BAR'})]
formats = {
'json': '[\n{"bar": "BAR"}\n]'.encode('utf-8'),
'xml': (
'<?xml version="1.0" encoding="latin-1"?>\n'
'<items>\n <item>\n <foo>FOO</foo>\n </item>\n</items>'
).encode('latin-1'),
'csv': 'bar,foo\r\nBAR,FOO\r\n'.encode('utf-8'),
}
settings = {
'FEEDS': {
self._random_temp_filename(): {
'format': 'json',
'indent': 0,
'fields': ['bar'],
'encoding': 'utf-8',
},
self._random_temp_filename(): {
'format': 'xml',
'indent': 2,
'fields': ['foo'],
'encoding': 'latin-1',
},
self._random_temp_filename(): {
'format': 'csv',
'indent': None,
'fields': ['bar', 'foo'],
'encoding': 'utf-8',
},
},
}
data = yield self.exported_data(items, settings)
for fmt, expected in formats.items():
self.assertEqual(expected, data[fmt])
@defer.inlineCallbacks
def test_export_indentation(self):
items = [
{'foo': ['bar']},
{'key': 'value'},
]
test_cases = [
# JSON
{
'format': 'json',
'indent': None,
'expected': b'[{"foo": ["bar"]},{"key": "value"}]',
},
{
'format': 'json',
'indent': -1,
'expected': b"""[
{"foo": ["bar"]},
{"key": "value"}
]""",
},
{
'format': 'json',
'indent': 0,
'expected': b"""[
{"foo": ["bar"]},
{"key": "value"}
]""",
},
{
'format': 'json',
'indent': 2,
'expected': b"""[
{
"foo": [
"bar"
]
},
{
"key": "value"
}
]""",
},
{
'format': 'json',
'indent': 4,
'expected': b"""[
{
"foo": [
"bar"
]
},
{
"key": "value"
}
]""",
},
{
'format': 'json',
'indent': 5,
'expected': b"""[
{
"foo": [
"bar"
]
},
{
"key": "value"
}
]""",
},
# XML
{
'format': 'xml',
'indent': None,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items><item><foo><value>bar</value></foo></item><item><key>value</key></item></items>""",
},
{
'format': 'xml',
'indent': -1,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items>
<item><foo><value>bar</value></foo></item>
<item><key>value</key></item>
</items>""",
},
{
'format': 'xml',
'indent': 0,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items>
<item><foo><value>bar</value></foo></item>
<item><key>value</key></item>
</items>""",
},
{
'format': 'xml',
'indent': 2,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items>
<item>
<foo>
<value>bar</value>
</foo>
</item>
<item>
<key>value</key>
</item>
</items>""",
},
{
'format': 'xml',
'indent': 4,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items>
<item>
<foo>
<value>bar</value>
</foo>
</item>
<item>
<key>value</key>
</item>
</items>""",
},
{
'format': 'xml',
'indent': 5,
'expected': b"""<?xml version="1.0" encoding="utf-8"?>
<items>
<item>
<foo>
<value>bar</value>
</foo>
</item>
<item>
<key>value</key>
</item>
</items>""",
},
]
for row in test_cases:
settings = {
'FEEDS': {
self._random_temp_filename(): {
'format': row['format'],
'indent': row['indent'],
},
},
}
data = yield self.exported_data(items, settings)
self.assertEqual(row['expected'], data[row['format']])
@defer.inlineCallbacks
def test_init_exporters_storages_with_crawler(self):
settings = {
'FEED_EXPORTERS': {'csv': 'tests.test_feedexport.FromCrawlerCsvItemExporter'},
'FEED_STORAGES': {'file': 'tests.test_feedexport.FromCrawlerFileFeedStorage'},
'FEEDS': {
self._random_temp_filename(): {'format': 'csv'},
},
}
yield self.exported_data(items=[], settings=settings)
self.assertTrue(FromCrawlerCsvItemExporter.init_with_crawler)
self.assertTrue(FromCrawlerFileFeedStorage.init_with_crawler)
@defer.inlineCallbacks
def test_pathlib_uri(self):
feed_path = Path(self._random_temp_filename())
settings = {
'FEED_STORE_EMPTY': True,
'FEEDS': {
feed_path: {'format': 'csv'}
},
}
data = yield self.exported_no_data(settings)
self.assertEqual(data['csv'], b'')
| [
[
[
7,
10
],
[
17429,
17432
]
],
[
[
18,
22
],
[
18055,
18059
],
[
19469,
19473
]
],
[
[
30,
32
],
[
1206,
1208
],
[
1397,
1399
],
[
1443,
1445
],
[
1645,
1647
],
[
2255,
2257
],
[
2416,
2418
],
[
2767,
2769
],
[
2817,
2819
],
[
3129,
3131
],
[
3179,
3181
],
[
4089,
4091
],
[
4465,
4467
],
[
4917,
4919
],
[
5088,
5090
],
[
5104,
5106
],
[
5247,
5249
],
[
5412,
5414
],
[
5428,
5430
],
[
5478,
5480
],
[
7845,
7847
],
[
7992,
7994
],
[
8049,
8051
],
[
15161,
15163
],
[
15856,
15858
],
[
16111,
16113
],
[
16189,
16191
]
],
[
[
40,
46
],
[
15054,
15060
]
],
[
[
54,
60
],
[
14950,
14956
]
],
[
[
68,
74
],
[
3686,
3692
],
[
3831,
3837
]
],
[
[
82,
90
],
[
4978,
4986
],
[
14545,
14553
],
[
14898,
14906
],
[
19630,
19638
]
],
[
[
98,
106
],
[
7446,
7454
]
],
[
[
122,
129
],
[
4310,
4317
],
[
10874,
10881
],
[
11592,
11599
],
[
12276,
12283
],
[
13170,
13177
],
[
13615,
13622
]
],
[
[
150,
159
],
[
14489,
14498
]
],
[
[
180,
184
],
[
34553,
34557
]
],
[
[
204,
217
],
[
15068,
15081
]
],
[
[
219,
225
],
[
15084,
15090
]
],
[
[
247,
251
],
[
5700,
5704
],
[
5776,
5780
],
[
10829,
10833
],
[
11547,
11551
],
[
12184,
12188
],
[
13078,
13082
]
],
[
[
277,
284
],
[
15440,
15447
]
],
[
[
286,
294
],
[
8383,
8391
]
],
[
[
296,
301
],
[
3680,
3685
]
],
[
[
329,
341
],
[
15457,
15469
]
],
[
[
350,
360
],
[
18633,
18637
],
[
19278,
19282
]
],
[
[
386,
396
],
[
23612,
23622
]
],
[
[
426,
431
],
[
2021,
2026
],
[
3857,
3862
],
[
7754,
7759
],
[
10290,
10295
],
[
10978,
10983
],
[
11748,
11753
],
[
12612,
12617
],
[
13553,
13558
],
[
15205,
15210
],
[
16245,
16250
],
[
16683,
16688
],
[
17075,
17080
],
[
17718,
17723
],
[
18235,
18240
],
[
18795,
18800
],
[
19901,
19906
],
[
20442,
20447
],
[
20988,
20993
],
[
21491,
21496
],
[
22019,
22024
],
[
22409,
22414
],
[
23063,
23068
],
[
23780,
23785
],
[
25617,
25622
],
[
26231,
26236
],
[
27432,
27437
],
[
29102,
29107
],
[
30461,
30466
],
[
33876,
33881
],
[
34479,
34484
]
],
[
[
458,
466
],
[
1135,
1143
],
[
2459,
2467
],
[
4513,
4521
],
[
5674,
5682
],
[
13527,
13535
],
[
14713,
14721
],
[
2902,
2910
],
[
3264,
3272
],
[
6093,
6101
],
[
7918,
7926
],
[
10460,
10468
],
[
11145,
11153
]
],
[
[
489,
505
],
[
14445,
14461
]
],
[
[
507,
523
],
[
1251,
1267
],
[
1505,
1521
]
],
[
[
551,
562
],
[
14220,
14231
]
],
[
[
597,
609
],
[
1984,
1996
],
[
3000,
3012
],
[
3505,
3517
],
[
8158,
8170
]
],
[
[
618,
624
],
[
14751,
14757
],
[
14779,
14785
],
[
14808,
14814
],
[
14837,
14843
],
[
2105,
2111
],
[
2551,
2557
],
[
4605,
4611
],
[
8222,
8228
],
[
13713,
13719
],
[
16439,
16445
],
[
16876,
16882
],
[
23874,
23880
],
[
23906,
23912
],
[
23941,
23947
]
],
[
[
652,
665
],
[
15648,
15661
]
],
[
[
695,
710
],
[
14092,
14107
]
],
[
[
753,
772
],
[
4830,
4849
],
[
5044,
5063
],
[
5368,
5387
]
],
[
[
774,
789
],
[
14172,
14187
],
[
1309,
1324
],
[
1563,
1578
],
[
1711,
1726
],
[
1852,
1867
],
[
1954,
1969
]
],
[
[
791,
805
],
[
2972,
2986
],
[
3455,
3469
],
[
3728,
3742
]
],
[
[
849,
861
],
[
14232,
14244
],
[
1997,
2009
],
[
3013,
3025
],
[
3518,
3530
],
[
8171,
8183
]
],
[
[
863,
876
],
[
6391,
6404
],
[
6674,
6687
],
[
7046,
7059
],
[
7511,
7524
],
[
8107,
8120
],
[
8573,
8586
],
[
8904,
8917
],
[
9463,
9476
],
[
10016,
10029
],
[
10521,
10534
],
[
11206,
11219
],
[
11839,
11852
],
[
12700,
12713
]
],
[
[
878,
895
],
[
13643,
13660
]
],
[
[
925,
933
],
[
15662,
15670
]
],
[
[
966,
976
],
[
17444,
17454
],
[
18066,
18076
],
[
19480,
19490
]
],
[
[
1007,
1025
],
[
7810,
7828
]
],
[
[
1027,
1038
],
[
2618,
2629
],
[
3399,
3410
],
[
4672,
4683
],
[
6295,
6306
],
[
9409,
9420
],
[
9962,
9973
]
],
[
[
1040,
1065
],
[
8415,
8440
]
],
[
[
1096,
1106
],
[
15604,
15614
]
],
[
[
1115,
1134
]
],
[
[
2440,
2458
]
],
[
[
4489,
4512
]
],
[
[
5656,
5673
]
],
[
[
13505,
13526
]
],
[
[
13865,
13881
],
[
14109,
14125
],
[
14189,
14205
]
],
[
[
14065,
14091
],
[
34357,
34383
]
],
[
[
14145,
14171
],
[
34427,
34453
]
],
[
[
14252,
14273
]
],
[
[
14698,
14712
]
]
] |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Youssef Restom and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestExtraesiaSettings(unittest.TestCase):
pass
| [[[119, 135]], [[160, 168], [198, 206]], [[176, 197]]] |
#!/usr/bin/env python
import setuptools
def get_version(filename):
with open(filename) as in_fh:
for line in in_fh:
if line.startswith('__version__'):
return line.split('=')[1].strip()[1:-1]
raise ValueError("Cannot extract version from %s" % filename)
setuptools.setup(
name="better-apidoc",
version=get_version("better_apidoc.py"),
url="https://github.com/goerz/better-apidoc",
author="Michael Goerz",
author_email="[email protected]",
description="A version of sphinx-apidoc with support for templating",
install_requires=[
'sphinx', 'jinja2'
],
extras_require={'dev': ['pytest', ]},
py_modules=['better_apidoc'],
entry_points='''
[console_scripts]
better-apidoc=better_apidoc:main
''',
classifiers=[
'Environment :: Console',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
| [[[29, 39], [301, 311]], [[46, 57], [357, 368]]] |
"""The following code should emit a raising-non-exception.
Previously, it didn't, due to a bug in the check for bad-exception-context,
which prevented further checking on the Raise node.
"""
# pylint: disable=import-error, too-few-public-methods
from missing_module import missing
class Exc(object):
"""Not an actual exception."""
raise Exc from missing # [raising-non-exception]
| [[[275, 282], [354, 361]], [[290, 293], [345, 348]]] |
from django.conf.urls import url, include
from . import views
test_patterns = [
url(r'^$', views.index, name='django_daraja_index'),
url(r'^oauth/success', views.oauth_success, name='test_oauth_success'),
url(r'^stk-push/success', views.stk_push_success, name='test_stk_push_success'),
url(r'^business-payment/success', views.business_payment_success, name='test_business_payment_success'),
url(r'^salary-payment/success', views.salary_payment_success, name='test_salary_payment_success'),
url(r'^promotion-payment/success', views.promotion_payment_success, name='test_promotion_payment_success'),
]
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^tests/', include(test_patterns)),
]
| [[[29, 32], [82, 85], [136, 139], [209, 212], [291, 294], [397, 400], [497, 500], [625, 628], [665, 668]], [[34, 41], [681, 688]], [[56, 61], [93, 98], [159, 164], [235, 240], [325, 330], [429, 434], [532, 537], [636, 641]], [[63, 76], [689, 702]], [[608, 619]]] |
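Each row above pairs a code string with its def_use_chains: a list of chains, one per identifier, where every span is a [start, end) character offset into the code — the first span is the definition site and the remaining spans are its uses (in the django record above, code[29:32] recovers "url"). Below is a minimal decoding sketch; the `record` literal is a hypothetical stand-in for one already-parsed row, not part of the dataset itself.

# Minimal sketch: recover identifier names from def-use chain offsets.
# `record` is a hypothetical stand-in for one parsed row of this dump;
# the code string is truncated here, so only definition spans are sliced.
record = {
    "code": "from django.conf.urls import url, include\nfrom . import views\n",
    "def_use_chains": [[[29, 32], [82, 85]], [[34, 41], [681, 688]]],
}

def decode_chains(code, chains):
    """Yield (name, definition_span, use_spans); spans are [start, end) offsets."""
    for chain in chains:
        definition, uses = chain[0], chain[1:]
        yield code[definition[0]:definition[1]], tuple(definition), [tuple(u) for u in uses]

for name, defined_at, used_at in decode_chains(record["code"], record["def_use_chains"]):
    print(name, "defined at", defined_at, "used at", used_at)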
"""
This module tests the build API. These are high-level integration tests.
"""
import base64
from collections import OrderedDict
from glob import glob
import logging
import os
import re
import subprocess
import sys
import json
import uuid
# for version
import conda
from conda_build.conda_interface import PY3, url_path, LinkError, CondaError, cc_conda_build
import conda_build
from binstar_client.commands import remove, show
from binstar_client.errors import NotFound
from pkg_resources import parse_version
import pytest
import yaml
import tarfile
from conda_build import api, exceptions, __version__
from conda_build.build import VersionOrder
from conda_build.render import finalize_metadata
from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2,
package_has_file, check_output_env, get_conda_operation_locks)
from conda_build.os_utils.external import find_executable
from conda_build.exceptions import DependencyNeedsBuildingError
from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, FileNotFoundError
# define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere
empty_sections = os.path.join(metadata_dir, "empty_sections")
def represent_ordereddict(dumper, data):
value = []
for item_key, item_value in data.items():
node_key = dumper.represent_data(item_key)
node_value = dumper.represent_data(item_value)
value.append((node_key, node_value))
return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value)
yaml.add_representer(OrderedDict, represent_ordereddict)
class AnacondaClientArgs(object):
def __init__(self, specs, token=None, site=None, log_level=logging.INFO, force=False):
from binstar_client.utils import parse_specs
self.specs = [parse_specs(specs)]
self.spec = self.specs[0]
self.token = token
self.site = site
self.log_level = log_level
self.force = force
def describe_root(cwd=None):
if not cwd:
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tag = check_output_env(["git", "describe", "--abbrev=0"], cwd=cwd).rstrip()
if PY3:
tag = tag.decode("utf-8")
return tag
@pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir)
if is_valid_dir(metadata_dir, dirname)])
def recipe(request):
return os.path.join(metadata_dir, request.param)
# This tests any of the folders in the test-recipes/metadata folder that don't start with _
def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch):
# These variables are defined solely for testing purposes,
# so they can be checked within build scripts
monkeypatch.setenv("CONDA_TEST_VAR", "conda_test")
monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2")
api.build(recipe, config=testing_config)
def test_token_upload(testing_workdir, testing_metadata):
folder_uuid = uuid.uuid4().hex
# generated with conda_test_account user, command:
# anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda'
args = AnacondaClientArgs(specs="conda_build_test/test_token_upload_" + folder_uuid,
token="co-143399b8-276e-48db-b43f-4a3de839a024",
force=True)
with pytest.raises(NotFound):
show.main(args)
testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid])
testing_metadata.config.token = args.token
# the folder with the test recipe to upload
api.build(testing_metadata)
# make sure that the package is available (should raise if it doesn't)
show.main(args)
# clean up - we don't actually want this package to exist
remove.main(args)
# verify cleanup:
with pytest.raises(NotFound):
show.main(args)
@pytest.mark.parametrize("service_name", ["binstar", "anaconda"])
def test_no_anaconda_upload_condarc(service_name, testing_workdir, testing_config, capfd):
api.build(empty_sections, config=testing_config)
output, error = capfd.readouterr()
assert "Automatic uploading is disabled" in output, error
def test_git_describe_info_on_branch(testing_config):
recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch")
m = api.render(recipe_path, config=testing_config)[0][0]
output = api.get_output_file_path(m)[0]
# missing hash because we set custom build string in meta.yaml
test_path = os.path.join(testing_config.croot, testing_config.host_subdir,
"git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2")
assert test_path == output
def test_no_include_recipe_config_arg(testing_metadata):
"""Two ways to not include recipe: build/include_recipe: False in meta.yaml; or this.
Former is tested with specific recipe."""
outputs = api.build(testing_metadata)
assert package_has_file(outputs[0], "info/recipe/meta.yaml")
# make sure that it is not there when the command line flag is passed
testing_metadata.config.include_recipe = False
testing_metadata.meta['build']['number'] = 2
# We cannot test packages without recipes as we cannot render them
output_file = api.build(testing_metadata, notest=True)[0]
assert not package_has_file(output_file, "info/recipe/meta.yaml")
def test_no_include_recipe_meta_yaml(testing_metadata, testing_config):
# first, make sure that the recipe is there by default. This test copied from above, but copied
# as a sanity check here.
outputs = api.build(testing_metadata)
assert package_has_file(outputs[0], "info/recipe/meta.yaml")
output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'),
config=testing_config)[0]
assert not package_has_file(output_file, "info/recipe/meta.yaml")
def test_early_abort(testing_config, capfd):
"""There have been some problems with conda-build dropping out early.
Make sure we aren't causing them"""
api.build(os.path.join(metadata_dir, '_test_early_abort'), config=testing_config)
output, error = capfd.readouterr()
assert "Hello World" in output
def test_output_build_path_git_source(testing_workdir, testing_config):
recipe_path = os.path.join(metadata_dir, "source_git_jinja2")
m = api.render(recipe_path, config=testing_config)[0][0]
output = api.get_output_file_paths(m)[0]
_hash = m.hash_dependencies()
test_path = os.path.join(testing_config.croot, testing_config.host_subdir,
"conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format(
sys.version_info.major, sys.version_info.minor, _hash))
assert output == test_path
@pytest.mark.serial
def test_build_with_no_activate_does_not_activate():
api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False,
anaconda_upload=False)
@pytest.mark.serial
def test_build_with_activate_does_activate():
api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True,
anaconda_upload=False)
@pytest.mark.skipif(sys.platform == "win32",
reason="no binary prefix manipulation done on windows.")
def test_binary_has_prefix_files(testing_workdir, testing_config):
api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config)
def test_relative_path_git_versioning(testing_workdir, testing_config):
# conda_build_test_recipe is a manual step. Clone it at the same level as
# your conda-build source.
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',
'conda_build_test_recipe'))
tag = describe_root(cwd)
output = api.get_output_file_path(os.path.join(metadata_dir,
"_source_git_jinja2_relative_path"),
config=testing_config)[0]
assert tag in output
def test_relative_git_url_git_versioning(testing_workdir, testing_config):
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',
'conda_build_test_recipe'))
tag = describe_root(cwd)
recipe = os.path.join(metadata_dir, "_source_git_jinja2_relative_git_url")
output = api.get_output_file_path(recipe, config=testing_config)[0]
assert tag in output
def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config):
recipe = os.path.join(metadata_dir, "_dirty_skip_section")
testing_config.dirty = True
api.build(recipe, config=testing_config)
with pytest.raises(subprocess.CalledProcessError):
testing_config.dirty = False
api.build(recipe, config=testing_config)
def dummy_executable(folder, exename):
# empty prefix by default - extra bit at beginning of file
if sys.platform == "win32":
exename = exename + ".bat"
dummyfile = os.path.join(folder, exename)
if sys.platform == "win32":
prefix = "@echo off\n"
else:
prefix = "#!/bin/bash\nexec 1>&2\n"
with open(dummyfile, 'w') as f:
f.write(prefix + """
echo ******* You have reached the dummy {}. It is likely there is a bug in
echo ******* conda that makes it not add the _build/bin directory onto the
echo ******* PATH before running the source checkout tool
exit -1
""".format(exename))
if sys.platform != "win32":
import stat
st = os.stat(dummyfile)
os.chmod(dummyfile, st.st_mode | stat.S_IEXEC)
return exename
def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatch):
# "hide" svn by putting a known bad one on PATH
exename = dummy_executable(testing_workdir, "svn")
monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep)
FNULL = open(os.devnull, 'w')
with pytest.raises(subprocess.CalledProcessError, message="Dummy svn was not executed"):
check_call_env([exename, '--version'], stderr=FNULL)
FNULL.close()
env = dict(os.environ)
env["PATH"] = os.pathsep.join([testing_workdir, env["PATH"]])
api.build(os.path.join(metadata_dir, '_checkout_tool_as_dependency'), config=testing_config)
platforms = ["64" if sys.maxsize > 2**32 else "32"]
if sys.platform == "win32":
platforms = set(["32", ] + platforms)
compilers = ["2.7", "3.4", "3.5"]
msvc_vers = ['9.0', '10.0', '14.0']
else:
msvc_vers = []
compilers = [".".join([str(sys.version_info.major), str(sys.version_info.minor)])]
@pytest.mark.skipif(sys.platform != "win32", reason="MSVC only on windows")
@pytest.mark.parametrize("msvc_ver", msvc_vers)
def test_build_msvc_compiler(msvc_ver, monkeypatch):
# verify that the correct compiler is available
cl_versions = {"9.0": 15,
"10.0": 16,
"11.0": 17,
"12.0": 18,
"14.0": 19}
monkeypatch.setenv('CONDATEST_MSVC_VER', msvc_ver)
monkeypatch.setenv('CL_EXE_VERSION', str(cl_versions[msvc_ver]))
try:
# Always build Python 2.7 - but set MSVC version manually via Jinja template
api.build(os.path.join(metadata_dir, '_build_msvc_compiler'), python="2.7")
except:
raise
finally:
del os.environ['CONDATEST_MSVC_VER']
del os.environ['CL_EXE_VERSION']
@pytest.mark.parametrize("platform", platforms)
@pytest.mark.parametrize("target_compiler", compilers)
def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config):
testing_config.variant['python'] = target_compiler
api.build(os.path.join(metadata_dir, '_cmake_generator'), config=testing_config)
@pytest.mark.skipif(sys.platform == "win32",
reason="No windows symlinks")
def test_symlink_fail(testing_workdir, testing_config, capfd):
with pytest.raises((SystemExit, FileNotFoundError)):
api.build(os.path.join(fail_dir, "symlinks"), config=testing_config)
# output, error = capfd.readouterr()
# assert error.count("Error") == 6, "did not find appropriate count of Error in: " + error
def test_pip_in_meta_yaml_fail(testing_workdir, testing_config):
with pytest.raises(ValueError) as exc:
api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config)
assert "environment.yml" in str(exc)
def test_recursive_fail(testing_workdir, testing_config):
with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)) as exc:
api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config)
# indentation critical here. If you indent this, and the exception is not raised, then
# the exc variable here isn't really completely created and shows really strange errors:
# AttributeError: 'ExceptionInfo' object has no attribute 'typename'
assert "recursive-build2" in str(exc.value)
def test_jinja_typo(testing_workdir, testing_config):
with pytest.raises(SystemExit) as exc:
api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config)
assert "GIT_DSECRIBE_TAG" in exc.exconly()
@pytest.mark.serial
def test_skip_existing(testing_workdir, testing_config, capfd):
# build the recipe first
api.build(empty_sections, config=testing_config)
api.build(empty_sections, config=testing_config, skip_existing=True)
output, error = capfd.readouterr()
assert "are already built" in output
@pytest.mark.serial
def test_skip_existing_url(testing_metadata, testing_workdir, capfd):
# make sure that it is built
outputs = api.build(testing_metadata)
# Copy our package into some new folder
output_dir = os.path.join(testing_workdir, 'someoutput')
platform = os.path.join(output_dir, testing_metadata.config.host_subdir)
os.makedirs(platform)
copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))
# create the index so conda can find the file
api.update_index(platform, config=testing_metadata.config)
# HACK: manually create noarch location there, so that conda 4.3.2+ considers a valid channel
noarch = os.path.join(output_dir, 'noarch')
os.makedirs(noarch)
api.update_index(noarch, config=testing_metadata.config)
testing_metadata.config.skip_existing = True
testing_metadata.config.channel_urls = [url_path(output_dir)]
api.build(testing_metadata)
output, error = capfd.readouterr()
assert "are already built" in output
assert url_path(testing_metadata.config.croot) in output
def test_failed_tests_exit_build(testing_workdir, testing_config):
"""https://github.com/conda/conda-build/issues/1112"""
with pytest.raises(SystemExit) as exc:
api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config)
assert 'TESTS FAILED' in str(exc)
def test_requirements_txt_for_run_reqs(testing_workdir, testing_config):
"""
If run reqs are blank, then conda-build looks for requirements.txt in the recipe folder.
There has been a report of an issue with unsatisfiable requirements at
https://github.com/Anaconda-Platform/anaconda-server/issues/2565
This test attempts to reproduce those conditions: a channel other than defaults with this
requirements.txt
"""
testing_config.channel_urls = ('conda_build_test', )
api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config)
@pytest.mark.serial
def test_compileall_compiles_all_good_files(testing_workdir, testing_config):
output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0]
good_files = ['f1.py', 'f3.py']
bad_file = 'f2_bad.py'
for f in good_files:
assert package_has_file(output, f)
# look for the compiled file also
assert package_has_file(output, add_mangling(f))
assert package_has_file(output, bad_file)
assert not package_has_file(output, add_mangling(bad_file))
def test_render_setup_py_old_funcname(testing_workdir, testing_config, caplog):
api.build(os.path.join(metadata_dir, "_source_setuptools"), config=testing_config)
assert "Deprecation notice: the load_setuptools function has been renamed to " in caplog.text
@pytest.mark.skipif(not on_win, reason="only Windows is insane enough to have backslashes in paths")
def test_backslash_in_always_include_files_path(testing_config):
api.build(os.path.join(metadata_dir, '_backslash_in_include_files'))
with pytest.raises(RuntimeError):
api.build(os.path.join(fail_dir, 'backslash_in_include_files'))
def test_build_metadata_object(testing_metadata):
api.build(testing_metadata)
@pytest.mark.skipif(on_win, reason="fortran compilers on win are hard.")
def test_numpy_setup_py_data(testing_config):
recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data')
m = api.render(recipe_path, config=testing_config, numpy="1.11")[0][0]
_hash = m.hash_dependencies()
assert os.path.basename(api.get_output_file_path(m)[0]) == \
"load_setup_py_test-1.0a1-np111py{0}{1}{2}_1.tar.bz2".format(
sys.version_info.major, sys.version_info.minor, _hash)
def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monkeypatch):
"""
A multi-part test encompassing the following checks:
1. That git submodules identified with both relative and absolute URLs can be mirrored
and cloned.
2. That changes pushed to the original repository are updated in the mirror and finally
reflected in the package version and filename via `GIT_DESCRIBE_TAG`.
3. That `source.py` is using `check_call_env` and `check_output_env` and that those
functions are using tools from the build env.
"""
toplevel = os.path.join(testing_workdir, 'toplevel')
os.mkdir(toplevel)
relative_sub = os.path.join(testing_workdir, 'relative_sub')
os.mkdir(relative_sub)
absolute_sub = os.path.join(testing_workdir, 'absolute_sub')
os.mkdir(absolute_sub)
sys_git_env = os.environ.copy()
sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build'
sys_git_env['GIT_AUTHOR_EMAIL'] = '[email protected]'
sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build'
sys_git_env['GIT_COMMITTER_EMAIL'] = '[email protected]'
# Find the git executable before putting our dummy one on PATH.
git = find_executable('git')
# Put the broken git on os.environ["PATH"]
exename = dummy_executable(testing_workdir, 'git')
monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep)
# .. and ensure it gets run (and fails).
FNULL = open(os.devnull, 'w')
# Strangely ..
# stderr=FNULL suppresses the output from echo on OS X whereas
# stdout=FNULL suppresses the output from echo on Windows
with pytest.raises(subprocess.CalledProcessError, message="Dummy git was not executed"):
check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL)
FNULL.close()
for tag in range(2):
os.chdir(absolute_sub)
if tag == 0:
check_call_env([git, 'init'], env=sys_git_env)
with open('absolute', 'w') as f:
f.write(str(tag))
check_call_env([git, 'add', 'absolute'], env=sys_git_env)
check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)],
env=sys_git_env)
os.chdir(relative_sub)
if tag == 0:
check_call_env([git, 'init'], env=sys_git_env)
with open('relative', 'w') as f:
f.write(str(tag))
check_call_env([git, 'add', 'relative'], env=sys_git_env)
check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)],
env=sys_git_env)
os.chdir(toplevel)
if tag == 0:
check_call_env([git, 'init'], env=sys_git_env)
with open('toplevel', 'w') as f:
f.write(str(tag))
check_call_env([git, 'add', 'toplevel'], env=sys_git_env)
check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)],
env=sys_git_env)
if tag == 0:
check_call_env([git, 'submodule', 'add',
convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'],
env=sys_git_env)
check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'],
env=sys_git_env)
else:
# Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we
# can change this to `git submodule update --recursive`.
check_call_env([git, 'submodule', 'foreach', git, 'pull'], env=sys_git_env)
check_call_env([git, 'commit', '-am', 'added submodules@{}'.format(tag)],
env=sys_git_env)
check_call_env([git, 'tag', '-a', str(tag), '-m', 'tag {}'.format(tag)],
env=sys_git_env)
# It is possible to use `Git for Windows` here too, though you *must* not use a different
# (type of) git than the one used above to add the absolute submodule, because .gitmodules
# stores the absolute path and that is not interchangeable between MSYS2 and native Win32.
#
# Also, git is set to False here because it needs to be rebuilt with the longer prefix. As
# things stand, my _b_env folder for this test contains more than 80 characters.
requirements = ('requirements', OrderedDict([
('build',
['git # [False]',
'm2-git # [win]',
'm2-filesystem # [win]'])]))
recipe_dir = os.path.join(testing_workdir, 'recipe')
if not os.path.exists(recipe_dir):
os.makedirs(recipe_dir)
filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml')
data = OrderedDict([
('package', OrderedDict([
('name', 'relative_submodules'),
('version', '{{ GIT_DESCRIBE_TAG }}')])),
('source', OrderedDict([
('git_url', toplevel),
('git_tag', str(tag))])),
requirements,
('build', OrderedDict([
('script',
['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > '
'%PREFIX%\\summaries.txt # [win]',
'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > '
'$PREFIX/summaries.txt # [not win]'])
])),
('test', OrderedDict([
('commands',
['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]'
.format(tag, tag),
'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]',
'echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]'
.format(tag, tag),
'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]'])
]))
])
with open(filename, 'w') as outfile:
outfile.write(yaml.dump(data, default_flow_style=False, width=999999999))
# Reset the path because our broken, dummy `git` would cause `render_recipe`
# to fail, while no `git` will cause the build_dependencies to be installed.
monkeypatch.undo()
# This will (after one spin round the loop) install and run 'git' with the
# build env prepended to os.environ[]
metadata = api.render(testing_workdir, config=testing_config)[0][0]
output = api.get_output_file_path(metadata, config=testing_config)[0]
assert ("relative_submodules-{}-".format(tag) in output)
api.build(metadata, config=testing_config)
def test_noarch(testing_workdir):
filename = os.path.join(testing_workdir, 'meta.yaml')
for noarch in (False, True):
data = OrderedDict([
('package', OrderedDict([
('name', 'test'),
('version', '0.0.0')])),
('build', OrderedDict([
('noarch', str(noarch))]))
])
with open(filename, 'w') as outfile:
outfile.write(yaml.dump(data, default_flow_style=False, width=999999999))
output = api.get_output_file_path(testing_workdir)[0]
assert (os.path.sep + "noarch" + os.path.sep in output or not noarch)
assert (os.path.sep + "noarch" + os.path.sep not in output or noarch)
def test_disable_pip(testing_config, testing_metadata):
testing_metadata.config.disable_pip = True
testing_metadata.meta['build']['script'] = 'python -c "import pip; print(pip.__version__)"'
with pytest.raises(subprocess.CalledProcessError):
api.build(testing_metadata)
testing_metadata.meta['build']['script'] = ('python -c "import setuptools; '
'print(setuptools.__version__)"')
with pytest.raises(subprocess.CalledProcessError):
api.build(testing_metadata)
@pytest.mark.skipif(not sys.platform.startswith('linux'),
reason="rpath fixup only done on Linux so far.")
def test_rpath_linux(testing_config):
api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config)
def test_noarch_none_value(testing_workdir, testing_config):
recipe = os.path.join(metadata_dir, "_noarch_none")
with pytest.raises(exceptions.CondaBuildException):
api.build(recipe, config=testing_config)
def test_noarch_foo_value(testing_config):
outputs = api.build(os.path.join(metadata_dir, "noarch_generic"), config=testing_config)
metadata = json.loads(package_has_file(outputs[0], 'info/index.json').decode())
assert metadata['noarch'] == "generic"
def test_about_json_content(testing_metadata):
outputs = api.build(testing_metadata)
about = json.loads(package_has_file(outputs[0], 'info/about.json').decode())
assert 'conda_version' in about and about['conda_version'] == conda.__version__
assert 'conda_build_version' in about and about['conda_build_version'] == __version__
assert 'channels' in about and about['channels']
try:
assert 'env_vars' in about and about['env_vars']
except AssertionError:
# new versions of conda support this, so we should raise errors.
if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'):
raise
else:
pass
assert 'root_pkgs' in about and about['root_pkgs']
@pytest.mark.xfail(parse_version(conda.__version__) < parse_version("4.3.14"),
reason="new noarch supported starting with conda 4.3.14")
def test_noarch_python_with_tests(testing_config):
recipe = os.path.join(metadata_dir, "_noarch_python_with_tests")
api.build(recipe, config=testing_config)
def test_noarch_python_1(testing_config):
output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0]
    assert package_has_file(output, 'info/files')  # returns contents or False; the old "is not ''" check was always true
extra = json.loads(package_has_file(output, 'info/link.json').decode())
assert 'noarch' in extra
assert 'entry_points' in extra['noarch']
assert 'type' in extra['noarch']
assert 'package_metadata_version' in extra
def test_legacy_noarch_python(testing_config):
output = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"),
config=testing_config)[0]
# make sure that the package is going into the noarch folder
assert os.path.basename(os.path.dirname(output)) == 'noarch'
@pytest.mark.skipif(parse_version(conda.__version__) < parse_version("4.5"),
reason="full preferred env implementation deferred to conda 4.5")
def test_preferred_env(testing_config):
recipe = os.path.join(metadata_dir, "_preferred_env")
output = api.build(recipe, config=testing_config)[0]
extra = json.loads(package_has_file(output, 'info/link.json').decode())
assert 'preferred_env' in extra
assert 'name' in extra['preferred_env']
assert 'executable_paths' in extra['preferred_env']
exe_paths = extra['preferred_env']['executable_paths']
if on_win:
assert exe_paths == ['Scripts/exepath1.bat', 'Scripts/exepath2.bat']
else:
assert exe_paths == ['bin/exepath1', 'bin/exepath2']
assert 'package_metadata_version' in extra
@pytest.mark.serial
def test_skip_compile_pyc(testing_config):
outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config)
tf = tarfile.open(outputs[0])
pyc_count = 0
for f in tf.getmembers():
filename = os.path.basename(f.name)
_, ext = os.path.splitext(filename)
basename = filename.split('.', 1)[0]
if basename == 'skip_compile_pyc':
assert not ext == '.pyc', "a skip_compile_pyc .pyc was compiled: {}".format(filename)
if ext == '.pyc':
assert basename == 'compile_pyc', "an unexpected .pyc was compiled: {}".format(filename)
pyc_count = pyc_count + 1
assert pyc_count == 2, "there should be 2 .pyc files, instead there were {}".format(pyc_count)
def test_detect_binary_files_with_prefix(testing_config):
outputs = api.build(os.path.join(metadata_dir, "_detect_binary_files_with_prefix"),
config=testing_config)
matches = []
with tarfile.open(outputs[0]) as tf:
has_prefix = tf.extractfile('info/has_prefix')
contents = [p.strip().decode('utf-8') for p in
has_prefix.readlines()]
has_prefix.close()
matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or
entry.endswith('"binary-has-prefix"')]
assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix"
assert ' binary ' in matches[0], "binary-has-prefix not recorded as binary in info/has_prefix"
def test_skip_detect_binary_files_with_prefix(testing_config):
recipe = os.path.join(metadata_dir, "_skip_detect_binary_files_with_prefix")
outputs = api.build(recipe, config=testing_config)
matches = []
with tarfile.open(outputs[0]) as tf:
try:
has_prefix = tf.extractfile('info/has_prefix')
contents = [p.strip().decode('utf-8') for p in
has_prefix.readlines()]
has_prefix.close()
matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or
entry.endswith('"binary-has-prefix"')]
except:
pass
assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite:" \
"build/detect_binary_files_with_prefix: false"
def test_fix_permissions(testing_config):
recipe = os.path.join(metadata_dir, "fix_permissions")
outputs = api.build(recipe, config=testing_config)
with tarfile.open(outputs[0]) as tf:
for f in tf.getmembers():
assert f.mode & 0o444 == 0o444, "tar member '{}' has invalid (read) mode".format(f.name)
@pytest.mark.skipif(not on_win, reason="windows-only functionality")
@pytest.mark.parametrize('recipe_name', ["_script_win_creates_exe",
"_script_win_creates_exe_garbled"])
def test_script_win_creates_exe(testing_config, recipe_name):
recipe = os.path.join(metadata_dir, recipe_name)
outputs = api.build(recipe, config=testing_config)
assert package_has_file(outputs[0], 'Scripts/test-script.exe')
assert package_has_file(outputs[0], 'Scripts/test-script-script.py')
def test_output_folder_moves_file(testing_metadata, testing_workdir):
testing_metadata.config.output_folder = testing_workdir
outputs = api.build(testing_metadata, no_test=True)
assert outputs[0].startswith(testing_workdir)
def test_info_files_json(testing_config):
outputs = api.build(os.path.join(metadata_dir, "ignore_some_prefix_files"),
config=testing_config)
assert package_has_file(outputs[0], "info/paths.json")
with tarfile.open(outputs[0]) as tf:
data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8'))
fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link",
"prefix_placeholder", "inode_paths"]
for key in data.keys():
assert key in ['paths', 'paths_version']
for paths in data.get('paths'):
for field in paths.keys():
assert field in fields
assert len(data.get('paths')) == 2
for file in data.get('paths'):
for key in file.keys():
assert key in fields
short_path = file.get("_path")
if short_path == "test.sh" or short_path == "test.bat":
assert file.get("prefix_placeholder") is not None
assert file.get("file_mode") is not None
else:
assert file.get("prefix_placeholder") is None
assert file.get("file_mode") is None
def test_build_expands_wildcards(mocker, testing_workdir):
build_tree = mocker.patch("conda_build.build.build_tree")
config = api.Config()
files = ['abc', 'acb']
for f in files:
os.makedirs(f)
with open(os.path.join(f, 'meta.yaml'), 'w') as fh:
fh.write('\n')
api.build(["a*"], config=config)
output = [os.path.join(os.getcwd(), path, 'meta.yaml') for path in files]
build_tree.assert_called_once_with(output, build_only=False, config=mocker.ANY,
need_source_download=True, notest=False,
post=None, variants=None)
@pytest.mark.serial
@pytest.mark.parametrize('set_build_id', [True, False])
def test_remove_workdir_default(testing_config, caplog, set_build_id):
recipe = os.path.join(metadata_dir, '_keep_work_dir')
# make a metadata object - otherwise the build folder is computed within the build, but does
# not alter the config object that is passed in. This is by design - we always make copies
# of the config object rather than edit it in place, so that variants don't clobber one
# another
metadata = api.render(recipe, config=testing_config)[0][0]
api.build(metadata, set_build_id=set_build_id)
assert not glob(os.path.join(metadata.config.work_dir, '*'))
@pytest.mark.serial
def test_keep_workdir_and_dirty_reuse(testing_config, capfd):
recipe = os.path.join(metadata_dir, '_keep_work_dir')
# make a metadata object - otherwise the build folder is computed within the build, but does
# not alter the config object that is passed in. This is by design - we always make copies
# of the config object rather than edit it in place, so that variants don't clobber one
# another
metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0]
workdir = metadata.config.work_dir
api.build(metadata)
out, err = capfd.readouterr()
assert glob(os.path.join(metadata.config.work_dir, '*'))
# test that --dirty reuses the same old folder
metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0]
assert workdir == metadata.config.work_dir
# test that without --dirty, we don't reuse the folder
metadata = api.render(recipe, config=testing_config)[0][0]
assert workdir != metadata.config.work_dir
testing_config.clean()
def test_workdir_removal_warning(testing_config, caplog):
recipe = os.path.join(metadata_dir, '_test_uses_src_dir')
with pytest.raises(ValueError) as exc:
api.build(recipe, config=testing_config)
assert "work dir is removed" in str(exc)
# @pytest.mark.serial
# @pytest.mark.skipif(not sys.platform.startswith('linux'),
# reason="cross compiler packages created only on Linux right now")
# @pytest.mark.xfail(VersionOrder(conda.__version__) < VersionOrder('4.3.2'),
# reason="not completely implemented yet")
# def test_cross_compiler(testing_workdir, testing_config, capfd):
# # TODO: testing purposes. Package from @mingwandroid's channel, copied to conda_build_test
# testing_config.channel_urls = ('conda_build_test', )
# # activation is necessary to set the appropriate toolchain env vars
# testing_config.activate = True
# # testing_config.debug = True
# recipe_dir = os.path.join(metadata_dir, '_cross_helloworld')
# output = api.build(recipe_dir, config=testing_config)[0]
# assert output.startswith(os.path.join(testing_config.croot, 'linux-imx351uc'))
@pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only")
def test_append_python_app_osx(testing_config):
"""Recipes that use osx_is_app need to have python.app in their runtime requirements.
conda-build will add it if it's missing."""
recipe = os.path.join(metadata_dir, '_osx_is_app_missing_python_app')
# tests will fail here if python.app is not added to the run reqs by conda-build, because
# without it, pythonw will be missing.
api.build(recipe, config=testing_config)
# Not sure about this behavior. Basically, people need to realize that if they
# start with a recipe from disk, they should not then alter the metadata
# object. Later reparsing will clobber their edits to the object. The
# complicated thing is that these edits are indistinguishable from Jinja2
# templating doing its normal thing.
# def test_clobbering_manually_set_metadata_raises(testing_metadata, testing_workdir):
# api.output_yaml(testing_metadata, 'meta.yaml')
# metadata = api.render(testing_workdir)[0][0]
# # make the package meta dict out of sync with file contents
# metadata.meta['package']['name'] = 'steve'
# # re-render happens as part of build. We should see an error about clobbering our customized
# # meta dict
# with pytest.raises(ValueError):
# api.build(metadata)
@pytest.mark.serial
def test_run_exports(testing_metadata, testing_config, testing_workdir):
api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config)
api.build(os.path.join(metadata_dir, '_run_exports_implicit_weak'), config=testing_config)
# run_exports is tricky. We mostly only ever want things in "host". Here are the conditions:
# 1. only build section present (legacy recipe). Here, use run_exports from build.
testing_metadata.meta['requirements']['build'] = ['test_has_run_exports']
api.output_yaml(testing_metadata, 'meta.yaml')
m = api.render(testing_workdir, config=testing_config)[0][0]
assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run']
assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run']
# 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are
# weak by default.
testing_metadata.meta['requirements']['build'] = ['test_has_run_exports_implicit_weak']
testing_metadata.meta['requirements']['host'] = ['python']
api.output_yaml(testing_metadata, 'meta.yaml')
m = api.render(testing_workdir, config=testing_config)[0][0]
assert 'weak_pinned_package 2.0.*' not in m.meta['requirements']['run']
# 3. host present, and deps in build have "strong" run_exports section. use host, add
# in "strong" from build.
testing_metadata.meta['requirements']['build'] = ['test_has_run_exports']
testing_metadata.meta['requirements']['host'] = ['test_has_run_exports_implicit_weak']
api.output_yaml(testing_metadata, 'meta.yaml')
m = api.render(testing_workdir, config=testing_config)[0][0]
assert any('strong_pinned_package 1.0' in req for req in m.meta['requirements']['host'])
assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run']
# weak one from test_has_run_exports should be excluded, since it is a build dep
assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run']
assert 'weak_pinned_package 2.0.*' in m.meta['requirements']['run']
@pytest.mark.serial
def test_ignore_run_exports(testing_metadata, testing_config):
# need to clear conda's index, or else we somehow pick up the test_run_exports folder
# above for our package here.
api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config)
testing_metadata.meta['requirements']['build'] = ['test_has_run_exports']
testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package']
testing_metadata.config.index = None
m = finalize_metadata(testing_metadata)
assert 'downstream_pinned_package 1.0' not in m.meta['requirements']['run']
def test_pin_subpackage_exact(testing_config):
recipe = os.path.join(metadata_dir, '_pin_subpackage_exact')
ms = api.render(recipe, config=testing_config)
assert any(re.match(r'run_exports_subpkg 1.0 h[a-f0-9]{%s}_0' % testing_config.hash_length,
req)
for (m, _, _) in ms for req in m.meta['requirements']['run'])
api.build(recipe, config=testing_config)
@pytest.mark.skipif(sys.platform != 'linux', reason="xattr code written here is specific to linux")
def test_copy_read_only_file_with_xattr(testing_config, testing_workdir):
src_recipe = os.path.join(metadata_dir, '_xattr_copy')
recipe = os.path.join(testing_workdir, '_xattr_copy')
copy_into(src_recipe, recipe)
# file is r/w for owner, but we change it to 400 after setting the attribute
ro_file = os.path.join(recipe, 'mode_400_file')
subprocess.check_call('setfattr -n user.attrib -v somevalue {}'.format(ro_file), shell=True)
subprocess.check_call('chmod 400 {}'.format(ro_file), shell=True)
api.build(recipe, config=testing_config)
@pytest.mark.serial
def test_env_creation_fail_exits_build(testing_config):
recipe = os.path.join(metadata_dir, '_post_link_exits_after_retry')
with pytest.raises((RuntimeError, LinkError, CondaError)):
api.build(recipe, config=testing_config)
recipe = os.path.join(metadata_dir, '_post_link_exits_tests')
with pytest.raises((RuntimeError, LinkError, CondaError)):
api.build(recipe, config=testing_config)
@pytest.mark.serial
def test_recursion_packages(testing_config):
"""Two packages that need to be built are listed in the recipe
make sure that both get built before the one needing them gets built."""
recipe = os.path.join(metadata_dir, '_recursive-build-two-packages')
api.build(recipe, config=testing_config)
@pytest.mark.serial
def test_recursion_layers(testing_config):
"""go two 'hops' - try to build a, but a needs b, so build b first, then come back to a"""
recipe = os.path.join(metadata_dir, '_recursive-build-two-layers')
api.build(recipe, config=testing_config)
@pytest.mark.skipif(sys.platform != 'win32', reason=("spaces break openssl prefix "
"replacement on *nix"))
def test_croot_with_spaces(testing_metadata, testing_workdir):
testing_metadata.config.croot = os.path.join(testing_workdir, "space path")
api.build(testing_metadata)
def test_unknown_selectors(testing_config):
recipe = os.path.join(metadata_dir, 'unknown_selector')
api.build(recipe, config=testing_config)
def test_extract_tarball_with_unicode_filename(testing_config):
"""See https://github.com/conda/conda-build/pull/1779"""
recipe = os.path.join(metadata_dir, '_unicode_in_tarball')
api.build(recipe, config=testing_config)
@pytest.mark.serial
def test_failed_recipe_leaves_folders(testing_config, testing_workdir):
recipe = os.path.join(fail_dir, 'recursive-build')
m = api.render(recipe, config=testing_config)[0][0]
locks = get_conda_operation_locks(m.config)
with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)):
api.build(m)
assert os.path.isdir(m.config.build_folder), 'build folder was removed'
assert os.listdir(m.config.build_folder), 'build folder has no files'
# make sure that it does not leave lock files, though, as these cause permission errors on
# centralized installations
any_locks = False
locks_list = set()
for lock in locks:
if os.path.isfile(lock.lock_file):
any_locks = True
dest_path = base64.b64decode(os.path.basename(lock.lock_file))
if PY3 and hasattr(dest_path, 'decode'):
dest_path = dest_path.decode()
locks_list.add((lock.lock_file, dest_path))
assert not any_locks, "remaining locks:\n{}".format('\n'.join('->'.join((l, r))
for (l, r) in locks_list))
def test_only_r_env_vars_defined(testing_config):
recipe = os.path.join(metadata_dir, '_r_env_defined')
testing_config.channel_urls = ('r', )
api.build(recipe, config=testing_config)
def test_only_perl_env_vars_defined(testing_config):
recipe = os.path.join(metadata_dir, '_perl_env_defined')
testing_config.channel_urls = ('c3i_test', )
api.build(recipe, config=testing_config)
@pytest.mark.skipif(on_win, reason='no lua package on win')
def test_only_lua_env(testing_config):
recipe = os.path.join(metadata_dir, '_lua_env_defined')
testing_config.channel_urls = ('conda-forge', )
testing_config.prefix_length = 80
testing_config.set_build_id = False
api.build(recipe, config=testing_config)
def test_run_constrained_stores_constrains_info(testing_config):
recipe = os.path.join(metadata_dir, '_run_constrained')
out_file = api.build(recipe, config=testing_config)[0]
info_contents = json.loads(package_has_file(out_file, 'info/index.json'))
assert 'constrains' in info_contents
assert len(info_contents['constrains']) == 1
assert info_contents['constrains'][0] == 'bzip2 1.*'
@pytest.mark.serial
def test_no_locking(testing_config):
recipe = os.path.join(metadata_dir, 'source_git_jinja2')
api.update_index(os.path.join(testing_config.croot, testing_config.subdir),
config=testing_config)
api.build(recipe, config=testing_config, locking=False)
def test_test_dependencies(testing_workdir, testing_config):
recipe = os.path.join(fail_dir, 'check_test_dependencies')
with pytest.raises(exceptions.DependencyNeedsBuildingError) as e:
api.build(recipe, config=testing_config)
assert 'Unsatisfiable dependencies for platform ' in str(e.value)
assert 'pytest-package-does-not-exist' in str(e.value)
def test_runtime_dependencies(testing_workdir, testing_config):
recipe = os.path.join(fail_dir, 'check_runtime_dependencies')
with pytest.raises(exceptions.DependencyNeedsBuildingError) as e:
api.build(recipe, config=testing_config)
assert 'Unsatisfiable dependencies for platform ' in str(e.value)
assert 'some-nonexistent-package1' in str(e.value)
def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata):
testing_metadata.config.anaconda_upload = True
del testing_metadata.meta['test']
api.output_yaml(testing_metadata, 'meta.yaml')
call = mocker.patch.object(conda_build.build.subprocess, 'call')
cc_conda_build['force_upload'] = False
pkg = api.build(testing_workdir)
assert call.called_once_with(['anaconda', 'upload', pkg])
del cc_conda_build['force_upload']
pkg = api.build(testing_workdir)
assert call.called_once_with(['anaconda', 'upload', '--force', pkg])
def test_setup_py_data_in_env(testing_config):
recipe = os.path.join(metadata_dir, '_setup_py_data_in_env')
# should pass with any modern python (just not 3.5)
api.build(recipe, config=testing_config)
# make sure it fails with our special python logic
with pytest.raises(subprocess.CalledProcessError):
api.build(recipe, config=testing_config, python='3.4')
def test_numpy_xx(testing_config):
recipe = os.path.join(metadata_dir, '_numpy_xx')
api.build(recipe, config=testing_config, numpy='1.12')
def test_numpy_xx_host(testing_config):
recipe = os.path.join(metadata_dir, '_numpy_xx_host')
api.build(recipe, config=testing_config, numpy='1.12')
def test_python_xx(testing_config):
recipe = os.path.join(metadata_dir, '_python_xx')
api.build(recipe, config=testing_config, python='3.4')
def test_indirect_numpy_dependency(testing_metadata):
testing_metadata.meta['requirements']['build'] = ['arrow-cpp 0.5.*']
testing_metadata.config.channel_urls = ['conda-forge']
api.build(testing_metadata, numpy=1.13)
def test_dependencies_with_notest(testing_workdir, testing_config):
recipe = os.path.join(metadata_dir, '_test_dependencies')
api.build(recipe, config=testing_config, notest=True)
with pytest.raises(DependencyNeedsBuildingError) as excinfo:
api.build(recipe, config=testing_config, notest=False)
assert 'Unsatisfiable dependencies for platform' in str(excinfo.value)
assert 'somenonexistentpackage1' in str(excinfo.value)
def test_source_cache_build(testing_workdir):
recipe = os.path.join(metadata_dir, 'source_git_jinja2')
config = api.Config(src_cache_root=testing_workdir)
api.build(recipe, notest=True, config=config)
git_cache_directory = '{}/git_cache' .format(testing_workdir)
assert os.path.isdir(git_cache_directory)
files = [filename for _, _, filenames in os.walk(git_cache_directory)
for filename in filenames]
assert len(files) > 0
def test_copy_test_source_files(testing_config):
recipe = os.path.join(metadata_dir, '_test_test_source_files')
filenames = set()
for copy in (False, True):
testing_config.copy_test_source_files = copy
outputs = api.build(recipe, notest=False, config=testing_config)
filenames.add(os.path.basename(outputs[0]))
tf = tarfile.open(outputs[0])
found = False
for f in tf.getmembers():
if f.name.startswith('info/test/'):
found = True
break
if found:
assert copy, "'info/test/' found in tar.bz2 but not copying test source files"
else:
assert not copy, "'info/test/' not found in tar.bz2 but copying test source files"
assert len(filenames) == 2, "copy_test_source_files does not modify the build hash but should"
def test_pin_depends(testing_config):
"""purpose of 'record' argument is to put a 'requires' file that records pinned run
dependencies
"""
recipe = os.path.join(metadata_dir, '_pin_depends_record')
m = api.render(recipe, config=testing_config)[0][0]
# the recipe python is not pinned, and having pin_depends set to record will not show it in record
    assert not any(re.search(r'python\s+[23]\.', dep) for dep in m.meta['requirements']['run'])
output = api.build(m, config=testing_config)[0]
requires = package_has_file(output, 'info/requires')
assert requires
if PY3 and hasattr(requires, 'decode'):
requires = requires.decode()
    assert re.search(r'python\=[23]\.', requires), "didn't find pinned python in info/requires"
def test_failed_patch_exits_build(testing_config):
with pytest.raises(RuntimeError):
api.build(os.path.join(metadata_dir, '_bad_patch'), config=testing_config)
def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config):
# unsatisfiable; also not buildable (test_a recipe version is 2.0)
testing_config.variant['test_a'] = "1.0"
recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b')
with pytest.raises(DependencyNeedsBuildingError):
api.build(recipe, config=testing_config)
# passes now, because package can be built, or is already built. Doesn't matter which.
testing_config.variant['test_a'] = "2.0"
api.build(recipe, config=testing_config)
| [… def_use_chains for the conda-build test module above: several thousand [start, end) offset spans grouped per identifier (definition first, then uses); the array is truncated in this dump …]
]
],
[
[
29529,
29565
]
],
[
[
30322,
30363
]
],
[
[
31176,
31196
]
],
[
[
31724,
31751
]
],
[
[
32036,
32065
]
],
[
[
32274,
32294
]
],
[
[
33428,
33456
]
],
[
[
34154,
34181
]
],
[
[
34792,
34825
]
],
[
[
35874,
35902
]
],
[
[
37112,
37138
]
],
[
[
38423,
38439
]
],
[
[
40521,
40544
]
],
[
[
41127,
41152
]
],
[
[
41638,
41673
]
],
[
[
42230,
42264
]
],
[
[
42671,
42694
]
],
[
[
43001,
43022
]
],
[
[
43418,
43440
]
],
[
[
43595,
43617
]
],
[
[
43746,
43788
]
],
[
[
44001,
44034
]
],
[
[
45162,
45190
]
],
[
[
45359,
45390
]
],
[
[
45629,
45646
]
],
[
[
45905,
45948
]
],
[
[
46337,
46352
]
],
[
[
46621,
46643
]
],
[
[
46997,
47022
]
],
[
[
47375,
47411
]
],
[
[
47962,
47987
]
],
[
[
48350,
48363
]
],
[
[
48499,
48517
]
],
[
[
48658,
48672
]
],
[
[
48809,
48839
]
],
[
[
49041,
49070
]
],
[
[
49495,
49518
]
],
[
[
49965,
49992
]
],
[
[
50824,
50840
]
],
[
[
51599,
51628
]
],
[
[
51773,
51840
]
]
] |
"""Tests for the backend"""
from .tools import logprint, AppTestCase, load_file_to_dict, load_json
class GetConstructsAsGenbankTests(AppTestCase):
endpoint = 'get_constructs_as_genbanks'
defaults = dict(
database_token='',
constructsData={}
)
def test_emma_2_constructs_with_one_combinatorial(self):
json = load_json('emma_2_constructs_with_one_combinatorial.json')
response = self.run_job(json_request=json)
self.assertTrue('zip_file' in response)
class GetConstructsAsPDFTests(AppTestCase):
endpoint = 'get_constructs_as_pdf'
defaults = dict(constructsData={})
def test_emma_no_annotation_to_pdf(self):
json = load_json('emma_no_annotation_to_pdf.json')
response = self.run_job(json_request=json)
self.assertTrue('pdf_file' in response)
class SendOrderToEGFTests(AppTestCase):
endpoint = 'send_order_to_egf'
defaults = dict(constructsData={}, customer={})
def test_send_order_to_egf(self):
json = load_json('emma_send_order_to_egf.json')
response = self.run_job(json_request=json)
assert 'message' in response
self.assertTrue('order was sent' in response['message'])
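# A sketch of how these suites would typically be invoked (assuming
# AppTestCase extends unittest.TestCase; the discovery path below is
# hypothetical):
#   python -m unittest discover -s tests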
| [
[
[
48,
56
]
],
[
[
58,
69
],
[
135,
146
],
[
540,
551
],
[
864,
875
]
],
[
[
71,
88
]
],
[
[
90,
99
],
[
350,
359
],
[
694,
703
],
[
1019,
1028
]
],
[
[
107,
134
]
],
[
[
516,
539
]
],
[
[
844,
863
]
]
] |
################################################################################
# Module: schedule.py
# Description: Functions for handling conversion of EnergyPlus schedule objects
# License: MIT, see full license in LICENSE.txt
# Web: https://github.com/samuelduchesne/archetypal
################################################################################
import functools
import io
import logging as lg
from datetime import datetime, timedelta
import archetypal
import numpy as np
import pandas as pd
from archetypal import log
class Schedule(object):
"""An object designed to handle any EnergyPlys schedule object"""
def __init__(self, sch_name, idf=None, start_day_of_the_week=0,
strict=False, base_year=2018, schType=None, **kwargs):
"""
Args:
            sch_name (str): The schedule name in the idf file
            idf (IDF): IDF object
            start_day_of_the_week (int): 0-based day of week (Monday=0)
            strict (bool): if True, an error is raised when Field-Sets such
                as Holidays and CustomDay are absent from the IDF file. If
                False, any missing qualifiers are ignored.
base_year (int): The base year of the schedule. Defaults to 2018
since the first day of that year is a Monday.
"""
super(Schedule, self).__init__(**kwargs)
self.strict = strict
self.idf = idf
self.schName = sch_name
self.startDayOfTheWeek = self.get_sdow(start_day_of_the_week)
self.year = base_year
self.startDate = self.start_date()
self.count = 0
self.startHOY = 1
self.endHOY = 24
self.unit = "unknown"
self.index_ = None
self.values = None
self.schType = schType
_type = kwargs.get('Type', None)
if _type is None:
self.schTypeLimitsName = self.get_schedule_type_limits_name(
sch_type=self.schType)
else:
self.schTypeLimitsName = _type
@classmethod
def constant_schedule(cls, hourly_value=1, Name='AlwaysOn', **kwargs):
idftxt = "VERSION, 8.9;" # Not an emplty string. has just the
# version number
# we can make a file handle of a string
fhandle = io.StringIO(idftxt)
# initialize the IDF object with the file handle
idf_scratch = archetypal.IDF(fhandle)
idf_scratch.add_object(ep_object='Schedule:Constant'.upper(),
**dict(Name=Name,
Schedule_Type_Limits_Name='',
Hourly_Value=hourly_value),
save=False)
sched = Schedule(sch_name=Name, idf=idf_scratch, **kwargs)
return sched
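    # A minimal usage sketch (assumes archetypal and an EnergyPlus IDD are
    # available; the name and value are illustrative):
    #   sched = Schedule.constant_schedule(hourly_value=1, Name='AlwaysOn')
    #   sched.all_values  # -> 8760 hourly values, all equal to 1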
@property
def all_values(self):
"""returns the values array"""
        if self.values is None:
            self.values = self.get_schedule_values(sch_name=self.schName,
                                                   sch_type=self.schType)
        return self.values
@property
def max(self):
return max(self.all_values)
@property
def min(self):
return min(self.all_values)
@property
def mean(self):
return np.mean(self.all_values)
@property
def series(self):
"""Returns the schedule values as a pd.Series object with a
DateTimeIndex"""
index = pd.date_range(start=self.startDate, periods=len(
self.all_values), freq='1H')
return pd.Series(self.all_values, index=index)
def get_schedule_type_limits_name(self, sch_name=None, sch_type=None):
"""Return the Schedule Type Limits name associated to a schedule
name"""
if sch_name is None:
sch_name = self.schName
if sch_type is None:
schedule_values = self.idf.get_schedule_data_by_name(sch_name,
sch_type=sch_type)
try:
schedule_limit_name = schedule_values.Schedule_Type_Limits_Name
except:
return 'unknown'
else:
return schedule_limit_name
def get_schedule_type_limits_data(self, sch_name=None):
"""Returns Schedule Type Limits data from schedule name"""
if sch_name is None:
sch_name = self.schName
schedule_values = self.idf.get_schedule_data_by_name(sch_name)
try:
schedule_limit_name = schedule_values.Schedule_Type_Limits_Name
except:
# this schedule is probably a 'Schedule:Week:Daily' which does
# not have a Schedule_Type_Limits_Name field
return '', '', '', ''
else:
lower_limit, upper_limit, numeric_type, unit_type = \
self.idf.get_schedule_type_limits_data_by_name(
schedule_limit_name)
self.unit = unit_type
if self.unit == "unknown":
self.unit = numeric_type
return lower_limit, upper_limit, numeric_type, unit_type
def get_schedule_type(self, sch_name=None):
"""Return the schedule type"""
if sch_name is None:
sch_name = self.schName
schedule_values = self.idf.get_schedule_data_by_name(sch_name)
sch_type = schedule_values.fieldvalues[0]
return sch_type
def start_date(self):
"""The start date of the schedule. Satisfies `startDayOfTheWeek`"""
import calendar
c = calendar.Calendar(firstweekday=self.startDayOfTheWeek)
start_date = c.monthdatescalendar(self.year, 1)[0][0]
return datetime(start_date.year, start_date.month, start_date.day)
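    # For example, with base_year=2018 and startDayOfTheWeek=0 (Monday),
    # the first calendar week of January 2018 begins on Monday, Jan 1, so
    # start_date() returns datetime(2018, 1, 1).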
def plot(self, slice=None, **kwargs):
hourlyvalues = self.all_values
index = pd.date_range(self.startDate, periods=len(
hourlyvalues),
freq='1H')
series = pd.Series(hourlyvalues, index=index, dtype=float)
if slice is None:
slice = pd.IndexSlice[:]
elif len(slice) > 1:
slice = pd.IndexSlice[slice[0]:slice[1]]
ax = series.loc[slice].plot(**kwargs, label=self.schName)
return ax
def get_interval_day_ep_schedule_values(self, sch_name=None):
"""'Schedule:Day:Interval"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('Schedule:Day:Interval'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
number_of_day_sch = int((len(values.fieldvalues) - 3) / 2)
        hourly_values = np.zeros(24)  # float array; cast to int below if the type is discrete
start_hour = 0
for i in range(number_of_day_sch):
value = float(values['Value_Until_Time_{}'.format(i + 1)])
until_time = [int(s.strip()) for s in
values['Time_{}'.format(i + 1)].split(":") if
s.strip().isdigit()]
end_hour = int(until_time[0] + until_time[1] / 60)
for hour in range(start_hour, end_hour):
hourly_values[hour] = value
start_hour = end_hour
if numeric_type.strip().lower() == "discrete":
hourly_values = hourly_values.astype(int)
return hourly_values
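    # For example, an interval pair 'Until: 07:00' -> 0.0 followed by
    # 'Until: 24:00' -> 1.0 fills hours 0..6 with 0.0 and hours 7..23 with
    # 1.0 (values are illustrative).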
def get_hourly_day_ep_schedule_values(self, sch_name=None):
"""'Schedule:Day:Hourly'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('Schedule:Day:Hourly'.upper(), sch_name)
fieldvalues_ = np.array(values.fieldvalues[3:])
return fieldvalues_
def get_compact_weekly_ep_schedule_values(self, sch_name=None,
start_date=None, index=None):
"""'schedule:week:compact'"""
if start_date is None:
start_date = self.startDate
if index is None:
idx = pd.date_range(start=start_date, periods=168, freq='1H')
slicer_ = pd.Series([False] * (len(idx)), index=idx)
else:
slicer_ = pd.Series([False] * (len(index)), index=index)
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:week:compact'.upper(), sch_name)
weekly_schedules = pd.Series([0] * len(slicer_), index=slicer_.index)
# update last day of schedule
if self.count == 0:
self.schType = values.key
self.endHOY = 168
num_of_daily_schedules = int(len(values.fieldvalues[2:]) / 2)
for i in range(num_of_daily_schedules):
day_type = values['DayType_List_{}'.format(i + 1)].lower()
how = self.field_set(day_type, slicer_)
if not weekly_schedules.loc[how].empty:
# Loop through days and replace with day:schedule values
days = []
for name, day in weekly_schedules.loc[how].groupby(pd.Grouper(
freq='D')):
if not day.empty:
ref = values.get_referenced_object(
"ScheduleDay_Name_{}".format(i + 1))
day.loc[:] = self.get_schedule_values(
sch_name=ref.Name, sch_type=ref.key)
days.append(day)
new = pd.concat(days)
slicer_.update(
pd.Series([True] * len(new.index), index=new.index))
                slicer_ = slicer_.astype(bool)
weekly_schedules.update(new)
else:
return weekly_schedules.values
return weekly_schedules.values
def get_daily_weekly_ep_schedule_values(self, sch_name=None):
"""'schedule:week:daily'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:week:daily'.upper(), sch_name)
# 7 list for 7 days of the week
hourly_values = []
for day in ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday']:
ref = values.get_referenced_object(
'{}_ScheduleDay_Name'.format(day))
h = self.get_schedule_values(sch_name=ref.Name, sch_type=ref.key)
hourly_values.append(h)
hourly_values = np.array(hourly_values)
# shift days earlier by self.startDayOfTheWeek
hourly_values = np.roll(hourly_values, -self.startDayOfTheWeek, axis=0)
return hourly_values.ravel()
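    # For example, with startDayOfTheWeek=3 (Thursday), np.roll(..., -3)
    # moves the Thursday row to index 0, so the flattened hourly array
    # starts on the schedule's first day of the week.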
def get_list_day_ep_schedule_values(self, sch_name=None):
"""'schedule:day:list'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:day:list'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
import pandas as pd
freq = int(values['Minutes_per_Item']) # Frequency of the values
num_values = values.fieldvalues[5:] # List of values
method = values['Interpolate_to_Timestep'] # How to resample
# fill a list of available values and pad with zeros (this is safer
# but should not occur)
        all_values = np.zeros(int(24 * 60 / freq))
        for i in range(len(all_values)):
            try:
                all_values[i] = float(num_values[i])
            except (IndexError, ValueError):
                all_values[i] = 0
# create a fake index to help us with the resampling
index = pd.date_range(start=self.startDate,
periods=(24 * 60) / freq,
freq='{}T'.format(freq))
series = pd.Series(all_values, index=index)
# resample series to hourly values and apply resampler function
series = series.resample('1H').apply(_how(method))
return series.values
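    # For example, with 'Minutes_per_Item' = 30 there are 48 values per
    # day; resampling to hourly with method 'Average' reduces them to 24
    # hourly means (illustrative numbers).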
def get_constant_ep_schedule_values(self, sch_name=None):
"""'schedule:constant'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:constant'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
        value = float(values['Hourly_Value'])
        hourly_values = np.full(8760, value)  # constant value for every hour
if numeric_type.strip().lower() == 'discrete':
hourly_values = hourly_values.astype(int)
return hourly_values
def get_file_ep_schedule_values(self, sch_name=None):
"""'schedule:file'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:file'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
filename = values['File_Name']
column = values['Column_Number']
rows = values['Rows_to_Skip_at_Top']
hours = values['Number_of_Hours_of_Data']
sep = values['Column_Separator']
interp = values['Interpolate_to_Timestep']
import pandas as pd
import os
idfdir = os.path.dirname(self.idf.idfname)
file = os.path.join(idfdir, filename)
        delimiter = _separator(sep)
        skip_rows = int(rows) - 1  # skip one row less so the header is kept
        col = [int(column) - 1]  # read_csv columns are zero-based
        values = pd.read_csv(file, delimiter=delimiter, skiprows=skip_rows,
                             usecols=col)
return values.iloc[:, 0].values
def get_compact_ep_schedule_values(self, sch_name=None):
"""'schedule:compact'"""
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:compact'.upper(), sch_name)
lower_limit, upper_limit, numeric_type, unit_type = \
self.get_schedule_type_limits_data(sch_name)
field_sets = ['through', 'for', 'interpolate', 'until', 'value']
fields = values.fieldvalues[3:]
index = pd.date_range(start=self.startDate, periods=8760, freq='H')
zeros = np.zeros(len(index))
slicer_ = pd.Series([False] * len(index), index=index)
series = pd.Series(zeros, index=index)
from_day = self.startDate
ep_from_day = datetime(self.year, 1, 1)
from_time = '00:00'
how_interpolate = None
for field in fields:
if any([spe in field.lower() for spe in field_sets]):
f_set, hour, minute, value = self.field_interpreter(field)
if f_set.lower() == 'through':
# main condition. All sub-conditions must obey a
# `Through` condition
# First, initialize the slice (all False for now)
through_conditions = self.invalidate_condition(series)
# reset from_time
from_time = '00:00'
# Prepare ep_to_day variable
ep_to_day = self.date_field_interpretation(value) + \
timedelta(days=1)
# Calculate Timedelta in days
days = (ep_to_day - ep_from_day).days
# Add timedelta to start_date
to_day = from_day + timedelta(days=days) + timedelta(
hours=-1)
# slice the conditions with the range and apply True
through_conditions.loc[from_day:to_day] = True
from_day = to_day + timedelta(hours=1)
ep_from_day = ep_to_day
elif f_set.lower() == 'for':
# slice specific days
# reset from_time
from_time = '00:00'
for_condition = self.invalidate_condition(series)
values = value.split()
if len(values) > 1:
                        # if there are multiple `For` values, e.g.
                        # `For: Weekends Holidays`, combine both conditions
for value in values:
if value.lower() == 'allotherdays':
# Apply condition to slice
how = self.field_set(value, slicer_)
                                # Reset through condition
through_conditions = how
for_condition = how
else:
how = self.field_set(value, slicer_)
for_condition.loc[how] = True
elif value.lower() == 'allotherdays':
# Apply condition to slice
how = self.field_set(value, slicer_)
                        # Reset through condition
through_conditions = how
for_condition = how
else:
# Apply condition to slice
how = self.field_set(value)
for_condition.loc[how] = True
# Combine the for_condition with all_conditions
all_conditions = through_conditions & for_condition
# update in memory slice
# self.sliced_day_.loc[all_conditions] = True
elif 'interpolate' in f_set.lower():
                    # we need to upsample the series to 8760 * 60 values
new_idx = pd.date_range(start=self.startDate,
periods=525600, closed='left',
freq='T')
series = series.resample('T').pad()
series = series.reindex(new_idx)
series.fillna(method='pad', inplace=True)
through_conditions = through_conditions.resample('T').pad()
through_conditions = through_conditions.reindex(new_idx)
through_conditions.fillna(method='pad', inplace=True)
for_condition = for_condition.resample('T').pad()
for_condition = for_condition.reindex(new_idx)
for_condition.fillna(method='pad', inplace=True)
how_interpolate = value.lower()
elif f_set.lower() == 'until':
until_condition = self.invalidate_condition(series)
if series.index.freq.name == 'T':
# until_time = str(int(hour) - 1) + ':' + minute
until_time = timedelta(hours=int(hour),
minutes=int(minute)) - timedelta(
minutes=1)
else:
until_time = str(int(hour) - 1) + ':' + minute
until_condition.loc[until_condition.between_time(from_time,
str(
until_time)).index] = True
all_conditions = for_condition & through_conditions & \
until_condition
from_time = str(int(hour)) + ':' + minute
elif f_set.lower() == 'value':
                    # If the term `Value:` field is used, we catch it
                    # here.
# update in memory slice
slicer_.loc[all_conditions] = True
series[all_conditions] = value
else:
# Do something here before looping to the next Field
pass
else:
                # If the term `Value:` is not used, the value is simply
                # given in the Field
value = float(field)
series[all_conditions] = value
# update in memory slice
slicer_.loc[all_conditions] = True
if how_interpolate:
return series.resample('H').mean().values
else:
return series.values
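    # An illustrative 'Schedule:Compact' field sequence that this parser
    # understands (fields after the first three):
    #   Through: 12/31, For: Weekdays, Until: 07:00, 0.0, Until: 24:00,
    #   1.0, For: AllOtherDays, Until: 24:00, 0.0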
def field_interpreter(self, field):
"""dealing with a Field-Set (Through, For, Interpolate,
# Until, Value) and return the parsed string"""
if 'through' in field.lower():
# deal with through
if ':' in field.lower():
# parse colon
f_set, statement = field.split(':')
hour = None
minute = None
value = statement.strip()
else:
msg = 'The schedule "{sch}" contains a Field ' \
'that is not understood: "{field}"'.format(
sch=self.schName, field=field)
raise NotImplementedError(msg)
elif 'for' in field.lower():
if ':' in field.lower():
# parse colon
f_set, statement = field.split(':')
value = statement.strip()
hour = None
minute = None
else:
# parse without a colon
msg = 'The schedule "{sch}" contains a Field ' \
'that is not understood: "{field}"'.format(
sch=self.schName, field=field)
raise NotImplementedError(msg)
elif 'interpolate' in field.lower():
msg = 'The schedule "{sch}" contains sub-hourly values (' \
'Field-Set="{field}"). The average over the hour is ' \
'taken'.format(sch=self.schName, field=field)
log(msg, lg.WARNING)
f_set, value = field.split(':')
hour = None
minute = None
elif 'until' in field.lower():
if ':' in field.lower():
# parse colon
try:
f_set, hour, minute = field.split(':')
hour = hour.strip() # remove trailing spaces
minute = minute.strip() # remove trailing spaces
value = None
except:
f_set = 'until'
hour, minute = field.split(':')
hour = hour[-2:].strip()
minute = minute.strip()
value = None
else:
msg = 'The schedule "{sch}" contains a Field ' \
'that is not understood: "{field}"'.format(
sch=self.schName, field=field)
raise NotImplementedError(msg)
elif 'value' in field.lower():
if ':' in field.lower():
# parse colon
f_set, statement = field.split(':')
value = statement.strip()
hour = None
minute = None
else:
msg = 'The schedule "{sch}" contains a Field ' \
'that is not understood: "{field}"'.format(
sch=self.schName, field=field)
raise NotImplementedError(msg)
else:
# deal with the data value
f_set = field
hour = None
minute = None
value = field[len(field) + 1:].strip()
return f_set, hour, minute, value
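    # Illustrative parsed results, as (f_set, hour, minute, value):
    #   'Through: 12/31' -> ('Through', None, None, '12/31')
    #   'For: Weekdays'  -> ('For', None, None, 'Weekdays')
    #   'Until: 07:00'   -> ('Until', '07', '00', None)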
@staticmethod
def invalidate_condition(series):
index = series.index
periods = len(series)
return pd.Series([False] * periods, index=index)
def get_yearly_ep_schedule_values(self, sch_name=None):
"""'schedule:year'"""
# first week
start_date = self.startDate
idx = pd.date_range(start=start_date, periods=8760, freq='1H')
hourly_values = pd.Series([0] * 8760, index=idx)
# update last day of schedule
self.endHOY = 8760
if sch_name is None:
sch_name = self.schName
values = self.idf.getobject('schedule:year'.upper(), sch_name)
# generate weekly schedules
num_of_weekly_schedules = int(len(values.fieldvalues[3:]) / 5)
for i in range(num_of_weekly_schedules):
ref = values.get_referenced_object(
'ScheduleWeek_Name_{}'.format(i + 1))
start_month = values['Start_Month_{}'.format(i + 1)]
end_month = values['End_Month_{}'.format(i + 1)]
start_day = values['Start_Day_{}'.format(i + 1)]
end_day = values['End_Day_{}'.format(i + 1)]
start = datetime.strptime(
'{}/{}/{}'.format(self.year, start_month, start_day),
'%Y/%m/%d')
end = datetime.strptime(
'{}/{}/{}'.format(self.year, end_month, end_day),
'%Y/%m/%d')
days = (end - start).days + 1
end_date = start_date + timedelta(days=days) + timedelta(hours=23)
how = pd.IndexSlice[start_date:end_date]
weeks = []
for name, week in hourly_values.loc[how].groupby(
pd.Grouper(freq='168H')):
if not week.empty:
try:
week.loc[:] = self.get_schedule_values(
sch_name=ref.Name, start_date=week.index[0],
index=week.index, sch_type=ref.key)
except ValueError:
week.loc[:] = self.get_schedule_values(
ref.Name, week.index[0])[0:len(week)]
finally:
weeks.append(week)
new = pd.concat(weeks)
hourly_values.update(new)
start_date += timedelta(days=days)
return hourly_values.values
def get_schedule_values(self, sch_name=None, start_date=None, index=None,
sch_type=None):
"""Main function that returns the schedule values
Args:
sch_type:
index:
start_date:
"""
if sch_name is None:
sch_name = self.schName
if sch_type is None:
schedule_values = self.idf.get_schedule_data_by_name(sch_name)
self.schType = schedule_values.key.upper()
sch_type = self.schType
if self.count == 0:
# This is the first time, get the schedule type and the type limits.
self.schTypeLimitsName = self.get_schedule_type_limits_name()
self.count += 1
if sch_type.upper() == "schedule:year".upper():
hourly_values = self.get_yearly_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:interval".upper():
hourly_values = self.get_interval_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:hourly".upper():
hourly_values = self.get_hourly_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:day:list".upper():
hourly_values = self.get_list_day_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:week:compact".upper():
hourly_values = self.get_compact_weekly_ep_schedule_values(
sch_name, start_date, index)
elif sch_type.upper() == "schedule:week:daily".upper():
hourly_values = self.get_daily_weekly_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:constant".upper():
hourly_values = self.get_constant_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:compact".upper():
hourly_values = self.get_compact_ep_schedule_values(
sch_name)
elif sch_type.upper() == "schedule:file".upper():
hourly_values = self.get_file_ep_schedule_values(
sch_name)
else:
log('Archetypal does not support "{}" currently'.format(
self.schType), lg.WARNING)
hourly_values = []
return hourly_values
def is_schedule(self, sch_name):
"""Returns True if idfobject is one of 'schedule_types'"""
if sch_name.upper() in self.idf.schedules_dict:
return True
else:
return False
def to_year_week_day(self):
"""convert a Schedule Class to the 'Schedule:Year',
'Schedule:Week:Daily' and 'Schedule:Day:Hourly' representation
Returns:
'Schedule:Year', list of ['Schedule:Week:Daily'],
list of ['Schedule:Day:Hourly']
"""
full_year = np.array(self.all_values) # array of shape (8760,)
values = full_year.reshape(-1, 24) # shape (365, 24)
# create unique days
unique_days, nds = np.unique(values, axis=0, return_inverse=True)
ep_days = []
dict_day = {}
count_day = 0
for unique_day in unique_days:
name = 'd_' + self.schName + '_' + '%03d' % count_day
name, count_day = archetypal.check_unique_name('d', count_day,
name,
archetypal.settings.unique_schedules,
suffix=True)
dict_day[name] = unique_day
archetypal.settings.unique_schedules.append(name)
# Create idf_objects for schedule:day:hourly
ep_day = self.idf.add_object(
ep_object='Schedule:Day:Hourly'.upper(),
save=False,
**dict(Name=name,
Schedule_Type_Limits_Name=self.schType,
**{'Hour_{}'.format(i + 1): unique_day[i]
for i in range(24)})
)
ep_days.append(ep_day)
# create unique weeks from unique days
unique_weeks, nwsi, nws, count = np.unique(
full_year[:364 * 24, ...].reshape(-1, 168), return_index=True,
axis=0, return_inverse=True, return_counts=True)
# Appending unique weeks in dictionary with name and values of weeks as
# keys
# {'name_week': {'dayName':[]}}
dict_week = {}
count_week = 0
for unique_week in unique_weeks:
week_id = 'w_' + self.schName + '_' + '%03d' % count_week
week_id, count_week = archetypal.check_unique_name('w',
count_week,
week_id,
archetypal.settings.unique_schedules,
suffix=True)
archetypal.settings.unique_schedules.append(week_id)
dict_week[week_id] = {}
for i in list(range(0, 7)):
day_of_week = unique_week[..., i * 24:(i + 1) * 24]
for key in dict_day:
if (day_of_week == dict_day[key]).all():
dict_week[week_id]['day_{}'.format(i)] = key
# Create idf_objects for schedule:week:daily
list_day_of_week = ['Sunday', 'Monday', 'Tuesday',
'Wednesday', 'Thursday', 'Friday', 'Saturday']
ordered_day_n = np.array([6, 0, 1, 2, 3, 4, 5])
ordered_day_n = np.roll(ordered_day_n, self.startDayOfTheWeek)
ep_weeks = []
for week_id in dict_week:
ep_week = self.idf.add_object(
ep_object='Schedule:Week:Daily'.upper(),
save=False,
**dict(Name=week_id,
**{'{}_ScheduleDay_Name'.format(
weekday): dict_week[week_id][
'day_{}'.format(i)] for
i, weekday in
zip(ordered_day_n, list_day_of_week)
},
Holiday_ScheduleDay_Name=
dict_week[week_id]['day_6'],
SummerDesignDay_ScheduleDay_Name=
dict_week[week_id]['day_1'],
WinterDesignDay_ScheduleDay_Name=
dict_week[week_id]['day_1'],
CustomDay1_ScheduleDay_Name=
dict_week[week_id]['day_2'],
CustomDay2_ScheduleDay_Name=
dict_week[week_id]['day_5'])
)
ep_weeks.append(ep_week)
import itertools
blocks = {}
from_date = datetime(self.year, 1, 1)
bincount = [sum(1 for _ in group)
for key, group in itertools.groupby(nws + 1) if key]
week_order = {i: v for i, v in enumerate(np.array(
[key for key, group in itertools.groupby(nws + 1) if key]) - 1)}
for i, (week_n, count) in enumerate(
zip(week_order, bincount)):
week_id = list(dict_week)[week_order[i]]
to_date = from_date + timedelta(days=int(count * 7), hours=-1)
blocks[i] = {}
blocks[i]['week_id'] = week_id
blocks[i]['from_day'] = from_date.day
blocks[i]['end_day'] = to_date.day
blocks[i]['from_month'] = from_date.month
blocks[i]['end_month'] = to_date.month
from_date = to_date + timedelta(hours=1)
# If this is the last block, force end of year
if i == len(bincount) - 1:
blocks[i]['end_day'] = 31
blocks[i]['end_month'] = 12
new_dict = dict(Name=self.schName + '_',
Schedule_Type_Limits_Name=self.schTypeLimitsName)
for i in blocks:
new_dict.update({"ScheduleWeek_Name_{}".format(i + 1):
blocks[i]['week_id'],
"Start_Month_{}".format(i + 1):
blocks[i]['from_month'],
"Start_Day_{}".format(i + 1):
blocks[i]['from_day'],
"End_Month_{}".format(i + 1):
blocks[i]['end_month'],
"End_Day_{}".format(i + 1):
blocks[i]['end_day']})
ep_year = self.idf.add_object(ep_object='Schedule:Year'.upper(),
save=False, **new_dict)
return ep_year, ep_weeks, ep_days
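    # For example, a constant schedule collapses to a single unique day and
    # a single unique week, so the resulting 'Schedule:Year' contains one
    # week block whose end date is forced to 12/31.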
def date_field_interpretation(self, field):
"""Date Field Interpretation
Args:
field (str): The EnergyPlus Field Contents
Returns:
(datetime): The datetime object
Info:
See EnergyPlus documentation for more details:
1.6.8.1.2 Field: Start Date (Table 1.4: Date Field Interpretation)
"""
# < number > Weekday in Month
formats = ['%m/%d', '%d %B', '%B %d', '%d %b', '%b %d']
date = None
for format_str in formats:
            # Try to parse using each of the defined formats
try:
date = datetime.strptime(field, format_str)
except:
pass
else:
date = datetime(self.year, date.month, date.day)
if date is None:
# if the defined formats did not work, try the fancy parse
try:
date = self.parse_fancy_string(field)
except:
msg = "the schedule '{sch}' contains a " \
"Field that is not understood: '{field}'".format(
sch=self.schName,
field=field)
raise ValueError(msg)
else:
return date
else:
return date
def parse_fancy_string(self, field):
"""Will try to parse cases such as `3rd Monday in February` or `Last
Weekday In Month`
Args:
field (str): The EnergyPlus Field Contents
Returns:
(datetime): The datetime object
"""
import re
# split the string at the term ' in '
time, month = field.lower().split(' in ')
month = datetime.strptime(month, '%B').month
# split the first part into nth and dayofweek
nth, dayofweek = time.split(' ')
if 'last' in nth:
nth = -1 # Use the last one
else:
nth = re.findall(r'\d+', nth) # use the nth one
nth = int(nth[0]) - 1 # python is zero-based
weekday = {'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3,
'friday': 4, 'saturday': 5, 'sunday': 6}
# parse the dayofweek eg. monday
dayofweek = weekday.get(dayofweek, 6)
# create list of possible days using Calendar
import calendar
c = calendar.Calendar(firstweekday=self.startDayOfTheWeek)
monthcal = c.monthdatescalendar(self.year, month)
# iterate though the month and get the nth weekday
        date = [day for week in monthcal for day in week
                if day.weekday() == dayofweek and day.month == month][nth]
return datetime(date.year, date.month, date.day)
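    # e.g. parse_fancy_string('3rd Monday in February') returns the
    # datetime of the third Monday of February in self.year.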
def field_set(self, field, slicer_=None):
"""helper function to return the proper slicer depending on the
field_set value.
Available values are:
Weekdays, Weekends, Holidays, Alldays, SummerDesignDay,
WinterDesignDay, Sunday, Monday, Tuesday, Wednesday, Thursday,
Friday, Saturday, CustomDay1, CustomDay2, AllOtherDays
Args:
field (str): The EnergyPlus field set value.
slicer_ (pd.Series): The persistent slicer for this schedule
Returns:
(indexer-like): Returns the appropriate indexer for the series.
"""
if field.lower() == 'weekdays':
# return only days of weeks
return lambda x: x.index.dayofweek < 5
elif field.lower() == 'weekends':
# return only weekends
return lambda x: x.index.dayofweek >= 5
elif field.lower() == 'alldays':
log('For schedule "{}", the field-set "AllDays" may be overridden '
'by the "AllOtherDays" field-set'.format(
self.schName), lg.WARNING)
            # return all days; equivalent to .loc[:]
return pd.IndexSlice[:]
elif field.lower() == 'allotherdays':
# return unused days (including special days). Uses the global
# variable `slicer_`
import operator
if slicer_ is not None:
return _conjunction(*[self.special_day(field, slicer_),
~slicer_], logical=operator.or_)
else:
raise NotImplementedError
elif field.lower() == 'sunday':
# return only sundays
return lambda x: x.index.dayofweek == 6
elif field.lower() == 'monday':
# return only mondays
return lambda x: x.index.dayofweek == 0
elif field.lower() == 'tuesday':
# return only Tuesdays
return lambda x: x.index.dayofweek == 1
elif field.lower() == 'wednesday':
# return only Wednesdays
return lambda x: x.index.dayofweek == 2
elif field.lower() == 'thursday':
# return only Thursdays
return lambda x: x.index.dayofweek == 3
elif field.lower() == 'friday':
# return only Fridays
return lambda x: x.index.dayofweek == 4
elif field.lower() == 'saturday':
# return only Saturdays
return lambda x: x.index.dayofweek == 5
elif field.lower() == 'summerdesignday':
# return design_day(self, field)
return None
elif field.lower() == 'winterdesignday':
# return design_day(self, field)
return None
elif field.lower() == 'holiday' or field.lower() == 'holidays':
field = 'holiday'
return self.special_day(field, slicer_)
elif not self.strict:
# If not strict, ignore missing field-sets such as CustomDay1
return pd.IndexSlice[:]
else:
raise NotImplementedError(
'Archetypal does not yet support The '
'Field_set "{}"'.format(field))
def __len__(self):
"""returns the length of all values of the schedule"""
return len(self.all_values)
def __eq__(self, other):
"""Overrides the default implementation"""
if isinstance(other, Schedule):
return self.all_values == other.all_values
else:
raise NotImplementedError
def __ne__(self, other):
return ~(self.__eq__(other))
def __add__(self, other):
if isinstance(other, Schedule):
return self.all_values + other.all_values
elif isinstance(other, list):
return self.all_values + other
else:
raise NotImplementedError
def __sub__(self, other):
if isinstance(other, Schedule):
return self.all_values - other.all_values
elif isinstance(other, list):
return self.all_values - other
else:
raise NotImplementedError
def __mul__(self, other):
if isinstance(other, Schedule):
return self.all_values * other.all_values
elif isinstance(other, list):
return self.all_values * other
else:
raise NotImplementedError
def get_sdow(self, start_day_of_week):
"""Returns the start day of the week"""
if start_day_of_week is None:
return self.idf.day_of_week_for_start_day
else:
return start_day_of_week
def special_day(self, field, slicer_):
"""try to get the RunPeriodControl:SpecialDays for the corresponding
Day Type"""
sp_slicer_ = slicer_.copy()
sp_slicer_.loc[:] = False
special_day_types = ['holiday', 'customday1', 'customday2']
dds = self.idf.idfobjects['RunPeriodControl:SpecialDays'.upper()]
dd = [dd for dd in dds if dd.Special_Day_Type.lower() == field
or dd.Special_Day_Type.lower() in special_day_types]
if len(dd) > 0:
            for sp in dd:
                # there can be more than one special day type
                data = sp.Start_Date
                ep_start_date = self.date_field_interpretation(data)
                ep_orig = datetime(self.year, 1, 1)
                days_to_speciald = (ep_start_date - ep_orig).days
                duration = int(sp.Duration)
                from_date = self.startDate + timedelta(days=days_to_speciald)
                to_date = from_date + timedelta(days=duration) + timedelta(
                    hours=-1)
                sp_slicer_.loc[from_date:to_date] = True
return sp_slicer_
elif not self.strict:
return sp_slicer_
else:
msg = 'Could not find a "SizingPeriod:DesignDay" object ' \
'needed for schedule "{}" with Day Type "{}"'.format(
self.schName, field.capitalize()
)
raise ValueError(msg)
def design_day(schedule, field):
# try to get the SizingPeriod:DesignDay for the corresponding Day Type
dds = schedule.idf.idfobjects['SizingPeriod:DesignDay'.upper()]
dd = [dd for dd in dds if dd.Day_Type.lower() == field]
if len(dd) > 0:
# should have found only one design day matching the Day Type
data = [dd[0].Month, dd[0].Day_of_Month]
date = '/'.join([str(item).zfill(2) for item in data])
date = schedule.date_field_interpretation(date)
return lambda x: x.index == date
else:
msg = 'Could not find a "SizingPeriod:DesignDay" object ' \
'needed for schedule "{}" with Day Type "{}"'.format(
schedule.schName, field.capitalize()
)
raise ValueError(msg)
def _conjunction(*conditions, logical=np.logical_and):
"""Applies a logical function on n conditions"""
return functools.reduce(logical, conditions)
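# e.g. _conjunction(a, b, c) is equivalent to
# np.logical_and(np.logical_and(a, b), c); passing logical=operator.or_
# (as field_set does for 'allotherdays') gives an element-wise OR instead.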
def _separator(sep):
"""helper function to return the correct delimiter"""
if sep == 'Comma':
return ','
elif sep == 'Tab':
return '\t'
elif sep == 'Fixed':
return None
elif sep == 'Semicolon':
return ';'
else:
return ','
def _how(how):
"""Helper function to return the correct resampler"""
if how.lower() == 'average':
return 'mean'
elif how.lower() == 'linear':
return 'interpolate'
elif how.lower() == 'no':
return 'max'
else:
return 'max'
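# Illustrative use of the helpers above (series and file are assumed to
# exist in the caller's scope):
#   series.resample('1H').apply(_how('Average'))   # hourly means
#   pd.read_csv(file, delimiter=_separator('Tab')) # tab-separated read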
| [
[
[
372,
381
],
[
44582,
44591
]
],
[
[
389,
391
],
[
2337,
2339
]
],
[
[
399,
412
],
[
21931,
21933
],
[
28267,
28269
],
[
38673,
38675
]
],
[
[
434,
442
],
[
5771,
5779
],
[
14514,
14522
],
[
24781,
24789
],
[
24916,
24924
],
[
32904,
32912
],
[
35454,
35462
],
[
35573,
35581
],
[
36546,
36554
],
[
37536,
37544
],
[
42965,
42973
]
],
[
[
444,
453
],
[
15309,
15318
],
[
15526,
15535
],
[
15549,
15558
],
[
15776,
15785
],
[
18861,
18870
],
[
18958,
18967
],
[
25108,
25117
],
[
25131,
25140
],
[
25937,
25946
],
[
33357,
33366
],
[
33704,
33713
],
[
43146,
43155
],
[
43217,
43226
],
[
43244,
43253
]
],
[
[
462,
472
],
[
2436,
2446
],
[
29305,
29315
],
[
29474,
29484
],
[
29637,
29647
],
[
30693,
30703
],
[
30937,
30947
],
[
31063,
31073
]
],
[
[
480,
491
],
[
44501,
44503
],
[
3372,
3374
],
[
6798,
6800
],
[
7721,
7723
],
[
10508,
10510
],
[
10611,
10613
],
[
11427,
11429
],
[
12424,
12426
],
[
14325,
14327
],
[
28886,
28888
],
[
29057,
29059
],
[
30219,
30221
],
[
31640,
31642
],
[
31696,
31698
],
[
33094,
33096
]
],
[
[
499,
511
],
[
3543,
3545
],
[
3648,
3650
],
[
5929,
5931
],
[
6057,
6059
],
[
6153,
6155
],
[
6219,
6221
],
[
8080,
8082
],
[
8158,
8160
],
[
8237,
8239
],
[
8457,
8459
],
[
9104,
9106
],
[
9506,
9508
],
[
9574,
9576
],
[
14249,
14251
],
[
14365,
14367
],
[
14427,
14429
],
[
17753,
17755
],
[
23732,
23734
],
[
23937,
23939
],
[
24018,
24020
],
[
25169,
25171
],
[
25310,
25312
],
[
25856,
25858
],
[
38760,
38762
],
[
40609,
40611
]
],
[
[
535,
538
],
[
21922,
21925
],
[
28179,
28182
],
[
38516,
38519
]
],
[
[
547,
555
],
[
1394,
1402
],
[
2774,
2782
],
[
41015,
41023
],
[
41260,
41268
],
[
41518,
41526
],
[
41776,
41784
]
],
[
[
43694,
43704
]
],
[
[
44467,
44479
],
[
39018,
39030
]
],
[
[
44626,
44636
],
[
13478,
13488
]
],
[
[
44914,
44918
],
[
11998,
12002
]
]
] |
__version__ = '0.10.2' # pragma: no cover
| [
[
[
0,
11
]
]
] |
from reports import suite as reports_suite
from orders import suite as orders_suite
| [
[
[
20,
42
]
],
[
[
62,
83
]
]
] |
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Score2Perf music encoders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import magenta
from magenta.models.score2perf import music_encoders
from magenta.music import testing_lib
from magenta.music.protobuf import music_pb2
import tensorflow.compat.v1 as tf
class MidiPerformanceEncoderTest(tf.test.TestCase):
def testNumReservedIds(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108)
self.assertEqual(2, encoder.num_reserved_ids)
def testEncodeEmptyNoteSequence(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108)
ids = encoder.encode_note_sequence(music_pb2.NoteSequence())
self.assertEqual([], ids)
def testEncodeEmptyNoteSequenceAddEos(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
add_eos=True)
ids = encoder.encode_note_sequence(music_pb2.NoteSequence())
self.assertEqual([1], ids)
def testEncodeNoteSequence(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108)
ns = music_pb2.NoteSequence()
testing_lib.add_track_to_sequence(
ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)])
ids = encoder.encode_note_sequence(ns)
expected_ids = [
302, # VELOCITY(25)
41, # NOTE-ON(60)
45, # NOTE-ON(64)
277, # TIME-SHIFT(100)
309, # VELOCITY(32)
48, # NOTE-ON(67)
277, # TIME-SHIFT(100)
136, # NOTE-OFF(67)
277, # TIME-SHIFT(100)
        133, # NOTE-OFF(64)
277, # TIME-SHIFT(100)
129 # NOTE-OFF(60)
]
self.assertEqual(expected_ids, ids)
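    # These ids are consistent with an event vocabulary laid out as: 2
    # reserved ids, 88 NOTE-ON and 88 NOTE-OFF events (min_pitch=21,
    # max_pitch=108), 100 TIME-SHIFT bins, then 32 VELOCITY bins, e.g.
    # NOTE-ON(60) = 2 + (60 - 21) = 41 and VELOCITY(25) = 278 + 24 = 302.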
def testEncodeNoteSequenceAddEos(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
add_eos=True)
ns = music_pb2.NoteSequence()
testing_lib.add_track_to_sequence(
ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)])
ids = encoder.encode_note_sequence(ns)
expected_ids = [
302, # VELOCITY(25)
41, # NOTE-ON(60)
45, # NOTE-ON(64)
277, # TIME-SHIFT(100)
309, # VELOCITY(32)
48, # NOTE-ON(67)
277, # TIME-SHIFT(100)
136, # NOTE-OFF(67)
277, # TIME-SHIFT(100)
        133, # NOTE-OFF(64)
277, # TIME-SHIFT(100)
129, # NOTE-OFF(60)
1 # EOS
]
self.assertEqual(expected_ids, ids)
def testEncodeNoteSequenceNGrams(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)])
ns = music_pb2.NoteSequence()
testing_lib.add_track_to_sequence(
ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)])
ids = encoder.encode_note_sequence(ns)
expected_ids = [
302, # VELOCITY(25)
310, # NOTE-ON(60), NOTE-ON(64)
277, # TIME-SHIFT(100)
312, # VELOCITY(32), NOTE-ON(67)
277, # TIME-SHIFT(100)
136, # NOTE-OFF(67)
277, # TIME-SHIFT(100)
        133, # NOTE-OFF(64)
277, # TIME-SHIFT(100)
129 # NOTE-OFF(60)
]
self.assertEqual(expected_ids, ids)
def testEncode(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
ngrams=[(277, 129)])
ns = music_pb2.NoteSequence()
testing_lib.add_track_to_sequence(ns, 0, [(60, 97, 0.0, 1.0)])
# Write NoteSequence to MIDI file as encoder takes in filename.
with tempfile.NamedTemporaryFile(suffix='.mid') as f:
magenta.music.sequence_proto_to_midi_file(ns, f.name)
ids = encoder.encode(f.name)
expected_ids = [
302, # VELOCITY(25)
41, # NOTE-ON(60)
310 # TIME-SHIFT(100), NOTE-OFF(60)
]
self.assertEqual(expected_ids, ids)
def testDecode(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
ngrams=[(277, 129)])
ids = [
302, # VELOCITY(25)
41, # NOTE-ON(60)
310 # TIME-SHIFT(100), NOTE-OFF(60)
]
# Decode method returns MIDI filename, read and convert to NoteSequence.
filename = encoder.decode(ids)
ns = magenta.music.midi_file_to_sequence_proto(filename)
# Remove default tempo & time signature.
del ns.tempos[:]
del ns.time_signatures[:]
expected_ns = music_pb2.NoteSequence(ticks_per_quarter=220)
testing_lib.add_track_to_sequence(expected_ns, 0, [(60, 97, 0.0, 1.0)])
# Add source info fields.
expected_ns.source_info.encoding_type = (
music_pb2.NoteSequence.SourceInfo.MIDI)
expected_ns.source_info.parser = (
music_pb2.NoteSequence.SourceInfo.PRETTY_MIDI)
self.assertEqual(expected_ns, ns)
def testVocabSize(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108)
self.assertEqual(310, encoder.vocab_size)
def testVocabSizeNGrams(self):
encoder = music_encoders.MidiPerformanceEncoder(
steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108,
ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)])
self.assertEqual(314, encoder.vocab_size)
class TextChordsEncoderTest(tf.test.TestCase):
def testEncodeNoteSequence(self):
encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1)
ns = music_pb2.NoteSequence()
ns.tempos.add(qpm=60)
testing_lib.add_chords_to_sequence(
ns, [('C', 1), ('Dm', 3), ('Bdim', 4)])
ns.total_time = 5.0
ids = encoder.encode_note_sequence(ns)
expected_ids = [
2, # no-chord
3, # C major
3, # C major
17, # D minor
50 # B diminished
]
self.assertEqual(expected_ids, ids)
def testEncode(self):
encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1)
ids = encoder.encode('C G Am F')
expected_ids = [
3, # C major
10, # G major
24, # A minor
8 # F major
]
self.assertEqual(expected_ids, ids)
def testVocabSize(self):
encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1)
self.assertEqual(51, encoder.vocab_size)
class TextMelodyEncoderTest(tf.test.TestCase):
def testEncodeNoteSequence(self):
encoder = music_encoders.TextMelodyEncoder(
steps_per_quarter=4, min_pitch=21, max_pitch=108)
encoder_absolute = music_encoders.TextMelodyEncoderAbsolute(
steps_per_second=4, min_pitch=21, max_pitch=108)
ns = music_pb2.NoteSequence()
ns.tempos.add(qpm=60)
testing_lib.add_track_to_sequence(
ns, 0,
[(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)])
ids = encoder.encode_note_sequence(ns)
ids_absolute = encoder_absolute.encode_note_sequence(ns)
expected_ids = [
43, # ON(60)
45, # ON(62)
2, # HOLD(62)
3, # OFF(62)
2, # REST
47, # ON(64)
2, # HOLD(64)
2 # HOLD(64)
]
self.assertEqual(expected_ids, ids)
self.assertEqual(expected_ids, ids_absolute)
def testEncode(self):
encoder = music_encoders.TextMelodyEncoder(
steps_per_quarter=4, min_pitch=21, max_pitch=108)
ids = encoder.encode('60 -2 62 -1 64 -2')
expected_ids = [
43, # ON(60)
2, # HOLD(60)
45, # ON(62)
3, # OFF(62)
47, # ON(64)
2 # HOLD(64)
]
self.assertEqual(expected_ids, ids)
def testVocabSize(self):
encoder = music_encoders.TextMelodyEncoder(
steps_per_quarter=4, min_pitch=21, max_pitch=108)
self.assertEqual(92, encoder.vocab_size)
class FlattenedTextMelodyEncoderTest(tf.test.TestCase):
def testEncodeNoteSequence(self):
encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute(
steps_per_second=4, num_velocity_bins=127)
ns = music_pb2.NoteSequence()
ns.tempos.add(qpm=60)
testing_lib.add_track_to_sequence(
ns, 0,
[(60, 127, 0.0, 0.25), (62, 15, 0.25, 0.75), (64, 32, 1.25, 2.0)])
ids = encoder.encode_note_sequence(ns)
expected_ids = [
130, # ON(vel=127)
18, # ON(vel=15)
2, # HOLD(62)
2, # REST
2, # REST
35, # ON(vel=32)
2, # HOLD(64)
2 # HOLD(64)
]
self.assertEqual(expected_ids, ids)
def testVocabSize(self):
num_vel_bins = 12
encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute(
steps_per_second=4, num_velocity_bins=num_vel_bins)
expected = num_vel_bins + encoder.num_reserved_ids + 2
self.assertEqual(expected, encoder.vocab_size)
class CompositeScoreEncoderTest(tf.test.TestCase):
def testEncodeNoteSequence(self):
encoder = music_encoders.CompositeScoreEncoder([
music_encoders.TextChordsEncoder(steps_per_quarter=4),
music_encoders.TextMelodyEncoder(
steps_per_quarter=4, min_pitch=21, max_pitch=108)
])
ns = music_pb2.NoteSequence()
ns.tempos.add(qpm=60)
testing_lib.add_chords_to_sequence(ns, [('C', 0.5), ('Dm', 1.0)])
testing_lib.add_track_to_sequence(
ns, 0,
[(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)])
chord_ids, melody_ids = zip(*encoder.encode_note_sequence(ns))
expected_chord_ids = [
2, # no-chord
2, # no-chord
3, # C major
3, # C major
17, # D minor
17, # D minor
17, # D minor
17 # D minor
]
expected_melody_ids = [
43, # ON(60)
45, # ON(62)
2, # HOLD(62)
3, # OFF(62)
2, # REST
47, # ON(64)
2, # HOLD(64)
2 # HOLD(64)
]
self.assertEqual(expected_chord_ids, list(chord_ids))
self.assertEqual(expected_melody_ids, list(melody_ids))
# TODO(iansimon): also test MusicXML encoding
def testVocabSize(self):
encoder = music_encoders.CompositeScoreEncoder([
music_encoders.TextChordsEncoder(steps_per_quarter=4),
music_encoders.TextMelodyEncoder(
steps_per_quarter=4, min_pitch=21, max_pitch=108)
])
self.assertEqual([51, 92], encoder.vocab_size)
if __name__ == '__main__':
tf.test.main()
| [
[
[
652,
667
]
],
[
[
691,
699
]
],
[
[
723,
737
]
],
[
[
746,
754
],
[
4961,
4969
]
],
[
[
763,
770
],
[
5022,
5029
],
[
5787,
5794
]
],
[
[
809,
823
],
[
1048,
1062
],
[
1288,
1302
],
[
1583,
1597
],
[
1894,
1908
],
[
2775,
2789
],
[
3704,
3718
],
[
4608,
4622
],
[
5356,
5370
],
[
6427,
6441
],
[
6655,
6669
],
[
7011,
7025
],
[
7568,
7582
],
[
7897,
7911
],
[
8106,
8120
],
[
8229,
8243
],
[
9042,
9056
],
[
9474,
9488
],
[
9734,
9748
],
[
10469,
10483
],
[
10812,
10826
],
[
10863,
10877
],
[
10930,
10944
],
[
12129,
12143
],
[
12180,
12194
],
[
12247,
12261
]
],
[
[
850,
861
],
[
2065,
2076
],
[
2972,
2983
],
[
3951,
3962
],
[
4812,
4823
],
[
6025,
6036
],
[
7142,
7153
],
[
8409,
8420
],
[
9917,
9928
],
[
11118,
11129
],
[
11192,
11203
]
],
[
[
897,
906
],
[
1455,
1464
],
[
1776,
1785
],
[
2032,
2041
],
[
2939,
2948
],
[
3918,
3927
],
[
4779,
4788
],
[
5971,
5980
],
[
6194,
6203
],
[
6289,
6298
],
[
7079,
7088
],
[
8346,
8355
],
[
9854,
9863
],
[
11055,
11064
]
],
[
[
914,
940
],
[
976,
978
],
[
6935,
6937
],
[
8030,
8032
],
[
9658,
9660
],
[
10736,
10738
],
[
12446,
12448
]
],
[
[
949,
975
]
],
[
[
6913,
6934
]
],
[
[
8008,
8029
]
],
[
[
9627,
9657
]
],
[
[
10710,
10735
]
]
] |
"""
This is an end-to-end release test automation script used to kick off
periodic release tests running on Anyscale.
The tool leverages app configs and compute templates.
Calling this script will run a single release test.
Example:
python e2e.py --test-config ~/ray/release/xgboost_tests/xgboost_tests.yaml --test-name tune_small
The following steps are then performed:
1. It will look up the test tune_small in the file xgboost_tests.yaml
2. It will fetch the specified app config and compute template and register
those with anyscale (if they don’t exist yet)
3. It waits until the app config is built
4. It then kicks off the script defined in the run block
5. When the script is finished, it will fetch the latest logs, the full log
output, and any artifacts specified in the artifacts block.
6. The full logs and artifacts will be stored in a s3 bucket
7. It will also fetch the json file specified in the run block as results.
This is the file where you should write your metrics to.
8. All results are then stored in a database.
Specifically it will store the following fields:
- Timestamp
- Test name
- Status (finished, error, timeout, invalid)
- Last logs (50 lines)
- results (see above)
- artifacts (links to s3 files)
Then the script exits. If an error occurs at any time, a fail result is
written to the database.
Writing a new release test
--------------------------
Each release test requires the following:
1. It has to be added in a release test yaml file, describing meta information
about the test (e.g. name, command to run, timeout)
2. You need an app config yaml
3. You need a compute template yaml
4. You need to define a command to run. This is usually a python script.
The command should accept (or ignore) a single optional
`--smoke-test` argument.
Usually the command should write its result metrics to a json file.
The json filename is available in the TEST_OUTPUT_JSON env variable.
5. Add your test in release/.buildkite/build_pipeline.py.
The script will have access to these environment variables:
"RAY_ADDRESS": os.environ.get("RAY_ADDRESS", "auto")
"TEST_OUTPUT_JSON": results_json_filename
"IS_SMOKE_TEST": "1" if smoke_test else "0"
For an example, take a look at the XGBoost test suite:
https://github.com/ray-project/ray/blob/master/release/xgboost_tests/xgboost_tests.yaml
These all use the same app configs and similar compute templates. This means
that app configs can be re-used across runs and only have to be built once.
App configs and compute templates can interpret environment variables.
A notable one is the `RAY_WHEELS` variable which points to the wheels that
should be tested (e.g. latest master wheels). You might want to include
something like this in your `post_build_cmds`:
- pip3 install -U {{ env["RAY_WHEELS"] | default("ray") }}
If you want to force rebuilds, consider using something like
- echo {{ env["TIMESTAMP"] }}
so that your app configs changes each time the script is executed. If you
only want to trigger rebuilds once per day, use `DATESTAMP` instead:
- echo {{ env["DATESTAMP"] }}
Local testing
-------------
For local testing, make sure to authenticate with the ray-ossci AWS user
(e.g. by setting the respective environment variables obtained from go/aws),
or use the `--no-report` command line argument.
Also make sure to set these environment variables:
- ANYSCALE_CLI_TOKEN (should contain your anyscale credential token)
- ANYSCALE_PROJECT (should point to a project ID you have access to)
A test can then be run like this:
python e2e.py --no-report --test-config ~/ray/release/xgboost_tests/xgboost_tests.yaml --test-name tune_small
The `--no-report` option disables storing the results in the DB and
artifacts on S3. If you set this option, you do not need access to the
ray-ossci AWS user.
Using Compilation on Product + App Config Override
--------------------------------------------------
For quick iteration when debugging a release test, go/compile-on-product allows
you to easily modify and recompile Ray, such that the recompilation happens
within an app build step and can benefit from a warm Bazel cache. See
go/compile-on-product for more information.
After kicking off the app build, you can give the app config ID to this script
as an app config override, where the indicated app config will be used instead
of the app config given in the test config. E.g., running
python e2e.py --no-report --test-config ~/ray/benchmarks/benchmark_tests.yaml --test-name=single_node --app-config-id-override=apt_TBngEXXXrhipMXgexVcrpC9i
would run the single_node benchmark test with the apt_TBngEXXXrhipMXgexVcrpC9i
app config instead of the app config given in
~/ray/benchmarks/benchmark_tests.yaml. If the build for the app config is still
in progress, the script will wait until it completes, same as for a locally
defined app config.
Running on Head Node vs Running with Anyscale Connect
-----------------------------------------------------
By default release tests run their drivers on the head node. Support is being
added to run release tests that execute the driver as a subprocess and run
the workload on Anyscale product via Anyscale connect.
Note that when the driver in the test is a subprocess of releaser, releaser
cannot be terminated before the test finishes.
Other known feature gaps when running with Anyscale connect:
- Kicking off a test or checking progress is not supported.
- Downloading / uploading logs and artifacts are unsupported.
- Logs from remote may not have finished streaming, before the driver exits.
Long running tests
------------------
Long running tests can be kicked off with by adding the --kick-off-only
parameters to the e2e script. The status can then be checked with the
--check command.
Long running test sessions will be terminated after `timeout` seconds, after
which the latest result in the TEST_OUTPUT_JSON will be reported. Thus,
long running release tests should update this file periodically.
There are also two config options to configure this behavior. The `time_key`
is needed to track the latest update of the TEST_OUTPUT_JSON and should
contain a floating point number (usually `time.time()`). The
`max_update_delay` then specifies the maximum time in seconds that may pass
without an update to the results json. If the output file hasn't been updated
in e.g. 60 seconds, this could indicate that the command is stale/frozen and
the test should thus fail.
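A long running test can satisfy this with a small helper like the following
sketch (assuming `time_key: last_update` and that the test reads the output
location from the TEST_OUTPUT_JSON environment variable):
  import json, os, time

  def write_results(results: dict):
      results["last_update"] = time.time()
      with open(os.environ["TEST_OUTPUT_JSON"], "wt") as f:
          json.dump(results, f)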
Release test yaml example
-------------------------
- name: example
owner:
mail: "[email protected]" # Currently not used
    slack: "@tune-team"  # Currently not used
cluster:
app_config: app_config.yaml # Relative to the release test yaml
compute_template: tpl_cpu.yaml
run:
timeout: 600 # in seconds
prepare: python wait_cluster.py 4 600 # prepare cmd to run before test
script: python workloads/train.py # actual release test command
# Only needed for long running test
time_key: last_update # Key in the results json indicating current time
max_update_delay: 30 # If state hasn't been updated in 30s, terminate
# This block is optional
artifacts:
# Artifact name: location on head node
- detailed_output: detailed_output.csv
# This block is optional. If present, the contents will be
# deep updated for smoke testing
smoke_test:
cluster:
compute_template: tpl_cpu_smoketest.yaml
""" # noqa: E501
import argparse
import boto3
import collections
import copy
import datetime
import hashlib
import jinja2
import json
import logging
import multiprocessing
import os
import requests
import shutil
import subprocess
import sys
import tempfile
import time
from queue import Empty
from typing import Any, Dict, Optional, Tuple, List
import yaml
import anyscale
import anyscale.conf
from anyscale.api import instantiate_api_client
from anyscale.controllers.session_controller import SessionController
from anyscale.sdk.anyscale_client.sdk import AnyscaleSDK
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(stream=sys.stdout)
formatter = logging.Formatter(fmt="[%(levelname)s %(asctime)s] "
"%(filename)s: %(lineno)d "
"%(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
def getenv_default(key: str, default: Optional[str] = None):
"""Return environment variable with default value"""
# If the environment variable is set but "", still return default
return os.environ.get(key, None) or default
GLOBAL_CONFIG = {
"ANYSCALE_USER": getenv_default("ANYSCALE_USER",
"[email protected]"),
"ANYSCALE_HOST": getenv_default("ANYSCALE_HOST",
"https://beta.anyscale.com"),
"ANYSCALE_CLI_TOKEN": getenv_default("ANYSCALE_CLI_TOKEN"),
"ANYSCALE_CLOUD_ID": getenv_default(
"ANYSCALE_CLOUD_ID",
"cld_4F7k8814aZzGG8TNUGPKnc"), # cld_4F7k8814aZzGG8TNUGPKnc
"ANYSCALE_PROJECT": getenv_default("ANYSCALE_PROJECT", ""),
"RAY_VERSION": getenv_default("RAY_VERSION", "2.0.0.dev0"),
"RAY_REPO": getenv_default("RAY_REPO",
"https://github.com/ray-project/ray.git"),
"RAY_BRANCH": getenv_default("RAY_BRANCH", "master"),
"RELEASE_AWS_BUCKET": getenv_default("RELEASE_AWS_BUCKET",
"ray-release-automation-results"),
"RELEASE_AWS_LOCATION": getenv_default("RELEASE_AWS_LOCATION", "dev"),
"RELEASE_AWS_DB_NAME": getenv_default("RELEASE_AWS_DB_NAME", "ray_ci"),
"RELEASE_AWS_DB_TABLE": getenv_default("RELEASE_AWS_DB_TABLE",
"release_test_result"),
"RELEASE_AWS_DB_SECRET_ARN": getenv_default(
"RELEASE_AWS_DB_SECRET_ARN",
"arn:aws:secretsmanager:us-west-2:029272617770:secret:"
"rds-db-credentials/cluster-7RB7EYTTBK2EUC3MMTONYRBJLE/ray_ci-MQN2hh",
),
"RELEASE_AWS_DB_RESOURCE_ARN": getenv_default(
"RELEASE_AWS_DB_RESOURCE_ARN",
"arn:aws:rds:us-west-2:029272617770:cluster:ci-reporting",
),
"RELEASE_RESULTS_DIR": getenv_default("RELEASE_RESULTS_DIR",
"/tmp/ray_release_test_artifacts"),
"DATESTAMP": str(datetime.datetime.now().strftime("%Y%m%d")),
"TIMESTAMP": str(int(datetime.datetime.now().timestamp())),
"EXPIRATION_1D": str((datetime.datetime.now() +
datetime.timedelta(days=1)).strftime("%Y-%m-%d")),
"EXPIRATION_2D": str((datetime.datetime.now() +
datetime.timedelta(days=2)).strftime("%Y-%m-%d")),
"EXPIRATION_3D": str((datetime.datetime.now() +
datetime.timedelta(days=3)).strftime("%Y-%m-%d")),
}
REPORT_S = 30
RETRY_MULTIPLIER = 2
def exponential_backoff_retry(f, retry_exceptions, initial_retry_delay_s,
max_retries):
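    # Call f(), retrying on the given exceptions with an exponentially
    # growing delay between attempts.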
retry_cnt = 0
retry_delay_s = initial_retry_delay_s
while True:
try:
return f()
except retry_exceptions as e:
retry_cnt += 1
if retry_cnt > max_retries:
raise
logger.info(f"Retry function call failed due to {e} "
f"in {retry_delay_s} seconds...")
time.sleep(retry_delay_s)
retry_delay_s *= RETRY_MULTIPLIER
def maybe_fetch_api_token():
if GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] is None:
logger.info(
"Missing ANYSCALE_CLI_TOKEN, retrieving from AWS secrets store")
# NOTE(simon) This should automatically retrieve
# [email protected]'s anyscale token
GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] = boto3.client(
"secretsmanager", region_name="us-west-2"
).get_secret_value(
SecretId="arn:aws:secretsmanager:us-west-2:029272617770:secret:"
"release-automation/"
"anyscale-token20210505220406333800000001-BcUuKB")["SecretString"]
class PrepareCommandRuntimeError(RuntimeError):
pass
class ReleaseTestTimeoutError(RuntimeError):
pass
class SessionTimeoutError(ReleaseTestTimeoutError):
pass
class FileSyncTimeoutError(ReleaseTestTimeoutError):
pass
class CommandTimeoutError(ReleaseTestTimeoutError):
pass
class PrepareCommandTimeoutError(ReleaseTestTimeoutError):
pass
# e.g., App config failure.
class AppConfigBuildFailure(RuntimeError):
pass
class State:
def __init__(self, state: str, timestamp: float, data: Any):
self.state = state
self.timestamp = timestamp
self.data = data
sys.path.insert(0, anyscale.ANYSCALE_RAY_DIR)
def anyscale_project_url(project_id: str):
return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \
f"/o/anyscale-internal/projects/{project_id}" \
f"/?tab=session-list"
def anyscale_session_url(project_id: str, session_id: str):
return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \
f"/o/anyscale-internal/projects/{project_id}" \
f"/clusters/{session_id}"
def anyscale_compute_tpl_url(compute_tpl_id: str):
return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \
f"/o/anyscale-internal/configurations/cluster-computes" \
f"/{compute_tpl_id}"
def anyscale_app_config_build_url(build_id: str):
return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \
f"/o/anyscale-internal/configurations/app-config-details" \
f"/{build_id}"
def wheel_url(ray_version, git_branch, git_commit):
return f"https://s3-us-west-2.amazonaws.com/ray-wheels/" \
f"{git_branch}/{git_commit}/" \
f"ray-{ray_version}-cp37-cp37m-manylinux2014_x86_64.whl"
def wheel_exists(ray_version, git_branch, git_commit):
url = wheel_url(ray_version, git_branch, git_commit)
return requests.head(url).status_code == 200
def get_latest_commits(repo: str, branch: str = "master") -> List[str]:
cur = os.getcwd()
with tempfile.TemporaryDirectory() as tmpdir:
os.chdir(tmpdir)
clone_cmd = [
"git",
"clone",
"--filter=tree:0",
"--no-checkout",
# "--single-branch",
# "--depth=10",
f"--branch={branch}",
repo,
tmpdir,
]
log_cmd = [
"git",
"log",
"-n",
"10",
"--pretty=format:%H",
]
subprocess.check_output(clone_cmd)
commits = subprocess.check_output(log_cmd).decode(
sys.stdout.encoding).split("\n")
os.chdir(cur)
return commits
def find_ray_wheels(repo: str, branch: str, version: str):
url = None
commits = get_latest_commits(repo, branch)
logger.info(f"Latest 10 commits for branch {branch}: {commits}")
for commit in commits:
if wheel_exists(version, branch, commit):
url = wheel_url(version, branch, commit)
os.environ["RAY_WHEELS"] = url
os.environ["RAY_COMMIT"] = commit
logger.info(
f"Found wheels URL for Ray {version}, branch {branch}: "
f"{url}")
break
return url
def populate_wheels_sanity_check(commit: Optional[str] = None):
if not commit:
cmd = ("python -c 'import ray; print("
"\"No commit sanity check available, but this is the "
"Ray wheel commit:\", ray.__commit__)'")
else:
cmd = (f"python -c 'import ray; "
f"assert ray.__commit__ == \"{commit}\", ray.__commit__'")
os.environ["RAY_WHEELS_SANITY_CHECK"] = cmd
def _check_stop(stop_event: multiprocessing.Event, timeout_type: str):
if stop_event.is_set():
if timeout_type == "prepare_command":
raise PrepareCommandTimeoutError(
"Process timed out in the prepare command stage.")
if timeout_type == "command":
raise CommandTimeoutError(
"Process timed out while running a command.")
elif timeout_type == "file_sync":
raise FileSyncTimeoutError(
"Process timed out while syncing files.")
elif timeout_type == "session":
raise SessionTimeoutError(
"Process timed out while starting a session.")
else:
assert False, "Unexpected timeout type."
def _deep_update(d, u):
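    # Recursively merge mapping u into dict d (used for smoke_test overrides).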
for k, v in u.items():
if isinstance(v, collections.abc.Mapping):
d[k] = _deep_update(d.get(k, {}), v)
else:
d[k] = v
return d
def _dict_hash(dt: Dict[Any, Any]) -> str:
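    # Deterministic hash of a dict, used to derive stable names for
    # compute templates and app configs.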
json_str = json.dumps(dt, sort_keys=True, ensure_ascii=True)
sha = hashlib.sha256()
sha.update(json_str.encode())
return sha.hexdigest()
def _load_config(local_dir: str, config_file: Optional[str]) -> Optional[Dict]:
if not config_file:
return None
config_path = os.path.join(local_dir, config_file)
with open(config_path, "rt") as f:
        # Read and then render the config as a jinja2 template.
content = f.read()
env = copy.deepcopy(os.environ)
env.update(GLOBAL_CONFIG)
content = jinja2.Template(content).render(env=env)
return yaml.safe_load(content)
def has_errored(result: Dict[Any, Any]) -> bool:
return result.get("status", "invalid") != "finished"
def report_result(test_suite: str, test_name: str, status: str, last_logs: str,
results: Dict[Any, Any], artifacts: Dict[Any, Any],
category: str):
now = datetime.datetime.utcnow()
rds_data_client = boto3.client("rds-data", region_name="us-west-2")
schema = GLOBAL_CONFIG["RELEASE_AWS_DB_TABLE"]
sql = (
f"INSERT INTO {schema} "
f"(created_on, test_suite, test_name, status, last_logs, "
f"results, artifacts, category) "
f"VALUES (:created_on, :test_suite, :test_name, :status, :last_logs, "
f":results, :artifacts, :category)")
parameters = [{
"name": "created_on",
"typeHint": "TIMESTAMP",
"value": {
"stringValue": now.strftime("%Y-%m-%d %H:%M:%S")
},
}, {
"name": "test_suite",
"value": {
"stringValue": test_suite
}
}, {
"name": "test_name",
"value": {
"stringValue": test_name
}
}, {
"name": "status",
"value": {
"stringValue": status
}
}, {
"name": "last_logs",
"value": {
"stringValue": last_logs
}
}, {
"name": "results",
"typeHint": "JSON",
"value": {
"stringValue": json.dumps(results)
},
}, {
"name": "artifacts",
"typeHint": "JSON",
"value": {
"stringValue": json.dumps(artifacts)
},
}, {
"name": "category",
"value": {
"stringValue": category
}
}]
# Default boto3 call timeout is 45 seconds.
retry_delay_s = 64
MAX_RDS_RETRY = 3
exponential_backoff_retry(
lambda: rds_data_client.execute_statement(
database=GLOBAL_CONFIG["RELEASE_AWS_DB_NAME"],
parameters=parameters,
secretArn=GLOBAL_CONFIG["RELEASE_AWS_DB_SECRET_ARN"],
resourceArn=GLOBAL_CONFIG["RELEASE_AWS_DB_RESOURCE_ARN"],
schema=schema,
sql=sql),
retry_exceptions=rds_data_client.exceptions.StatementTimeoutException,
initial_retry_delay_s=retry_delay_s,
max_retries=MAX_RDS_RETRY)
logger.info("Result has been persisted to the databse")
def log_results_and_artifacts(result: Dict):
results = result.get("results", {})
if results:
msg = "Observed the following results:\n\n"
for key, val in results.items():
msg += f" {key} = {val}\n"
else:
msg = "Did not find any results."
logger.info(msg)
artifacts = result.get("artifacts", {})
if artifacts:
msg = "Saved the following artifacts:\n\n"
for key, val in artifacts.items():
msg += f" {key} = {val}\n"
else:
msg = "Did not find any artifacts."
logger.info(msg)
def _cleanup_session(sdk: AnyscaleSDK, session_id: str):
if session_id:
# Just trigger a request. No need to wait until session shutdown.
sdk.terminate_session(
session_id=session_id, terminate_session_options={})
def search_running_session(sdk: AnyscaleSDK, project_id: str,
session_name: str) -> Optional[str]:
session_id = None
logger.info(f"Looking for existing session with name {session_name}")
result = sdk.search_sessions(
project_id=project_id,
sessions_query=dict(name=dict(equals=session_name)))
if len(result.results) > 0 and result.results[0].state == "Running":
logger.info("Found existing session.")
session_id = result.results[0].id
return session_id
def create_or_find_compute_template(
sdk: AnyscaleSDK,
project_id: str,
compute_tpl: Dict[Any, Any],
_repeat: bool = True) -> Tuple[Optional[str], Optional[str]]:
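    # Look up a compute template by its content hash and create it if it
    # does not exist yet. Returns (template_id, template_name).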
compute_tpl_id = None
compute_tpl_name = None
if compute_tpl:
# As of Anyscale 0.4.1, it is an error to use the same compute template
# name within the same organization, between different projects.
compute_tpl_name = f"{project_id}/compute/{_dict_hash(compute_tpl)}"
logger.info(f"Tests uses compute template "
f"with name {compute_tpl_name}. Looking up existing "
f"templates.")
paging_token = None
while not compute_tpl_id:
result = sdk.search_compute_templates(
dict(
project_id=project_id,
name=dict(equals=compute_tpl_name),
include_anonymous=True),
paging_token=paging_token)
paging_token = result.metadata.next_paging_token
for res in result.results:
if res.name == compute_tpl_name:
compute_tpl_id = res.id
logger.info(
f"Template already exists with ID {compute_tpl_id}")
break
if not paging_token:
break
if not compute_tpl_id:
logger.info(f"Compute template not found. "
f"Creating with name {compute_tpl_name}.")
try:
result = sdk.create_compute_template(
dict(
name=compute_tpl_name,
project_id=project_id,
config=compute_tpl))
compute_tpl_id = result.result.id
except Exception as e:
if _repeat:
logger.warning(
f"Got exception when trying to create compute "
f"template: {e}. Sleeping for 10 seconds and then "
f"try again once...")
time.sleep(10)
return create_or_find_compute_template(
sdk=sdk,
project_id=project_id,
compute_tpl=compute_tpl,
_repeat=False)
raise e
logger.info(f"Compute template created with ID {compute_tpl_id}")
return compute_tpl_id, compute_tpl_name
def create_or_find_app_config(
sdk: AnyscaleSDK,
project_id: str,
app_config: Dict[Any, Any],
_repeat: bool = True) -> Tuple[Optional[str], Optional[str]]:
app_config_id = None
app_config_name = None
if app_config:
app_config_name = f"{project_id}-{_dict_hash(app_config)}"
logger.info(f"Test uses an app config with hash {app_config_name}. "
f"Looking up existing app configs with this name.")
paging_token = None
while not app_config_id:
result = sdk.list_app_configs(
project_id=project_id, count=50, paging_token=paging_token)
paging_token = result.metadata.next_paging_token
for res in result.results:
if res.name == app_config_name:
app_config_id = res.id
logger.info(
f"App config already exists with ID {app_config_id}")
break
if not paging_token or app_config_id:
break
if not app_config_id:
logger.info("App config not found. Creating new one.")
try:
result = sdk.create_app_config(
dict(
name=app_config_name,
project_id=project_id,
config_json=app_config))
app_config_id = result.result.id
except Exception as e:
if _repeat:
logger.warning(
f"Got exception when trying to create app "
f"config: {e}. Sleeping for 10 seconds and then "
f"try again once...")
time.sleep(10)
return create_or_find_app_config(
sdk=sdk,
project_id=project_id,
app_config=app_config,
_repeat=False)
raise e
logger.info(f"App config created with ID {app_config_id}")
return app_config_id, app_config_name
def install_app_config_packages(app_config: Dict[Any, Any]):
os.environ.update(app_config.get("env_vars", {}))
packages = app_config["python"]["pip_packages"]
for package in packages:
subprocess.check_output(["pip", "install", "-U", package], text=True)
def install_matching_ray():
wheel = os.environ.get("RAY_WHEELS", None)
if not wheel:
return
assert "manylinux2014_x86_64" in wheel, wheel
if sys.platform == "darwin":
platform = "macosx_10_15_intel"
elif sys.platform == "win32":
platform = "win_amd64"
else:
platform = "manylinux2014_x86_64"
wheel = wheel.replace("manylinux2014_x86_64", platform)
subprocess.check_output(["pip", "uninstall", "-y", "ray"], text=True)
subprocess.check_output(["pip", "install", "-U", wheel], text=True)
def wait_for_build_or_raise(sdk: AnyscaleSDK,
app_config_id: Optional[str]) -> Optional[str]:
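    # Wait until the app config build completes, returning its build ID or
    # raising AppConfigBuildFailure if the build failed.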
if not app_config_id:
return None
# Fetch build
build_id = None
last_status = None
result = sdk.list_builds(app_config_id)
for build in sorted(result.results, key=lambda b: b.created_at):
build_id = build.id
last_status = build.status
if build.status == "failed":
continue
if build.status == "succeeded":
logger.info(f"Link to app config build: "
f"{anyscale_app_config_build_url(build_id)}")
return build_id
if last_status == "failed":
raise AppConfigBuildFailure("App config build failed.")
if not build_id:
raise AppConfigBuildFailure("No build found for app config.")
# Build found but not failed/finished yet
completed = False
start_wait = time.time()
next_report = start_wait + REPORT_S
logger.info(f"Waiting for build {build_id} to finish...")
logger.info(f"Track progress here: "
f"{anyscale_app_config_build_url(build_id)}")
while not completed:
now = time.time()
if now > next_report:
logger.info(f"... still waiting for build {build_id} to finish "
f"({int(now - start_wait)} seconds) ...")
next_report = next_report + REPORT_S
result = sdk.get_build(build_id)
build = result.result
if build.status == "failed":
raise AppConfigBuildFailure(
f"App config build failed. Please see "
f"{anyscale_app_config_build_url(build_id)} for details")
if build.status == "succeeded":
logger.info("Build succeeded.")
return build_id
completed = build.status not in ["in_progress", "pending"]
if completed:
raise AppConfigBuildFailure(
f"Unknown build status: {build.status}. Please see "
f"{anyscale_app_config_build_url(build_id)} for details")
time.sleep(1)
return build_id
def run_job(cluster_name: str, compute_tpl_name: str, cluster_env_name: str,
job_name: str, min_workers: str, script: str,
script_args: List[str], env_vars: Dict[str, str],
autosuspend: int) -> Tuple[int, str]:
# Start cluster and job
address = f"anyscale://{cluster_name}?autosuspend={autosuspend}"
logger.info(f"Starting job {job_name} with Ray address: {address}")
env = copy.deepcopy(os.environ)
env.update(GLOBAL_CONFIG)
env.update(env_vars)
env["RAY_ADDRESS"] = address
env["RAY_JOB_NAME"] = job_name
env["RAY_RELEASE_MIN_WORKERS"] = str(min_workers)
proc = subprocess.Popen(
script.split(" ") + script_args,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True)
proc.stdout.reconfigure(line_buffering=True)
logs = ""
for line in proc.stdout:
logs += line
sys.stdout.write(line)
proc.wait()
return proc.returncode, logs
def create_and_wait_for_session(
sdk: AnyscaleSDK,
stop_event: multiprocessing.Event,
session_name: str,
session_options: Dict[Any, Any],
) -> str:
# Create session
logger.info(f"Creating session {session_name}")
result = sdk.create_session(session_options)
session_id = result.result.id
# Trigger session start
logger.info(f"Starting session {session_name} ({session_id})")
session_url = anyscale_session_url(
project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id)
logger.info(f"Link to session: {session_url}")
result = sdk.start_session(session_id, start_session_options={})
sop_id = result.result.id
completed = result.result.completed
# Wait for session
logger.info(f"Waiting for session {session_name}...")
start_wait = time.time()
next_report = start_wait + REPORT_S
while not completed:
# Sleep 1 sec before next check.
time.sleep(1)
session_operation_response = sdk.get_session_operation(
sop_id, _request_timeout=30)
session_operation = session_operation_response.result
completed = session_operation.completed
_check_stop(stop_event, "session")
now = time.time()
if now > next_report:
logger.info(f"... still waiting for session {session_name} "
f"({int(now - start_wait)} seconds) ...")
next_report = next_report + REPORT_S
return session_id
def run_session_command(sdk: AnyscaleSDK,
session_id: str,
cmd_to_run: str,
result_queue: multiprocessing.Queue,
env_vars: Dict[str, str],
state_str: str = "CMD_RUN") -> Tuple[str, int]:
full_cmd = " ".join(f"{k}={v}"
for k, v in env_vars.items()) + " " + cmd_to_run
logger.info(f"Running command in session {session_id}: \n" f"{full_cmd}")
session_url = anyscale_session_url(
project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id)
logger.info(f"Link to session: {session_url}")
result_queue.put(State(state_str, time.time(), None))
result = sdk.create_session_command(
dict(session_id=session_id, shell_command=full_cmd))
scd_id = result.result.id
return scd_id, result
def wait_for_session_command_to_complete(create_session_command_result,
sdk: AnyscaleSDK,
scd_id: str,
stop_event: multiprocessing.Event,
state_str: str = "CMD_RUN"):
result = create_session_command_result
completed = result.result.finished_at is not None
start_wait = time.time()
next_report = start_wait + REPORT_S
while not completed:
# Sleep 1 sec before next check.
time.sleep(1)
result = exponential_backoff_retry(
lambda: sdk.get_session_command(session_command_id=scd_id),
retry_exceptions=Exception,
initial_retry_delay_s=10,
max_retries=3)
completed = result.result.finished_at
if state_str == "CMD_RUN":
_check_stop(stop_event, "command")
elif state_str == "CMD_PREPARE":
_check_stop(stop_event, "prepare_command")
now = time.time()
if now > next_report:
logger.info(f"... still waiting for command to finish "
f"({int(now - start_wait)} seconds) ...")
next_report = next_report + REPORT_S
status_code = result.result.status_code
runtime = time.time() - start_wait
if status_code != 0:
if state_str == "CMD_RUN":
raise RuntimeError(
f"Command returned non-success status: {status_code}")
elif state_str == "CMD_PREPARE":
raise PrepareCommandRuntimeError(
f"Prepare command returned non-success status: {status_code}")
return status_code, runtime
def get_command_logs(session_controller: SessionController,
scd_id: str,
lines: int = 50):
result = exponential_backoff_retry(
lambda: session_controller.api_client.get_execution_logs_api_v2_session_commands_session_command_id_execution_logs_get( # noqa: E501
session_command_id=scd_id,
start_line=-1 * lines,
end_line=0),
retry_exceptions=Exception,
initial_retry_delay_s=10,
max_retries=3)
return result.result.lines
def get_remote_json_content(
temp_dir: str,
session_name: str,
remote_file: Optional[str],
session_controller: SessionController,
):
if not remote_file:
logger.warning("No remote file specified, returning empty dict")
return {}
local_target_file = os.path.join(temp_dir, ".tmp.json")
session_controller.pull(
session_name=session_name,
source=remote_file,
target=local_target_file)
with open(local_target_file, "rt") as f:
return json.load(f)
def get_local_json_content(local_file: Optional[str], ):
if not local_file:
logger.warning("No local file specified, returning empty dict")
return {}
with open(local_file, "rt") as f:
return json.load(f)
def pull_artifacts_and_store_in_cloud(
temp_dir: str,
logs: str,
session_name: str,
test_name: str,
artifacts: Optional[Dict[Any, Any]],
session_controller: SessionController,
):
output_log_file = os.path.join(temp_dir, "output.log")
with open(output_log_file, "wt") as f:
f.write(logs)
bucket = GLOBAL_CONFIG["RELEASE_AWS_BUCKET"]
location = f"{GLOBAL_CONFIG['RELEASE_AWS_LOCATION']}" \
f"/{session_name}/{test_name}"
saved_artifacts = {}
s3_client = boto3.client("s3")
s3_client.upload_file(output_log_file, bucket, f"{location}/output.log")
saved_artifacts["output.log"] = f"s3://{bucket}/{location}/output.log"
# Download artifacts
if artifacts:
for name, remote_file in artifacts.items():
logger.info(f"Downloading artifact `{name}` from "
f"{remote_file}")
local_target_file = os.path.join(temp_dir, name)
session_controller.pull(
session_name=session_name,
source=remote_file,
target=local_target_file)
# Upload artifacts to s3
s3_client.upload_file(local_target_file, bucket,
f"{location}/{name}")
saved_artifacts[name] = f"s3://{bucket}/{location}/{name}"
return saved_artifacts
def find_session_by_test_name(
sdk: AnyscaleSDK,
session_controller: SessionController,
temp_dir: str,
state_json: str,
project_id: str,
test_name: str,
) -> Optional[Tuple[str, str, Dict[Any, Any]]]:
paging_token = None
while True: # Will break if paging_token is None after first search
result = sdk.search_sessions(
project_id=project_id,
sessions_query=dict(
name=dict(contains=test_name),
state_filter=["Running"],
paging=dict(count=20, paging_token=paging_token)))
for session in result.results:
logger.info(f"Found sessions {session.name}")
if not session.name.startswith(test_name):
continue
try:
session_state = get_remote_json_content(
temp_dir=temp_dir,
session_name=session.name,
remote_file=state_json,
session_controller=session_controller)
except Exception as exc:
raise RuntimeError(f"Could not get remote json content "
f"for session {session.name}") from exc
if session_state.get("test_name") == test_name:
return session.id, session.name, session_state
        paging_token = result.metadata.next_paging_token
        if not paging_token:
            return None
def get_latest_running_command_id(sdk: AnyscaleSDK, session_id: str
) -> Tuple[Optional[str], Optional[bool]]:
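    # Return the ID of the latest session command. If a command is still
    # running, return (that command's ID, None); otherwise the second element
    # indicates whether all commands succeeded.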
scd_id = None
paging_token = None
success = None
while not scd_id:
result = sdk.list_session_commands(
session_id=session_id, paging_token=paging_token)
paging_token = result.metadata.next_paging_token
for cmd in result.results:
if not scd_id:
scd_id = cmd.id
completed = cmd.finished_at is not None
if completed:
if success is None:
success = True
success = success and cmd.status_code == 0
if not completed:
return cmd.id, None
return scd_id, success or False
def run_test_config(
local_dir: str,
project_id: str,
test_name: str,
test_config: Dict[Any, Any],
commit_url: str,
session_name: str = None,
smoke_test: bool = False,
no_terminate: bool = False,
kick_off_only: bool = False,
check_progress: bool = False,
upload_artifacts: bool = True,
keep_results_dir: bool = False,
app_config_id_override: Optional[str] = None,
) -> Dict[Any, Any]:
"""
Returns:
Dict with the following entries:
status (str): One of [finished, error, timeout]
command_link (str): Link to command (Anyscale web UI)
last_logs (str): Last logs (excerpt) to send to owner
artifacts (dict): Dict of artifacts
Key: Name
Value: S3 URL
"""
# Todo (mid-term): Support other cluster definitions
# (not only cluster configs)
cluster_config_rel_path = test_config["cluster"].get(
"cluster_config", None)
cluster_config = _load_config(local_dir, cluster_config_rel_path)
app_config_rel_path = test_config["cluster"].get("app_config", None)
app_config = _load_config(local_dir, app_config_rel_path)
compute_tpl_rel_path = test_config["cluster"].get("compute_template", None)
compute_tpl = _load_config(local_dir, compute_tpl_rel_path)
stop_event = multiprocessing.Event()
result_queue = multiprocessing.Queue()
if not session_name:
session_name = f"{test_name}_{int(time.time())}"
temp_dir = tempfile.mkdtemp()
# Result and state files
results_json = test_config["run"].get("results", None)
if results_json is None:
results_json = "/tmp/release_test_out.json"
state_json = test_config["run"].get("state", None)
if state_json is None:
state_json = "/tmp/release_test_state.json"
env_vars = {
"RAY_ADDRESS": os.environ.get("RAY_ADDRESS", "auto"),
"TEST_OUTPUT_JSON": results_json,
"TEST_STATE_JSON": state_json,
"IS_SMOKE_TEST": "1" if smoke_test else "0",
}
with open(os.path.join(local_dir, ".anyscale.yaml"), "wt") as f:
f.write(f"project_id: {project_id}")
os.chdir(local_dir)
# Setup interface
# Unfortunately, there currently seems to be no great way to
# transfer files with the Anyscale SDK.
# So we use the session controller instead.
sdk = AnyscaleSDK(auth_token=GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"])
session_controller = SessionController(
api_client=instantiate_api_client(
cli_token=GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"],
host=GLOBAL_CONFIG["ANYSCALE_HOST"],
),
anyscale_api_client=sdk.api_client,
)
timeout = test_config["run"].get("timeout", 1800)
if "RELEASE_OVERRIDE_TIMEOUT" in os.environ:
previous_timeout = timeout
timeout = int(os.environ.get("RELEASE_OVERRIDE_TIMEOUT", str(timeout)))
logger.warning(f"Release test timeout override: {timeout} "
f"(would have been {previous_timeout})")
# If a test is long running, timeout does not mean it failed
is_long_running = test_config["run"].get("long_running", False)
build_id_override = None
if test_config["run"].get("use_connect"):
autosuspend_mins = test_config["run"].get("autosuspend_mins", 5)
assert not kick_off_only, \
"Unsupported for running with Anyscale connect."
if app_config_id_override is not None:
logger.info(
"Using connect and an app config override, waiting until "
"build finishes so we can fetch the app config in order to "
"install its pip packages locally.")
build_id_override = wait_for_build_or_raise(
sdk, app_config_id_override)
response = sdk.get_cluster_environment_build(build_id_override)
app_config = response.result.config_json
install_app_config_packages(app_config)
install_matching_ray()
elif "autosuspend_mins" in test_config["run"]:
raise ValueError(
"'autosuspend_mins' is only supported if 'use_connect' is True.")
# Add information to results dict
def _update_results(results: Dict):
if "last_update" in results:
results["last_update_diff"] = time.time() - results["last_update"]
if smoke_test:
results["smoke_test"] = True
def _process_finished_command(session_controller: SessionController,
scd_id: str,
results: Optional[Dict] = None,
runtime: int = None,
commit_url: str = None,
session_url: str = None):
logger.info("Command finished successfully.")
if results_json:
results = results or get_remote_json_content(
temp_dir=temp_dir,
session_name=session_name,
remote_file=results_json,
session_controller=session_controller,
)
else:
results = {"passed": 1}
_update_results(results)
if scd_id:
logs = get_command_logs(session_controller, scd_id,
test_config.get("log_lines", 50))
else:
logs = "No command found to fetch logs for"
if upload_artifacts:
saved_artifacts = pull_artifacts_and_store_in_cloud(
temp_dir=temp_dir,
logs=logs, # Also save logs in cloud
session_name=session_name,
test_name=test_name,
artifacts=test_config.get("artifacts", {}),
session_controller=session_controller,
)
logger.info("Fetched results and stored on the cloud. Returning.")
else:
saved_artifacts = {}
logger.info("Usually I would have fetched the results and "
"artifacts and stored them on S3.")
# Add these metadata here to avoid changing SQL schema.
results["_runtime"] = runtime
results["_session_url"] = session_url
results["_commit_url"] = commit_url
results["_stable"] = test_config.get("stable", True)
result_queue.put(
State(
"END",
time.time(),
{
"status": "finished",
"last_logs": logs,
"results": results,
"artifacts": saved_artifacts,
},
))
# When running the test script in client mode, the finish command is a
# completed local process.
def _process_finished_client_command(returncode: int, logs: str):
if upload_artifacts:
saved_artifacts = pull_artifacts_and_store_in_cloud(
temp_dir=temp_dir,
logs=logs, # Also save logs in cloud
session_name=session_name,
test_name=test_name,
artifacts=None,
session_controller=None,
)
logger.info("Stored results on the cloud. Returning.")
else:
saved_artifacts = {}
logger.info("Usually I would have fetched the results and "
"artifacts and stored them on S3.")
if results_json:
results = get_local_json_content(local_file=results_json, )
else:
results = {
"passed": int(returncode == 0),
}
results["returncode"] = returncode
_update_results(results)
result_queue.put(
State(
"END",
time.time(),
{
"status": "finished",
"last_logs": logs,
"results": results,
"artifacts": saved_artifacts,
},
))
def _run(logger):
# These values will be set as the test runs.
session_url = None
runtime = None
anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"]
test_uses_ray_connect = test_config["run"].get("use_connect")
session_id = None
scd_id = None
try:
# First, look for running sessions
session_id = search_running_session(sdk, project_id, session_name)
compute_tpl_name = None
app_config_id = app_config_id_override
app_config_name = None
build_id = build_id_override
if not session_id:
logger.info("No session found.")
# Start session
session_options = dict(
name=session_name, project_id=project_id)
if cluster_config is not None:
logging.info("Starting session with cluster config")
cluster_config_str = json.dumps(cluster_config)
session_options["cluster_config"] = cluster_config_str
session_options["cloud_id"] = (
GLOBAL_CONFIG["ANYSCALE_CLOUD_ID"], )
session_options["uses_app_config"] = False
else:
logging.info("Starting session with app/compute config")
# Find/create compute template
compute_tpl_id, compute_tpl_name = \
create_or_find_compute_template(
sdk, project_id, compute_tpl)
logger.info(f"Link to compute template: "
f"{anyscale_compute_tpl_url(compute_tpl_id)}")
# Find/create app config
if app_config_id is None:
(
app_config_id,
app_config_name,
) = create_or_find_app_config(sdk, project_id,
app_config)
else:
logger.info(
f"Using override app config {app_config_id}")
app_config_name = sdk.get_app_config(
app_config_id).result.name
if build_id is None:
# We might have already retrieved the build ID when
# installing app config packages locally if using
# connect, so only get the build ID if it's not set.
build_id = wait_for_build_or_raise(sdk, app_config_id)
session_options["compute_template_id"] = compute_tpl_id
session_options["build_id"] = build_id
session_options["uses_app_config"] = True
# Start session
session_id = create_and_wait_for_session(
sdk=sdk,
stop_event=stop_event,
session_name=session_name,
session_options=session_options,
)
prepare_command = test_config["run"].get("prepare")
# Write test state json
test_state_file = os.path.join(local_dir, "test_state.json")
with open(test_state_file, "wt") as f:
json.dump({
"start_time": time.time(),
"test_name": test_name
}, f)
if prepare_command or not test_uses_ray_connect:
if test_uses_ray_connect:
logger.info("Found a prepare command, so pushing it "
"to the session.")
# Rsync up
logger.info("Syncing files to session...")
session_controller.push(
session_name=session_name,
source=None,
target=None,
config=None,
all_nodes=False,
)
logger.info("Syncing test state to session...")
session_controller.push(
session_name=session_name,
source=test_state_file,
target=state_json,
config=None,
all_nodes=False,
)
session_url = anyscale_session_url(
project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"],
session_id=session_id)
_check_stop(stop_event, "file_sync")
# Optionally run preparation command
if prepare_command:
logger.info(
f"Running preparation command: {prepare_command}")
scd_id, result = run_session_command(
sdk=sdk,
session_id=session_id,
cmd_to_run=prepare_command,
result_queue=result_queue,
env_vars=env_vars,
state_str="CMD_PREPARE")
_, _ = wait_for_session_command_to_complete(
result,
sdk=sdk,
scd_id=scd_id,
stop_event=stop_event,
state_str="CMD_PREPARE")
if test_uses_ray_connect:
script_args = test_config["run"].get("args", [])
if smoke_test:
script_args += ["--smoke-test"]
min_workers = 0
for node_type in compute_tpl["worker_node_types"]:
min_workers += node_type["min_workers"]
# Build completed, use job timeout
result_queue.put(State("CMD_RUN", time.time(), None))
returncode, logs = run_job(
cluster_name=session_name,
compute_tpl_name=compute_tpl_name,
cluster_env_name=app_config_name,
job_name=session_name,
min_workers=min_workers,
script=test_config["run"]["script"],
script_args=script_args,
env_vars=env_vars,
autosuspend=autosuspend_mins)
_process_finished_client_command(returncode, logs)
return
# Run release test command
cmd_to_run = test_config["run"]["script"] + " "
args = test_config["run"].get("args", [])
if args:
cmd_to_run += " ".join(args) + " "
if smoke_test:
cmd_to_run += " --smoke-test"
scd_id, result = run_session_command(
sdk=sdk,
session_id=session_id,
cmd_to_run=cmd_to_run,
result_queue=result_queue,
env_vars=env_vars,
state_str="CMD_RUN")
if not kick_off_only:
_, runtime = wait_for_session_command_to_complete(
result,
sdk=sdk,
scd_id=scd_id,
stop_event=stop_event,
state_str="CMD_RUN")
_process_finished_command(
session_controller=session_controller,
scd_id=scd_id,
runtime=runtime,
session_url=session_url,
commit_url=commit_url)
else:
result_queue.put(
State("END", time.time(), {
"status": "kickoff",
"last_logs": ""
}))
except (ReleaseTestTimeoutError, Exception) as e:
logger.error(e, exc_info=True)
logs = str(e)
if scd_id is not None:
try:
logs = logs + "; Command logs:" + get_command_logs(
session_controller, scd_id,
test_config.get("log_lines", 50))
except Exception as e2:
logger.error(e2, exc_info=True)
# Long running tests are "finished" successfully when
# timed out
if isinstance(e, ReleaseTestTimeoutError) and is_long_running:
_process_finished_command(
session_controller=session_controller, scd_id=scd_id)
else:
timeout_type = ""
runtime = None
if isinstance(e, CommandTimeoutError):
timeout_type = "timeout"
runtime = 0
elif (isinstance(e, PrepareCommandTimeoutError)
or isinstance(e, FileSyncTimeoutError)
or isinstance(e, SessionTimeoutError)
or isinstance(e, PrepareCommandRuntimeError)
or isinstance(e, AppConfigBuildFailure)):
timeout_type = "infra_timeout"
runtime = None
elif isinstance(e, RuntimeError):
timeout_type = "runtime_error"
runtime = 0
else:
timeout_type = "unknown timeout"
runtime = None
# Add these metadata here to avoid changing SQL schema.
results = {}
results["_runtime"] = runtime
results["_session_url"] = session_url
results["_commit_url"] = commit_url
results["_stable"] = test_config.get("stable", True)
result_queue.put(
State(
"END", time.time(), {
"status": timeout_type,
"last_logs": logs,
"results": results
}))
finally:
if no_terminate:
logger.warning(
"`no_terminate` is set to True, so the session will "
"*not* be terminated!")
else:
_cleanup_session(sdk, session_id)
def _check_progress(logger):
anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"]
should_terminate = False
session_id = None
scd_id = None
try:
existing_session = find_session_by_test_name(
sdk=sdk,
session_controller=session_controller,
temp_dir=temp_dir,
state_json=state_json,
project_id=project_id,
test_name=test_name)
if existing_session is None:
logger.info(f"Found no existing session for {test_name}")
result_queue.put(
State("END", time.time(), {
"status": "nosession",
"last_logs": ""
}))
return
session_id, session_name, session_state = existing_session
logger.info(f"Found existing session for {test_name}: "
f"{session_name}")
scd_id, success = get_latest_running_command_id(
sdk=sdk, session_id=session_id)
latest_result = get_remote_json_content(
temp_dir=temp_dir,
session_name=session_name,
remote_file=results_json,
session_controller=session_controller,
)
# Fetch result json and check if it has been updated recently
result_time_key = test_config["run"].get("time_key", None)
maximum_update_delay = test_config["run"].get(
"max_update_delay", None)
if result_time_key and maximum_update_delay:
last_update = latest_result.get(result_time_key, None)
if not last_update:
result_queue.put(
State(
"END", time.time(), {
"status": "error",
"last_logs": f"Test did not store "
f"{result_time_key} in the "
f"results json."
}))
return
delay = time.time() - last_update
logger.info(f"Last update was at {last_update:.2f}. "
f"This was {delay:.2f} seconds ago "
f"(maximum allowed: {maximum_update_delay})")
if delay > maximum_update_delay:
raise RuntimeError(
f"Test did not update the results json within "
f"the last {maximum_update_delay} seconds.")
if time.time() - session_state["start_time"] > timeout:
# Long running test reached timeout
logger.info(
f"Test command reached timeout after {timeout} seconds")
_process_finished_command(
session_controller=session_controller,
scd_id=scd_id,
results=latest_result)
should_terminate = True
elif success:
logger.info("All commands finished.")
_process_finished_command(
session_controller=session_controller,
scd_id=scd_id,
results=latest_result)
should_terminate = True
else:
rest_time = timeout - time.time() + session_state["start_time"]
logger.info(f"Test command should continue running "
f"for {rest_time} seconds")
result_queue.put(
State("END", time.time(), {
"status": "kickoff",
"last_logs": "Test is still running"
}))
except Exception as e:
logger.error(e, exc_info=True)
logs = str(e)
if scd_id is not None:
try:
logs = get_command_logs(session_controller, scd_id,
test_config.get("log_lines", 50))
logs += f"\n{str(e)}"
except Exception as e2:
logger.error(e2, exc_info=True)
result_queue.put(
State("END", time.time(), {
"status": "error",
"last_logs": logs
}))
should_terminate = True
finally:
if should_terminate:
logger.warning("Terminating session")
_cleanup_session(sdk, session_id)
if not check_progress:
process = multiprocessing.Process(target=_run, args=(logger, ))
else:
process = multiprocessing.Process(
target=_check_progress, args=(logger, ))
build_timeout = test_config["run"].get("build_timeout", 1800)
project_url = anyscale_project_url(
project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"])
logger.info(f"Link to project: {project_url}")
msg = f"This will now run test {test_name}."
if smoke_test:
msg += " This is a smoke test."
if is_long_running:
msg += " This is a long running test."
logger.info(msg)
logger.info(f"Starting process with timeout {timeout} "
f"(build timeout {build_timeout})")
process.start()
# The timeout time will be updated after the build finished
# Build = App config + compute template build and session start
timeout_time = time.time() + build_timeout
result = {}
while process.is_alive():
try:
state: State = result_queue.get(timeout=1)
except (Empty, TimeoutError):
if time.time() > timeout_time:
stop_event.set()
logger.warning("Process timed out.")
if not is_long_running:
logger.warning("Terminating process in 10 seconds.")
time.sleep(10)
logger.warning("Terminating process now.")
process.terminate()
else:
logger.info("Process is long running. Give 2 minutes to "
"fetch result and terminate.")
start_terminate = time.time()
while time.time(
) < start_terminate + 120 and process.is_alive():
time.sleep(1)
if process.is_alive():
logger.warning("Terminating forcefully now.")
process.terminate()
else:
logger.info("Long running results collected.")
break
continue
if not isinstance(state, State):
raise RuntimeError(f"Expected `State` object, got {result}")
if state.state == "CMD_PREPARE":
# Reset timeout after build finished
timeout_time = state.timestamp + timeout
if state.state == "CMD_RUN":
# Reset timeout after prepare command or build finished
timeout_time = state.timestamp + timeout
elif state.state == "END":
result = state.data
break
while not result_queue.empty():
state = result_queue.get_nowait()
result = state.data
logger.info("Final check if everything worked.")
try:
result.setdefault("status", "error (status not found)")
except (TimeoutError, Empty):
result = {"status": "timeout", "last_logs": "Test timed out."}
logger.info(f"Final results: {result}")
log_results_and_artifacts(result)
if not keep_results_dir:
logger.info(f"Removing results dir {temp_dir}")
shutil.rmtree(temp_dir)
else:
# Write results.json
with open(os.path.join(temp_dir, "results.json"), "wt") as fp:
json.dump(result, fp)
out_dir = os.path.expanduser(GLOBAL_CONFIG["RELEASE_RESULTS_DIR"])
logger.info(f"Moving results dir {temp_dir} to persistent location "
f"{out_dir}")
shutil.rmtree(out_dir, ignore_errors=True)
shutil.copytree(temp_dir, out_dir)
logger.info(f"Dir contents: {os.listdir(out_dir)}")
return result
def run_test(test_config_file: str,
test_name: str,
project_id: str,
commit_url: str,
category: str = "unspecified",
smoke_test: bool = False,
no_terminate: bool = False,
kick_off_only: bool = False,
check_progress: bool = False,
report: bool = True,
keep_results_dir: bool = False,
session_name: Optional[str] = None,
app_config_id_override=None) -> Dict[str, Any]:
with open(test_config_file, "rt") as f:
test_configs = yaml.safe_load(f)
test_config_dict = {}
for test_config in test_configs:
name = test_config.pop("name")
test_config_dict[name] = test_config
if test_name not in test_config_dict:
raise ValueError(
f"Test with name `{test_name}` not found in test config file "
f"at `{test_config_file}`.")
test_config = test_config_dict[test_name]
if smoke_test and "smoke_test" in test_config:
smoke_test_config = test_config.pop("smoke_test")
test_config = _deep_update(test_config, smoke_test_config)
local_dir = os.path.dirname(test_config_file)
if "local_dir" in test_config:
# local_dir is relative to test_config_file
local_dir = os.path.join(local_dir, test_config["local_dir"])
if test_config["run"].get("use_connect"):
assert not kick_off_only, \
"--kick-off-only is unsupported when running with " \
"Anyscale connect."
assert not check_progress, \
"--check is unsupported when running with Anyscale connect."
if test_config.get("artifacts", {}):
logger.error(
"Saving artifacts are not yet supported when running with "
"Anyscale connect.")
result = run_test_config(
local_dir,
project_id,
test_name,
test_config,
commit_url,
session_name=session_name,
smoke_test=smoke_test,
no_terminate=no_terminate,
kick_off_only=kick_off_only,
check_progress=check_progress,
upload_artifacts=report,
keep_results_dir=keep_results_dir,
app_config_id_override=app_config_id_override)
status = result.get("status", "invalid")
if kick_off_only:
if status != "kickoff":
raise RuntimeError("Error kicking off test.")
logger.info("Kicked off test. It's now up to the `--check` "
"part of the script to track its process.")
return {}
else:
# `--check` or no kick off only
if status == "nosession":
logger.info(f"No running session found for test {test_name}, so "
f"assuming everything is fine.")
return {}
if status == "kickoff":
logger.info(f"Test {test_name} is still running.")
return {}
last_logs = result.get("last_logs", "No logs.")
test_suite = os.path.basename(test_config_file).replace(".yaml", "")
report_kwargs = dict(
test_suite=test_suite,
test_name=test_name,
status=status,
last_logs=last_logs,
results=result.get("results", {}),
artifacts=result.get("artifacts", {}),
category=category,
)
if report:
report_result(**report_kwargs)
else:
logger.info(f"Usually I would now report the following results:\n"
f"{report_kwargs}")
if has_errored(result):
raise RuntimeError(last_logs)
return report_kwargs
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"--test-config", type=str, required=True, help="Test config file")
parser.add_argument("--test-name", type=str, help="Test name in config")
parser.add_argument(
"--ray-wheels", required=False, type=str, help="URL to ray wheels")
parser.add_argument(
"--no-terminate",
action="store_true",
default=False,
help="Don't terminate session after failure")
parser.add_argument(
"--no-report",
action="store_true",
default=False,
help="Do not report any results or upload to S3")
parser.add_argument(
"--kick-off-only",
action="store_true",
default=False,
help="Kick off only (don't wait for command to finish)")
parser.add_argument(
"--check",
action="store_true",
default=False,
help="Check (long running) status")
parser.add_argument(
"--keep-results-dir",
action="store_true",
default=False,
help="Keep results in directory (named RELEASE_RESULTS_DIR), e.g. "
"for Buildkite artifact upload.")
parser.add_argument(
"--category",
type=str,
default="unspecified",
help="Category name, e.g. `release-1.3.0` (will be saved in database)")
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
parser.add_argument(
"--session-name",
required=False,
type=str,
help="Name of the session to run this test.")
parser.add_argument(
"--app-config-id-override",
required=False,
type=str,
help=("An app config ID, which will override the test config app "
"config."))
args, _ = parser.parse_known_args()
if not GLOBAL_CONFIG["ANYSCALE_PROJECT"]:
raise RuntimeError(
"You have to set the ANYSCALE_PROJECT environment variable!")
maybe_fetch_api_token()
if args.ray_wheels:
os.environ["RAY_WHEELS"] = str(args.ray_wheels)
url = str(args.ray_wheels)
elif not args.check and not os.environ.get("RAY_WHEELS"):
url = find_ray_wheels(
GLOBAL_CONFIG["RAY_REPO"],
GLOBAL_CONFIG["RAY_BRANCH"],
GLOBAL_CONFIG["RAY_VERSION"],
)
if not url:
raise RuntimeError(f"Could not find wheels for "
f"Ray {GLOBAL_CONFIG['RAY_VERSION']}, "
f"branch {GLOBAL_CONFIG['RAY_BRANCH']}")
# RAY_COMMIT is set by find_ray_wheels
elif os.environ.get("RAY_WHEELS"):
logger.info(f"Using Ray wheels provided from URL: "
f"{os.environ.get('RAY_WHEELS')}")
url = os.environ.get("RAY_WHEELS")
populate_wheels_sanity_check(os.environ.get("RAY_COMMIT", ""))
test_config_file = os.path.abspath(os.path.expanduser(args.test_config))
result_dict = run_test(
test_config_file=test_config_file,
test_name=args.test_name,
project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"],
commit_url=url,
category=args.category,
smoke_test=args.smoke_test,
no_terminate=args.no_terminate or args.kick_off_only,
kick_off_only=args.kick_off_only,
check_progress=args.check,
report=not args.no_report,
session_name=args.session_name,
keep_results_dir=args.keep_results_dir,
app_config_id_override=args.app_config_id_override,
)
if result_dict:
# If we get a result dict, check if any alerts should be raised
from alert import SUITE_TO_FN, default_handle_result
logger.info("Checking if results are valid...")
handle_result_kwargs = result_dict.copy()
handle_result_kwargs["created_on"] = None
test_suite = handle_result_kwargs.get("test_suite", None)
test_name = handle_result_kwargs.get("test_name", None)
category = handle_result_kwargs.get("category", None)
handle_fn = SUITE_TO_FN.get(test_suite, None)
if not handle_fn:
logger.warning(f"No handle for suite {test_suite}")
alert = default_handle_result(**handle_result_kwargs)
else:
alert = handle_fn(**handle_result_kwargs)
if alert:
# If we get an alert, the test failed.
raise RuntimeError(alert)
else:
logger.info(f"No alert raised for test {test_suite}/{test_name} "
f"({category}) - the test successfully passed!")
72701
],
[
72751,
72755
]
],
[
[
71043,
71044
]
],
[
[
71339,
71342
],
[
72385,
72388
]
],
[
[
71436,
71439
],
[
71606,
71609
],
[
72385,
72388
]
],
[
[
72025,
72028
],
[
72385,
72388
]
],
[
[
72133,
72149
],
[
72260,
72276
]
],
[
[
72211,
72222
],
[
72794,
72805
],
[
73029,
73040
]
],
[
[
72905,
72916
],
[
73312,
73323
]
],
[
[
72918,
72939
],
[
73456,
73477
]
],
[
[
73006,
73026
],
[
73056,
73076
],
[
73120,
73140
],
[
73185,
73205
],
[
73248,
73268
],
[
73480,
73500
],
[
73548,
73568
]
],
[
[
73107,
73117
],
[
73328,
73338
],
[
73422,
73432
],
[
73744,
73754
]
],
[
[
73173,
73182
],
[
73757,
73766
]
],
[
[
73237,
73245
],
[
73798,
73806
]
],
[
[
73300,
73309
],
[
73361,
73370
],
[
73536,
73545
]
],
[
[
73448,
73453
],
[
73582,
73587
],
[
73671,
73676
]
],
[
[
73528,
73533
],
[
73582,
73587
],
[
73671,
73676
]
]
] |
import argparse
import pysam
# parse the read group strings from a bam/sam header
# return array of read group strings
def read_groups_from_bam(bam_filename, use_libraries=False):
bam = pysam.AlignmentFile(bam_filename, "rb")
header = bam.header
results = {}
if 'RG' in header:
read_groups = header['RG']
if use_libraries:
field = 'LB'
else:
field = 'ID'
#print(read_groups)
for read_group in read_groups:
results[read_group[field]] = 1
#read_group['SM'] = sample
#print(read_group)
    # dict keys are already unique; sort them for deterministic output
    sorted_read_groups = sorted(results.keys())
return sorted_read_groups
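# Example (hypothetical file name; assumes a valid BAM with an @RG header):
#   read_groups_from_bam('sample.bam')                      # -> sorted read group IDs
#   read_groups_from_bam('sample.bam', use_libraries=True)  # -> sorted library names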
def read_groups_and_libraries_from_bam(bam_filename):
bam = pysam.AlignmentFile(bam_filename, "rb")
header = bam.header
results = {}
if 'RG' in header:
read_groups = header['RG']
#print(read_groups)
for read_group in read_groups:
read_group_id = read_group['ID']
read_group_library = read_group['LB']
results[read_group_id] = read_group_library
return results
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Show the read groups in a bam.")
parser.add_argument('-p', "--pulldown", help="report read groups colon-delimited for pulldown", action='store_true')
parser.add_argument('-l', "--libraries", help="report libraries instead of read groups", action='store_true')
parser.add_argument('-b', "--both", help="report read groups and libraries", action='store_true')
parser.add_argument("bam", help="bam for read groups")
args = parser.parse_args()
bam_filename = args.bam
if args.both:
read_groups_to_libraries = read_groups_and_libraries_from_bam(bam_filename)
for read_group, library in read_groups_to_libraries.items():
print("{}\t{}".format(read_group, library))
else:
read_groups = read_groups_from_bam(bam_filename, args.libraries)
if args.pulldown:
print(':'.join(read_groups))
else:
for read_group in read_groups:
print(read_group)
| [… def_use_chains character-offset pairs elided …] |
"""add initial models
Revision ID: 18b9d421fbde
Revises:
Create Date: 2022-03-19 12:36:16.067795
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "18b9d421fbde"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"stats",
sa.Column("user", sa.BigInteger(), nullable=False),
sa.Column("count", sa.BigInteger(), nullable=True),
sa.PrimaryKeyConstraint("user"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("stats")
# ### end Alembic commands ###
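# Example invocation (stock Alembic CLI; assumes an alembic.ini configured for
# this project):
#   alembic upgrade 18b9d421fbde   # apply this migration
#   alembic downgrade -1           # revert it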
| [… def_use_chains character-offset pairs elided …] |
# -*- coding: utf-8 -*-
from common.base_test import BaseTest
import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that, has_length
SUITE = {
"description": "Creating many contracts in a single transaction"
}
@lcc.prop("main", "type")
@lcc.tags("scenarios", "many_contracts_in_one_trx")
@lcc.suite("Check scenario 'Create many contracts in a single transaction'")
class CreateManyContractsInOneTrx(BaseTest):
def __init__(self):
super().__init__()
self.__database_api_identifier = None
self.__registration_api_identifier = None
self.contract = self.get_byte_code("piggy", "code")
self.echo_acc0 = None
def setup_suite(self):
super().setup_suite()
self._connect_to_echopy_lib()
lcc.set_step("Setup for {}".format(self.__class__.__name__))
self.__database_api_identifier = self.get_identifier("database")
self.__registration_api_identifier = self.get_identifier("registration")
lcc.log_info(
"API identifiers are: database='{}', registration='{}'".format(
self.__database_api_identifier, self.__registration_api_identifier
)
)
self.echo_acc0 = self.get_account_id(
self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
)
lcc.log_info("Echo account is '{}'".format(self.echo_acc0))
def teardown_suite(self):
self._disconnect_to_echopy_lib()
super().teardown_suite()
@lcc.test(
"The scenario describes creating many contracts in a single transaction "
"on the Echo network, written in Solidity."
)
def create_many_contracts_in_one_trx_scenario(self, get_random_integer_up_to_fifty):
number_of_contracts = get_random_integer_up_to_fifty
lcc.set_step("Create '{}' 'Piggy' contracts in the Echo network".format(number_of_contracts))
operation = self.echo_ops.get_contract_create_operation(
echo=self.echo, registrar=self.echo_acc0, bytecode=self.contract
)
collected_operation = self.collect_operations(operation, self.__database_api_identifier)
list_operations = []
for i in range(number_of_contracts):
list_operations.append(collected_operation)
broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=list_operations, log_broadcast=False)
lcc.set_step("Check that all contracts created in the Echo network")
check_that(
"in 'broadcast_result' are 'operation_results'",
broadcast_result.get("trx").get("operation_results"), has_length(number_of_contracts)
)
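# Example invocation (stock lemoncheesecake CLI; the exact suite filter depends
# on how the project's suite path is configured):
#   lcc run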
| [… def_use_chains character-offset pairs elided …] |
from sbaas.analysis.analysis_base import *
class stage00_query(base_analysis):
def get_structureFile_standards(self,met_id_I):
        '''Query structure file and extension from metabolomics standards'''
try:
structure = self.session.query(standards.structure_file,
standards.structure_file_extention).filter(
standards.met_id.like(met_id_I)).all();
struct_file_O = '';
struct_file_ext_O = '';
if structure:
struct_file_O = structure[0][0];
struct_file_ext_O = structure[0][1];
else:
print('no structure file found for ' + met_id_I);
exit(-1);
return struct_file_O, struct_file_ext_O
except SQLAlchemyError as e:
print(e);
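    # Example (hypothetical metabolite id; requires a live sbaas database
    # session provided by base_analysis):
    #   q = stage00_query()
    #   struct_file, struct_ext = q.get_structureFile_standards('glc-D')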
def get_exactMassAndFormula_standards(self,met_id_I):
        '''Query exact mass and formula from metabolomics standards'''
try:
massformula = self.session.query(standards.exactmass,
standards.formula).filter(
standards.met_id.like(met_id_I)).all();
mass_O = '';
formula_O = '';
if massformula:
mass_O = massformula[0][0];
formula_O = massformula[0][1];
else:
print('no mass and formula found for ' + met_id_I);
exit(-1);
return mass_O, formula_O
except SQLAlchemyError as e:
print(e);
def get_Q1AndQ3MassAndMode_MSComponents(self,met_id_I):
        '''Query q1 mass, q3 mass, and ms_mode from ms_components'''
try:
mscomponents = self.session.query(MS_components.q1_mass,
MS_components.q3_mass,
MS_components.ms_mode).filter(
MS_components.met_id.like(met_id_I)).order_by(
MS_components.ms_mode.asc(),
MS_components.q1_mass.asc(),
MS_components.q3_mass.asc()).all();
mscomponents_O = [];
for msc in mscomponents:
mscomponents_1 = {};
mscomponents_1['met_id'] = met_id_I;
mscomponents_1['q1_mass'] = msc.q1_mass;
mscomponents_1['q3_mass'] = msc.q3_mass;
mscomponents_1['ms_mode'] = msc.ms_mode;
mscomponents_O.append(mscomponents_1);
return mscomponents_O;
except SQLAlchemyError as e:
print(e);
def get_row_MSComponents(self,met_id_I,ms_mode_I,ms_methodtype_I):
        '''Query row from ms_components by met_id, ms_mode, and ms_methodtype'''
try:
mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass,
MS_components.ms3_mass,MS_components.met_name,MS_components.dp,
MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af,
MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank,
MS_components.ion_abundance,MS_components.precursor_formula,
MS_components.product_ion_reference,MS_components.product_formula,
MS_components.production_ion_notes,MS_components.met_id,
MS_components.external_reference,MS_components.q1_mass_units,
MS_components.q3_mass_units,MS_components.ms3_mass_units,
MS_components.threshold_units,MS_components.dp_units,
MS_components.ep_units,MS_components.ce_units,
MS_components.cxp_units,MS_components.af_units,
MS_components.ms_group,MS_components.threshold,
MS_components.dwell_weight,MS_components.component_name,
MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment,
MS_components.product_fragment,MS_components.precursor_exactmass,
MS_components.product_exactmass,MS_components.ms_methodtype).filter(
MS_components.met_id.like(met_id_I),
MS_components.ms_mode.like(ms_mode_I),
MS_components.ms_methodtype.like(ms_methodtype_I)).all();
mscomponents_O = [];
if not mscomponents:
print('bad query for row in ms_components: ')
print('met_id: ' + met_id_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I);
exit(-1)
for msc in mscomponents:
mscomponents_1 = {};
mscomponents_1["q1_mass"] = msc.q1_mass;
mscomponents_1["q3_mass"] = msc.q3_mass;
mscomponents_1["ms3_mass"] = msc.ms3_mass;
mscomponents_1["met_name"] = msc.met_name;
mscomponents_1["dp"] = msc.dp;
mscomponents_1["ep"] = msc.ep;
mscomponents_1["ce"] = msc.ce;
mscomponents_1["cxp"] = msc.cxp;
mscomponents_1["af"] = msc.af;
mscomponents_1["quantifier"] = msc.quantifier;
mscomponents_1["ms_mode"] = msc.ms_mode;
mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank;
mscomponents_1["ion_abundance"] = msc.ion_abundance;
mscomponents_1["precursor_formula"] = msc.precursor_formula;
mscomponents_1["product_ion_reference"] = msc.product_ion_reference;
mscomponents_1["product_formula"] = msc.product_formula;
mscomponents_1["production_ion_notes"] = msc.production_ion_notes;
mscomponents_1["met_id"] = msc.met_id;
mscomponents_1["external_reference"] = msc.external_reference;
mscomponents_1["q1_mass_units"] = msc.q1_mass_units;
mscomponents_1["q3_mass_units"] = msc.q3_mass_units;
mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units;
mscomponents_1["threshold_units"] = msc.threshold_units;
mscomponents_1["dp_units"] = msc.dp_units;
mscomponents_1["ep_units"] = msc.ep_units;
mscomponents_1["ce_units"] = msc.ce_units;
mscomponents_1["cxp_units"] = msc.cxp_units;
mscomponents_1["af_units"] = msc.af_units;
mscomponents_1["ms_group"] = msc.ms_group;
mscomponents_1["threshold"] = msc.threshold;
mscomponents_1["dwell_weight"] = msc.dwell_weight;
mscomponents_1["component_name"] = msc.component_name;
mscomponents_1["ms_include"] = msc.ms_include;
mscomponents_1["ms_is"] = msc.ms_is;
mscomponents_1["precursor_fragment"] = msc.precursor_fragment;
mscomponents_1["product_fragment"] = msc.product_fragment;
mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass;
mscomponents_1["product_exactmass"] = msc.product_exactmass;
mscomponents_1["ms_methodtype"] = msc.ms_methodtype;
mscomponents_O.append(mscomponents_1);
return mscomponents_O;
except SQLAlchemyError as e:
print(e);
def get_row_MSComponents_metIDAndFormula(self,met_id_I,precursor_formula_I,
product_formula_I,ms_methodtype_I):
        '''Query row from ms_components by met_id, precursor_formula, and product_formula'''
try:
mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass,
MS_components.ms3_mass,MS_components.met_name,MS_components.dp,
MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af,
MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank,
MS_components.ion_abundance,MS_components.precursor_formula,
MS_components.product_ion_reference,MS_components.product_formula,
MS_components.production_ion_notes,MS_components.met_id,
MS_components.external_reference,MS_components.q1_mass_units,
MS_components.q3_mass_units,MS_components.ms3_mass_units,
MS_components.threshold_units,MS_components.dp_units,
MS_components.ep_units,MS_components.ce_units,
MS_components.cxp_units,MS_components.af_units,
MS_components.ms_group,MS_components.threshold,
MS_components.dwell_weight,MS_components.component_name,
MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment,
MS_components.product_fragment,MS_components.precursor_exactmass,
MS_components.product_exactmass,MS_components.ms_methodtype).filter(
MS_components.met_id.like(met_id_I),
MS_components.precursor_formula.like(precursor_formula_I),
MS_components.product_formula.like(product_formula_I),
MS_components.ms_methodtype.like(ms_methodtype_I)).all();
mscomponents_O = [];
if not mscomponents:
print('bad query for row in ms_components: ')
print('met_id: ' + met_id_I + ', precursor_formula_I: ' + precursor_formula_I + ', product_formula_I: ' + product_formula_I + ', ms_methodtype_I: ' + ms_methodtype_I);
exit(-1)
for msc in mscomponents:
mscomponents_1 = {};
mscomponents_1["q1_mass"] = msc.q1_mass;
mscomponents_1["q3_mass"] = msc.q3_mass;
mscomponents_1["ms3_mass"] = msc.ms3_mass;
mscomponents_1["met_name"] = msc.met_name;
mscomponents_1["dp"] = msc.dp;
mscomponents_1["ep"] = msc.ep;
mscomponents_1["ce"] = msc.ce;
mscomponents_1["cxp"] = msc.cxp;
mscomponents_1["af"] = msc.af;
mscomponents_1["quantifier"] = msc.quantifier;
mscomponents_1["ms_mode"] = msc.ms_mode;
mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank;
mscomponents_1["ion_abundance"] = msc.ion_abundance;
mscomponents_1["precursor_formula"] = msc.precursor_formula;
mscomponents_1["product_ion_reference"] = msc.product_ion_reference;
mscomponents_1["product_formula"] = msc.product_formula;
mscomponents_1["production_ion_notes"] = msc.production_ion_notes;
mscomponents_1["met_id"] = msc.met_id;
mscomponents_1["external_reference"] = msc.external_reference;
mscomponents_1["q1_mass_units"] = msc.q1_mass_units;
mscomponents_1["q3_mass_units"] = msc.q3_mass_units;
mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units;
mscomponents_1["threshold_units"] = msc.threshold_units;
mscomponents_1["dp_units"] = msc.dp_units;
mscomponents_1["ep_units"] = msc.ep_units;
mscomponents_1["ce_units"] = msc.ce_units;
mscomponents_1["cxp_units"] = msc.cxp_units;
mscomponents_1["af_units"] = msc.af_units;
mscomponents_1["ms_group"] = msc.ms_group;
mscomponents_1["threshold"] = msc.threshold;
mscomponents_1["dwell_weight"] = msc.dwell_weight;
mscomponents_1["component_name"] = msc.component_name;
mscomponents_1["ms_include"] = msc.ms_include;
mscomponents_1["ms_is"] = msc.ms_is;
mscomponents_1["precursor_fragment"] = msc.precursor_fragment;
mscomponents_1["product_fragment"] = msc.product_fragment;
mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass;
mscomponents_1["product_exactmass"] = msc.product_exactmass;
mscomponents_1["ms_methodtype"] = msc.ms_methodtype;
mscomponents_O.append(mscomponents_1);
return mscomponents_O[0];
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_sampleDescription(self,experiment_id_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.istechnical != True).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_batchFileInfo_experimentID(self,experiment_id_I,sample_type_I):
'''Query data from experiment and sample for batch file'''
try:
data = self.session.query(experiment.id,
sample.sample_name,
experiment.acquisition_method_id,
sample.sample_dilution,
sample.sample_type,
sample_description.sample_replicate,
sample_description.sample_desc,
sample_description.sample_name_abbreviation).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_type.like(sample_type_I),
sample.sample_id.like(sample_description.sample_id)).group_by(
experiment.id,
sample.sample_name,
experiment.acquisition_method_id,
sample.sample_dilution,
sample.sample_type,
sample_description.sample_replicate,
sample_description.sample_desc,
sample_description.sample_name_abbreviation).order_by(
experiment.id.asc(),
sample.sample_dilution.desc(),
sample_description.sample_name_abbreviation.asc(),
#sample.sample_name.asc(),
sample_description.sample_replicate.asc(),
sample_description.sample_desc.desc()).all();
#.order_by(
# experiment.id.asc(),
# sample.sample_dilution.desc(),
# sample_description.sample_replicate.asc(),
# sample_description.sample_desc.desc(),
# sample.sample_name.asc()).all();
data_O = [];
if data:
for d in data:
data_tmp = {};
data_tmp['id']=d.id;
data_tmp['sample_name']=d.sample_name;
data_tmp['sample_type']=d.sample_type;
data_tmp['acquisition_method_id']=d.acquisition_method_id;
data_tmp['sample_dilution']=d.sample_dilution;
data_tmp['sample_replicate']=d.sample_replicate;
data_O.append(data_tmp);
else:
                print('no data found for experiment ' + experiment_id_I + ' and sample_type ' + sample_type_I);
return data_O;
except SQLAlchemyError as e:
print(e);
def get_batchFileInfo_experimentIDAndExpType(self,experiment_id_I,sample_type_I,exp_type_I):
'''Query data from experiment and sample for batch file'''
try:
data = self.session.query(experiment.id,
sample.sample_name,
experiment.acquisition_method_id,
sample.sample_dilution,
sample.sample_type,
sample_description.sample_replicate,
sample_description.sample_desc,
sample_description.sample_name_abbreviation).filter(
experiment.id.like(experiment_id_I),
experiment.exp_type_id==exp_type_I,
experiment.sample_name.like(sample.sample_name),
sample.sample_type.like(sample_type_I),
sample.sample_id.like(sample_description.sample_id)).group_by(
experiment.id,
sample.sample_name,
experiment.acquisition_method_id,
sample.sample_dilution,
sample.sample_type,
sample_description.sample_replicate,
sample_description.sample_desc,
sample_description.sample_name_abbreviation).order_by(
experiment.id.asc(),
sample.sample_dilution.desc(),
sample_description.sample_name_abbreviation.asc(),
#sample.sample_name.asc(),
sample_description.sample_replicate.asc(),
sample_description.sample_desc.desc()).all();
#.order_by(
# experiment.id.asc(),
# sample.sample_dilution.desc(),
# sample_description.sample_replicate.asc(),
# sample_description.sample_desc.desc(),
# sample.sample_name.asc()).all();
data_O = [];
if data:
for d in data:
data_tmp = {};
data_tmp['id']=d.id;
data_tmp['sample_name']=d.sample_name;
data_tmp['sample_type']=d.sample_type;
data_tmp['acquisition_method_id']=d.acquisition_method_id;
data_tmp['sample_dilution']=d.sample_dilution;
data_tmp['sample_replicate']=d.sample_replicate;
data_O.append(data_tmp);
else:
                print('no data found for experiment ' + experiment_id_I + ' and sample_type ' + sample_type_I);
return data_O;
except SQLAlchemyError as e:
print(e);
def delete_sample_experimentIDAndSampleID_experiment(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from experiment'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(experiment).filter(
experiment.id.like(d['experiment_id']),
sample.sample_id.like(d['sample_id']),
experiment.sample_name.like(sample.sample_name)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentIDAndSampleID_sample(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample).filter(
experiment.id.like(d['experiment_id']),
sample.sample_id.like(d['sample_id']),
experiment.sample_name.like(sample.sample_name)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentIDAndSampleID_sampleDescription(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_description'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_description).filter(
experiment.id.like(d['experiment_id']),
sample.sample_id.like(d['sample_id']),
experiment.sample_name.like(sample.sample_name),
sample_description.sample_id.like(sample.sample_id)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentIDAndSampleID_sampleStorage(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_storage'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_storage).filter(
experiment.id.like(d['experiment_id']),
sample.sample_id.like(d['sample_id']),
experiment.sample_name.like(sample.sample_name),
sample_storage.sample_id.like(sample.sample_id)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentIDAndSampleID_samplePhysiologicalParameters(self,dataListDelete_I):
'''Delete specific samples from an experiment by their sample ID from sample_physiologicalparameters'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample_physiologicalParameters).filter(
experiment.id.like(d['experiment_id']),
sample.sample_id.like(d['sample_id']),
experiment.sample_name.like(sample.sample_name),
sample_physiologicalParameters.sample_id.like(sample.sample_id)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def get_calibratorIDAndLevel_sampleNameAndSampleType_sample(self,sample_name_I,sample_type_I):
        '''Query calibrator id and level from metabolomics sample'''
try:
calibratorInfo = self.session.query(sample.calibrator_id,
sample.calibrator_level).filter(
sample.sample_name.like(sample_name_I),
sample.sample_type.like(sample_type_I)).all();
id_O = None;
level_O = None;
if calibratorInfo:
id_O = calibratorInfo[0][0];
level_O = calibratorInfo[0][1];
else:
print('no calibrator id nor level found for sample_name/sample_type ' + sample_name_I + ' / ' + sample_type_I);
return id_O, level_O
except SQLAlchemyError as e:
print(e);
def get_calibratorConcentrationAndUnit_metIDAndCalibratorIDAndLevel_calibratorConcentrations(self, met_id_I, calibrator_id_I, calibrator_level_I):
        '''Query calibrator concentration and unit for a given metabolite, calibrator id, and calibrator level'''
concentration_O = 0.0;
unit_O = None;
# 1. query the calibrator id for the metabolite
try:
calibratorID = self.session.query(
calibrator2mix.calibrator_id).filter(
mix2met_id.met_id.like(met_id_I),
mix2met_id.mix_id.like(calibrator2mix.mix_id)).all();
calibrator_id_O = None;
if calibratorID:
calibrator_id_O = calibratorID[0][0];
else:
print('no calibrator ID nor unit found for met_id ' + met_id_I);
except SQLAlchemyError as e:
print(e);
# 2. check if the calibrator id matches
if calibrator_id_O == calibrator_id_I:
# 3. query the concentration and units
try:
calibratorInfo = self.session.query(
calibrator_concentrations.calibrator_concentration,
calibrator_concentrations.concentration_units).filter(
calibrator_concentrations.met_id.like(met_id_I),
calibrator_concentrations.calibrator_level == calibrator_level_I).all();
if calibratorInfo:
concentration_O = calibratorInfo[0][0];
unit_O = calibratorInfo[0][1];
else:
print('no calibrator concentration nor unit found for met_id/calibrator_id/calibrator_level ' + met_id_I + ' / ' + str(calibrator_id_I) + ' / ' + str(calibrator_level_I));
return concentration_O, unit_O
except SQLAlchemyError as e:
print(e);
else:
return concentration_O, unit_O
def get_acqusitionMethod(self,lc_method_I,ms_mode_I,ms_methodtype_I):
        '''Query acquisition method (i.e., join tables lc_elution and ms_components)'''
try:
mscomponents = self.session.query(MS_components.component_name,
MS_components.met_id,
MS_components.met_name,
MS_components.q1_mass,
MS_components.q3_mass,
MS_components.dp,
MS_components.ep,
MS_components.ce,
MS_components.cxp,
MS_components.precursor_formula,
MS_components.product_formula,
MS_components.quantifier,
MS_components.ms_group,
MS_components.threshold,
MS_components.dwell_weight,
lc_elution.rt,
lc_elution.ms_window,
lc_elution.rt_units,
lc_elution.window_units).filter(
lc_elution.lc_method_id.like(lc_method_I),
MS_components.ms_mode.like(ms_mode_I),
MS_components.ms_methodtype.like(ms_methodtype_I),
MS_components.met_id.like(lc_elution.met_id),
MS_components.ms_include).group_by( # query only components that are included in the method
MS_components.component_name,
MS_components.met_id,
MS_components.met_name,
MS_components.q1_mass,
MS_components.q3_mass,
MS_components.dp,
MS_components.ep,
MS_components.ce,
MS_components.cxp,
MS_components.precursor_formula,
MS_components.product_formula,
MS_components.quantifier,
MS_components.ms_group,
MS_components.threshold,
MS_components.dwell_weight,
lc_elution.rt,
lc_elution.ms_window,
lc_elution.rt_units,
lc_elution.window_units).order_by(
lc_elution.rt.asc(),
MS_components.component_name.asc()).all();
mscomponents_O = [];
if not mscomponents:
print('bad query for row in ms_components: ')
print('lc_method_I: ' + lc_method_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I);
exit(-1)
for msc in mscomponents:
mscomponents_1 = {};
mscomponents_1["q1_mass"] = msc.q1_mass;
mscomponents_1["q3_mass"] = msc.q3_mass;
mscomponents_1["met_name"] = msc.met_name;
mscomponents_1["dp"] = msc.dp;
mscomponents_1["ep"] = msc.ep;
mscomponents_1["ce"] = msc.ce;
mscomponents_1["cxp"] = msc.cxp;
mscomponents_1["quantifier"] = msc.quantifier;
mscomponents_1["met_id"] = msc.met_id;
mscomponents_1["ms_group"] = msc.ms_group;
mscomponents_1["threshold"] = msc.threshold;
mscomponents_1["dwell_weight"] = msc.dwell_weight;
mscomponents_1["component_name"] = msc.component_name;
mscomponents_1["rt"] = msc.rt;
mscomponents_1["ms_window"] = msc.ms_window;
mscomponents_1["rt_units"] = msc.rt_units;
mscomponents_1["window_units"] = msc.window_units;
mscomponents_O.append(mscomponents_1);
return mscomponents_O;
except SQLAlchemyError as e:
print(e);
def delete_sample_experimentID_experiment(self,dataListDelete_I):
'''Delete samples from an experiment from experiment'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(experiment).filter(
experiment.id.like(d['experiment_id'])).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def delete_sample_experimentID_sample(self,dataListDelete_I):
'''Delete an experiment from sample'''
deletes = [];
for d in dataListDelete_I:
try:
delete = self.session.query(sample).filter(
experiment.id.like(d['experiment_id']),
experiment.sample_name.like(sample.sample_name)).delete(
synchronize_session=False);
if delete == 0:
print('row not found')
print(d);
deletes.append(delete);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def get_nMaxBioReps_experimentIDAndSampleName_sampleDescription(self,experiment_id_I,sample_name_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample_name_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.istechnical != True).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_id_I),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_desc.like('Broth'),
sample_description.istechnical != True).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleNameAbbreviation_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I),
sample_description.sample_desc.like('Broth')
#sample_description.istechnical != True
).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_nMaxBioReps_experimentIDAndSampleNameAbbreviationAndExpType_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I,exp_type_I):
'''Query the maximum number of biological replicates corresponding to a given experiment'''
try:
bioReps = self.session.query(sample_description.sample_replicate).filter(
experiment.id.like(experiment_id_I),
experiment.exp_type_id==exp_type_I,
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id),
sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I),
sample_description.sample_desc.like('Broth')
#sample_description.istechnical != True
).group_by(
sample_description.sample_replicate).order_by(
sample_description.sample_replicate.desc()).all();
maxBioReps_O = 0;
if bioReps:
maxBioReps_O = max(bioReps[0]);
else:
print('no biological replicates found for experiment ' + experiment_id_I);
exit(-1);
return maxBioReps_O;
except SQLAlchemyError as e:
print(e);
def get_sampleIDs_experimentID_experiment(self,experiment_id_I):
        '''Query sample IDs that are used in the experiment'''
try:
sample_names = self.session.query(sample.sample_id).filter(
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name)).group_by(
sample.sample_id).order_by(
sample.sample_id.asc()).all();
sample_names_O = [];
for sn in sample_names: sample_names_O.append(sn.sample_id);
return sample_names_O;
except SQLAlchemyError as e:
print(e);
def get_sampleNameAbbreviation_experimentIDAndSampleID(self,experiment_id_I,sample_id_I):
        '''Query sample name abbreviation from the experiment'''
try:
sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter(
sample.sample_id.like(sample_id_I),
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id)).group_by(
sample_description.sample_name_abbreviation).order_by(
sample_description.sample_name_abbreviation.asc()).all();
sample_name_abbreviations_O = None;
sample_name_abbreviations_O = sample_name_abbreviations[0][0];
return sample_name_abbreviations_O;
except SQLAlchemyError as e:
print(e);
def get_sampleNameAbbreviation_experimentIDAndSampleName(self,experiment_id_I,sample_name_I):
        '''Query sample name abbreviation from the experiment'''
try:
sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter(
sample.sample_name.like(sample_name_I),
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_description.sample_id)).group_by(
sample_description.sample_name_abbreviation).order_by(
sample_description.sample_name_abbreviation.asc()).all();
sample_name_abbreviations_O = None;
sample_name_abbreviations_O = sample_name_abbreviations[0][0];
return sample_name_abbreviations_O;
except SQLAlchemyError as e:
print(e);
def get_sampleLabelAndBoxAndPos_experimentIDAndExperimentTypeID_sampleStorage(self,experiment_id_I,exp_type_id_I):
        '''Query sample storage label, box, and position for the experiment'''
try:
data = self.session.query(sample_storage.sample_id,
sample_storage.sample_label,
sample_storage.box,
sample_storage.pos).filter(
experiment.exp_type_id == exp_type_id_I,
experiment.id.like(experiment_id_I),
experiment.sample_name.like(sample.sample_name),
sample.sample_id.like(sample_storage.sample_id)).group_by(
sample_storage.sample_id,
sample_storage.sample_label,
sample_storage.box,
sample_storage.pos).order_by(
sample_storage.sample_id.asc()).all();
sampleStorage_O = [];
if data:
for d in data:
sampleStorage_O.append({'sample_id':d.sample_id,
'sample_label':d.sample_label,
'box':d.box,
'pos':d.pos});
return sampleStorage_O;
except SQLAlchemyError as e:
            print(e);
| [… def_use_chains character-offset pairs elided …] |
import io
import os
import re
import pwd
import grp
import sys
import json
import uuid
import time
import glob
import base64
import socket
import hashlib
import decimal
import logging
import tarfile
import zipfile
import binascii
import calendar
import tempfile
import threading
import subprocess
import six
import shutil
import requests
import dns.resolver
import functools
from io import BytesIO
from contextlib import closing
from datetime import datetime
from six import with_metaclass
from six.moves import cStringIO as StringIO
from six.moves.urllib.parse import urlparse
from multiprocessing.dummy import Pool
from localstack import config
from localstack.config import DEFAULT_ENCODING
from localstack.constants import ENV_DEV
from localstack.utils import bootstrap
from localstack.utils.bootstrap import FuncThread
# arrays for temporary files and resources
TMP_FILES = []
TMP_THREADS = []
TMP_PROCESSES = []
# cache clean variables
CACHE_CLEAN_TIMEOUT = 60 * 5
CACHE_MAX_AGE = 60 * 60
CACHE_FILE_PATTERN = os.path.join(tempfile.gettempdir(), '_random_dir_', 'cache.*.json')
last_cache_clean_time = {'time': 0}
mutex_clean = threading.Semaphore(1)
# misc. constants
TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'
TIMESTAMP_FORMAT_MILLIS = '%Y-%m-%dT%H:%M:%S.%fZ'
CODEC_HANDLER_UNDERSCORE = 'underscore'
# chunk size for file downloads
DOWNLOAD_CHUNK_SIZE = 1024 * 1024
# set up logger
LOG = logging.getLogger(__name__)
# flag to indicate whether we've received and processed the stop signal
INFRA_STOPPED = False
# generic cache object
CACHE = {}
# lock for creating certificate files
SSL_CERT_LOCK = threading.RLock()
class CustomEncoder(json.JSONEncoder):
""" Helper class to convert JSON documents with datetime, decimals, or bytes. """
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
if isinstance(o, datetime):
return str(o)
if isinstance(o, six.binary_type):
return to_str(o)
try:
return super(CustomEncoder, self).default(o)
except Exception:
return None
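# A quick sketch of CustomEncoder in use (values chosen for illustration):
#   json.dumps({'d': decimal.Decimal('3.5'), 'ts': datetime(2020, 1, 1)}, cls=CustomEncoder)
#   -> '{"d": 3.5, "ts": "2020-01-01 00:00:00"}'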
class ShellCommandThread(FuncThread):
""" Helper class to run a shell command in a background thread. """
def __init__(self, cmd, params={}, outfile=None, env_vars={}, stdin=False,
quiet=True, inherit_cwd=False, inherit_env=True):
self.cmd = cmd
self.process = None
self.outfile = outfile or os.devnull
self.stdin = stdin
self.env_vars = env_vars
self.inherit_cwd = inherit_cwd
self.inherit_env = inherit_env
FuncThread.__init__(self, self.run_cmd, params, quiet=quiet)
def run_cmd(self, params):
def convert_line(line):
line = to_str(line or '')
return '%s\r\n' % line.strip()
def filter_line(line):
""" Return True if this line should be filtered, i.e., not printed """
return '(Press CTRL+C to quit)' in line
try:
self.process = run(self.cmd, asynchronous=True, stdin=self.stdin, outfile=self.outfile,
env_vars=self.env_vars, inherit_cwd=self.inherit_cwd, inherit_env=self.inherit_env)
if self.outfile:
if self.outfile == subprocess.PIPE:
# get stdout/stderr from child process and write to parent output
streams = ((self.process.stdout, sys.stdout), (self.process.stderr, sys.stderr))
for instream, outstream in streams:
for line in iter(instream.readline, None):
# `line` should contain a newline at the end as we're iterating,
# hence we can safely break the loop if `line` is None or empty string
if line in [None, '', b'']:
break
if not (line and line.strip()) and self.is_killed():
break
line = convert_line(line)
if filter_line(line):
continue
outstream.write(line)
outstream.flush()
self.process.wait()
else:
self.process.communicate()
except Exception as e:
if self.process and not self.quiet:
LOG.warning('Shell command error "%s": %s' % (e, self.cmd))
if self.process and not self.quiet and self.process.returncode != 0:
LOG.warning('Shell command exit code "%s": %s' % (self.process.returncode, self.cmd))
def is_killed(self):
if not self.process:
return True
if INFRA_STOPPED:
return True
# Note: Do NOT import "psutil" at the root scope, as this leads
# to problems when importing this file from our test Lambdas in Docker
# (Error: libc.musl-x86_64.so.1: cannot open shared object file)
import psutil
return not psutil.pid_exists(self.process.pid)
def stop(self, quiet=False):
# Note: Do NOT import "psutil" at the root scope, as this leads
# to problems when importing this file from our test Lambdas in Docker
# (Error: libc.musl-x86_64.so.1: cannot open shared object file)
import psutil
if not self.process:
LOG.warning("No process found for command '%s'" % self.cmd)
return
parent_pid = self.process.pid
try:
parent = psutil.Process(parent_pid)
for child in parent.children(recursive=True):
child.kill()
parent.kill()
self.process = None
except Exception:
if not quiet:
LOG.warning('Unable to kill process with pid %s' % parent_pid)
class JsonObject(object):
""" Generic JSON serializable object for simplified subclassing """
def to_json(self, indent=None):
return json.dumps(self,
default=lambda o: ((float(o) if o % 1 > 0 else int(o))
if isinstance(o, decimal.Decimal) else o.__dict__),
sort_keys=True, indent=indent)
def apply_json(self, j):
if isinstance(j, str):
j = json.loads(j)
self.__dict__.update(j)
def to_dict(self):
return json.loads(self.to_json())
@classmethod
def from_json(cls, j):
j = JsonObject.as_dict(j)
result = cls()
result.apply_json(j)
return result
@classmethod
def from_json_list(cls, l):
return [cls.from_json(j) for j in l]
@classmethod
def as_dict(cls, obj):
if isinstance(obj, dict):
return obj
return obj.to_dict()
def __str__(self):
return self.to_json()
def __repr__(self):
return self.__str__()
class CaptureOutput(object):
""" A context manager that captures stdout/stderr of the current thread. Use it as follows:
with CaptureOutput() as c:
...
print(c.stdout(), c.stderr())
"""
orig_stdout = sys.stdout
orig_stderr = sys.stderr
orig___stdout = sys.__stdout__
orig___stderr = sys.__stderr__
CONTEXTS_BY_THREAD = {}
class LogStreamIO(io.StringIO):
def write(self, s):
if isinstance(s, str) and hasattr(s, 'decode'):
s = s.decode('unicode-escape')
return super(CaptureOutput.LogStreamIO, self).write(s)
def __init__(self):
self._stdout = self.LogStreamIO()
self._stderr = self.LogStreamIO()
def __enter__(self):
# Note: import werkzeug here (not at top of file) to allow dependency pruning
from werkzeug.local import LocalProxy
ident = self._ident()
if ident not in self.CONTEXTS_BY_THREAD:
self.CONTEXTS_BY_THREAD[ident] = self
self._set(LocalProxy(self._proxy(sys.stdout, 'stdout')),
LocalProxy(self._proxy(sys.stderr, 'stderr')),
LocalProxy(self._proxy(sys.__stdout__, 'stdout')),
LocalProxy(self._proxy(sys.__stderr__, 'stderr')))
return self
def __exit__(self, type, value, traceback):
ident = self._ident()
removed = self.CONTEXTS_BY_THREAD.pop(ident, None)
if not self.CONTEXTS_BY_THREAD:
# reset pointers
self._set(self.orig_stdout, self.orig_stderr, self.orig___stdout, self.orig___stderr)
# get value from streams
removed._stdout.flush()
removed._stderr.flush()
out = removed._stdout.getvalue()
err = removed._stderr.getvalue()
# close handles
removed._stdout.close()
removed._stderr.close()
removed._stdout = out
removed._stderr = err
def _set(self, out, err, __out, __err):
sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__ = (out, err, __out, __err)
def _proxy(self, original_stream, type):
def proxy():
ident = self._ident()
ctx = self.CONTEXTS_BY_THREAD.get(ident)
if ctx:
return ctx._stdout if type == 'stdout' else ctx._stderr
return original_stream
return proxy
def _ident(self):
return threading.currentThread().ident
def stdout(self):
return self._stdout.getvalue() if hasattr(self._stdout, 'getvalue') else self._stdout
def stderr(self):
return self._stderr.getvalue() if hasattr(self._stderr, 'getvalue') else self._stderr
# ----------------
# UTILITY METHODS
# ----------------
def synchronized(lock=None):
"""
Synchronization decorator as described in
http://blog.dscpl.com.au/2014/01/the-missing-synchronized-decorator.html.
"""
def _decorator(wrapped):
@functools.wraps(wrapped)
def _wrapper(*args, **kwargs):
with lock:
return wrapped(*args, **kwargs)
return _wrapper
return _decorator
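# Example usage (hypothetical lock and function names; every call to the
# wrapped function then runs with the lock held):
#   _cache_lock = threading.RLock()
#   @synchronized(lock=_cache_lock)
#   def clean_cache():
#       ...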
def is_string(s, include_unicode=True, exclude_binary=False):
if isinstance(s, six.binary_type) and exclude_binary:
return False
if isinstance(s, str):
return True
if include_unicode and isinstance(s, six.text_type):
return True
return False
def is_string_or_bytes(s):
return is_string(s) or isinstance(s, six.string_types) or isinstance(s, bytes)
def is_base64(s):
regex = r'^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$'
return is_string(s) and re.match(regex, s)
def md5(string):
m = hashlib.md5()
m.update(to_bytes(string))
return m.hexdigest()
def in_docker():
return config.in_docker()
def has_docker():
try:
run('docker ps')
return True
except Exception:
return False
def get_docker_container_names():
return bootstrap.get_docker_container_names()
def is_port_open(port_or_url, http_path=None, expect_success=True, protocols=['tcp']):
port = port_or_url
host = 'localhost'
protocol = 'http'
protocols = protocols if isinstance(protocols, list) else [protocols]
if isinstance(port, six.string_types):
url = urlparse(port_or_url)
port = url.port
host = url.hostname
protocol = url.scheme
nw_protocols = []
nw_protocols += ([socket.SOCK_STREAM] if 'tcp' in protocols else [])
nw_protocols += ([socket.SOCK_DGRAM] if 'udp' in protocols else [])
for nw_protocol in nw_protocols:
with closing(socket.socket(socket.AF_INET, nw_protocol)) as sock:
sock.settimeout(1)
if nw_protocol == socket.SOCK_DGRAM:
try:
if port == 53:
dnshost = '127.0.0.1' if host == 'localhost' else host
resolver = dns.resolver.Resolver()
resolver.nameservers = [dnshost]
resolver.timeout = 1
resolver.lifetime = 1
answers = resolver.query('google.com', 'A')
assert len(answers) > 0
else:
sock.sendto(bytes(), (host, port))
sock.recvfrom(1024)
except Exception:
return False
elif nw_protocol == socket.SOCK_STREAM:
result = sock.connect_ex((host, port))
if result != 0:
return False
if 'tcp' not in protocols or not http_path:
return True
url = '%s://%s:%s%s' % (protocol, host, port, http_path)
try:
response = safe_requests.get(url)
return not expect_success or response.status_code < 400
except Exception:
return False
def wait_for_port_open(port, http_path=None, expect_success=True, retries=10, sleep_time=0.5):
""" Ping the given network port until it becomes available (for a given number of retries).
If 'http_path' is set, make a GET request to this path and assert a non-error response. """
def check():
if not is_port_open(port, http_path=http_path, expect_success=expect_success):
raise Exception()
return retry(check, sleep=sleep_time, retries=retries)
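# Example (hypothetical port and path; raises after the retries are exhausted):
#   wait_for_port_open(4566, http_path='/health', expect_success=True, retries=20)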
def get_free_tcp_port():
tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp.bind(('', 0))
addr, port = tcp.getsockname()
tcp.close()
return port
def get_service_protocol():
return 'https' if config.USE_SSL else 'http'
def timestamp(time=None, format=TIMESTAMP_FORMAT):
if not time:
time = datetime.utcnow()
if isinstance(time, six.integer_types + (float, )):
time = datetime.fromtimestamp(time)
return time.strftime(format)
def retry(function, retries=3, sleep=1, sleep_before=0, **kwargs):
raise_error = None
if sleep_before > 0:
time.sleep(sleep_before)
for i in range(0, retries + 1):
try:
return function(**kwargs)
except Exception as error:
raise_error = error
time.sleep(sleep)
raise raise_error
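# Example (hypothetical flaky callable; `url` is a placeholder). Retries up to
# 5 times, sleeping 2s between attempts, re-raising the last error on failure:
#   result = retry(lambda: requests.get(url).json(), retries=5, sleep=2)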
def dump_thread_info():
for t in threading.enumerate():
print(t)
print(run("ps aux | grep 'node\\|java\\|python'"))
def merge_recursive(source, destination):
for key, value in source.items():
if isinstance(value, dict):
# get node or create one
node = destination.setdefault(key, {})
merge_recursive(value, node)
else:
if not isinstance(destination, dict):
LOG.warning('Destination for merging %s=%s is not dict: %s' %
(key, value, destination))
destination[key] = value
return destination
def merge_dicts(*dicts, **kwargs):
""" Merge all dicts in `*dicts` into a single dict, and return the result. If any of the entries
in `*dicts` is None, and `default` is specified as keyword argument, then return `default`. """
result = {}
for d in dicts:
if d is None and 'default' in kwargs:
return kwargs['default']
if d:
result.update(d)
return result
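# Editor's illustrative sketch (not part of the original module): later dicts
# win on key conflicts, and a None entry short-circuits to `default`.
def _example_merge_dicts():
    assert merge_dicts({'a': 1}, {'a': 2, 'b': 3}) == {'a': 2, 'b': 3}
    assert merge_dicts({'a': 1}, None, default={}) == {}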
def recurse_object(obj, func, path=''):
""" Recursively apply `func` to `obj` (may be a list, dict, or other object). """
obj = func(obj, path=path)
if isinstance(obj, list):
for i in range(len(obj)):
tmp_path = '%s[%s]' % (path or '.', i)
obj[i] = recurse_object(obj[i], func, tmp_path)
elif isinstance(obj, dict):
for k, v in obj.items():
tmp_path = '%s%s' % ((path + '.') if path else '', k)
obj[k] = recurse_object(v, func, tmp_path)
return obj
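# Editor's illustrative sketch (not part of the original module): upper-case
# every string leaf of a nested structure; `path` names each visited position.
def _example_recurse_object():
    def upper_strings(obj, path=''):
        return obj.upper() if isinstance(obj, str) else obj
    data = {'a': ['x', {'b': 'y'}]}
    assert recurse_object(data, upper_strings) == {'a': ['X', {'b': 'Y'}]}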
def base64_to_hex(b64_string):
return binascii.hexlify(base64.b64decode(b64_string))
def obj_to_xml(obj):
""" Return an XML representation of the given object (dict, list, or primitive).
Does NOT add a common root element if the given obj is a list.
Does NOT work for nested dict structures. """
if isinstance(obj, list):
return ''.join([obj_to_xml(o) for o in obj])
if isinstance(obj, dict):
return ''.join(['<{k}>{v}</{k}>'.format(k=k, v=obj_to_xml(v)) for (k, v) in obj.items()])
return str(obj)
def now_utc():
return mktime(datetime.utcnow())
def now():
return mktime(datetime.now())
def mktime(timestamp):
return calendar.timegm(timestamp.timetuple())
def mkdir(folder):
if not os.path.exists(folder):
try:
os.makedirs(folder)
except OSError as err:
# Ignore rare 'File exists' race conditions.
if err.errno != 17:
raise
def ensure_readable(file_path, default_perms=None):
if default_perms is None:
default_perms = 0o644
try:
with open(file_path, 'rb'):
pass
except Exception:
LOG.info('Updating permissions as file is currently not readable: %s' % file_path)
os.chmod(file_path, default_perms)
def chown_r(path, user):
""" Recursive chown """
uid = pwd.getpwnam(user).pw_uid
gid = grp.getgrnam(user).gr_gid
os.chown(path, uid, gid)
for root, dirs, files in os.walk(path):
for dirname in dirs:
os.chown(os.path.join(root, dirname), uid, gid)
for filename in files:
os.chown(os.path.join(root, filename), uid, gid)
def chmod_r(path, mode):
""" Recursive chmod """
os.chmod(path, mode)
for root, dirnames, filenames in os.walk(path):
for dirname in dirnames:
os.chmod(os.path.join(root, dirname), mode)
for filename in filenames:
os.chmod(os.path.join(root, filename), mode)
def rm_rf(path):
"""
Recursively removes a file or directory
"""
if not path or not os.path.exists(path):
return
# Running the native command can be an order of magnitude faster in Alpine on Travis-CI
if is_alpine():
try:
return run('rm -rf "%s"' % path)
except Exception:
pass
# Make sure all files are writeable and dirs executable to remove
chmod_r(path, 0o777)
# check if the file is either a normal file, or, e.g., a fifo
exists_but_non_dir = os.path.exists(path) and not os.path.isdir(path)
if os.path.isfile(path) or exists_but_non_dir:
os.remove(path)
else:
shutil.rmtree(path)
def cp_r(src, dst):
"""Recursively copies file/directory"""
if os.path.isfile(src):
shutil.copy(src, dst)
else:
shutil.copytree(src, dst)
def download(url, path, verify_ssl=True):
"""Downloads file at url to the given path"""
# make sure we're creating a new session here to
# enable parallel file downloads during installation!
s = requests.Session()
r = s.get(url, stream=True, verify=verify_ssl)
# check status code before attempting to read body
if r.status_code >= 400:
raise Exception('Failed to download %s, response code %s' % (url, r.status_code))
total = 0
try:
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
LOG.debug('Starting download from %s to %s (%s bytes)' % (url, path, r.headers.get('content-length')))
with open(path, 'wb') as f:
for chunk in r.iter_content(DOWNLOAD_CHUNK_SIZE):
total += len(chunk)
if chunk: # filter out keep-alive new chunks
f.write(chunk)
LOG.debug('Writing %s bytes (total %s) to %s' % (len(chunk), total, path))
else:
LOG.debug('Empty chunk %s (total %s) from %s' % (chunk, total, url))
f.flush()
os.fsync(f)
if os.path.getsize(path) == 0:
LOG.warning('Zero bytes downloaded from %s, retrying' % url)
download(url, path, verify_ssl)
return
LOG.debug('Done downloading %s, response code %s, total bytes %d' % (url, r.status_code, total))
finally:
LOG.debug('Cleaning up file handles for download of %s' % url)
r.close()
s.close()
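# Editor's illustrative sketch (not part of the original module): fetch a file
# into a fresh temp path. The URL below is a placeholder, not a real endpoint.
def _example_download():
    target = new_tmp_file()
    download('https://example.com/archive.zip', target)
    LOG.debug('downloaded %s bytes' % os.path.getsize(target))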
def parse_chunked_data(data):
""" Parse the body of an HTTP message transmitted with chunked transfer encoding. """
data = (data or '').strip()
chunks = []
while data:
        length = re.match(r'^([0-9a-fA-F]+)\r\n.*', data)  # chunk sizes are hexadecimal
if not length:
break
length = length.group(1).lower()
length = int(length, 16)
data = data.partition('\r\n')[2]
chunks.append(data[:length])
data = data[length:].strip()
return ''.join(chunks)
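# Editor's illustrative sketch (not part of the original module): decode a
# two-chunk body; sizes are hex, and the zero-length chunk terminates the stream.
def _example_parse_chunked_data():
    body = '4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n'
    assert parse_chunked_data(body) == 'Wikipedia'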
def is_number(s):
try:
float(s) # for int, long and float
return True
except (TypeError, ValueError):
return False
def is_mac_os():
return bootstrap.is_mac_os()
def is_linux():
return bootstrap.is_linux()
def is_alpine():
try:
if '_is_alpine_' not in CACHE:
CACHE['_is_alpine_'] = False
if not os.path.exists('/etc/issue'):
return False
out = to_str(subprocess.check_output('cat /etc/issue', shell=True))
CACHE['_is_alpine_'] = 'Alpine' in out
except subprocess.CalledProcessError:
return False
return CACHE['_is_alpine_']
def get_arch():
if is_mac_os():
return 'osx'
if is_alpine():
return 'alpine'
if is_linux():
return 'linux'
raise Exception('Unable to determine system architecture')
def short_uid():
return str(uuid.uuid4())[0:8]
def json_safe(item):
""" return a copy of the given object (e.g., dict) that is safe for JSON dumping """
try:
return json.loads(json.dumps(item, cls=CustomEncoder))
except Exception:
item = fix_json_keys(item)
return json.loads(json.dumps(item, cls=CustomEncoder))
def fix_json_keys(item):
""" make sure the keys of a JSON are strings (not binary type or other) """
item_copy = item
if isinstance(item, list):
item_copy = []
for i in item:
item_copy.append(fix_json_keys(i))
if isinstance(item, dict):
item_copy = {}
for k, v in item.items():
item_copy[to_str(k)] = fix_json_keys(v)
return item_copy
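# Editor's illustrative sketch (not part of the original module): binary dict
# keys get normalized to strings so the object survives a JSON round trip.
def _example_json_safe():
    item = {b'key': [1, 2, {b'nested': 'value'}]}
    assert json_safe(item) == {'key': [1, 2, {'nested': 'value'}]}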
def save_file(file, content, append=False):
mode = 'a' if append else 'w+'
if not isinstance(content, six.string_types):
mode = mode + 'b'
with open(file, mode) as f:
f.write(content)
f.flush()
def load_file(file_path, default=None, mode=None):
if not os.path.isfile(file_path):
return default
if not mode:
mode = 'r'
with open(file_path, mode) as f:
result = f.read()
return result
def to_str(obj, encoding=DEFAULT_ENCODING, errors='strict'):
""" If ``obj`` is an instance of ``binary_type``, return
``obj.decode(encoding, errors)``, otherwise return ``obj`` """
return obj.decode(encoding, errors) if isinstance(obj, six.binary_type) else obj
def to_bytes(obj, encoding=DEFAULT_ENCODING, errors='strict'):
""" If ``obj`` is an instance of ``text_type``, return
``obj.encode(encoding, errors)``, otherwise return ``obj`` """
return obj.encode(encoding, errors) if isinstance(obj, six.text_type) else obj
def cleanup(files=True, env=ENV_DEV, quiet=True):
if files:
cleanup_tmp_files()
def cleanup_threads_and_processes(quiet=True):
for t in TMP_THREADS:
t.stop(quiet=quiet)
for p in TMP_PROCESSES:
try:
p.terminate()
except Exception as e:
print(e)
# clear lists
clear_list(TMP_THREADS)
clear_list(TMP_PROCESSES)
def clear_list(l):
while len(l):
del l[0]
def cleanup_tmp_files():
for tmp in TMP_FILES:
try:
rm_rf(tmp)
except Exception:
pass # file likely doesn't exist, or permission denied
del TMP_FILES[:]
def new_tmp_file():
""" Return a path to a new temporary file. """
tmp_file, tmp_path = tempfile.mkstemp()
os.close(tmp_file)
TMP_FILES.append(tmp_path)
return tmp_path
def new_tmp_dir():
folder = new_tmp_file()
rm_rf(folder)
mkdir(folder)
return folder
def is_ip_address(addr):
try:
socket.inet_aton(addr)
return True
except socket.error:
return False
def is_zip_file(content):
stream = BytesIO(content)
return zipfile.is_zipfile(stream)
def unzip(path, target_dir, overwrite=True):
if is_alpine():
# Running the native command can be an order of magnitude faster in Alpine on Travis-CI
flags = '-o' if overwrite else ''
return run('cd %s; unzip %s %s' % (target_dir, flags, path))
try:
zip_ref = zipfile.ZipFile(path, 'r')
except Exception as e:
LOG.warning('Unable to open zip file: %s: %s' % (path, e))
raise e
# Make sure to preserve file permissions in the zip file
# https://www.burgundywall.com/post/preserving-file-perms-with-python-zipfile-module
try:
for file_entry in zip_ref.infolist():
_unzip_file_entry(zip_ref, file_entry, target_dir)
finally:
zip_ref.close()
def _unzip_file_entry(zip_ref, file_entry, target_dir):
"""
Extracts a Zipfile entry and preserves permissions
"""
zip_ref.extract(file_entry.filename, path=target_dir)
out_path = os.path.join(target_dir, file_entry.filename)
perm = file_entry.external_attr >> 16
os.chmod(out_path, perm or 0o777)
def untar(path, target_dir):
mode = 'r:gz' if path.endswith('gz') else 'r'
with tarfile.open(path, mode) as tar:
tar.extractall(path=target_dir)
def zip_contains_jar_entries(content, jar_path_prefix=None, match_single_jar=True):
try:
with tempfile.NamedTemporaryFile() as tf:
tf.write(content)
tf.flush()
with zipfile.ZipFile(tf.name, 'r') as zf:
jar_entries = [e for e in zf.infolist() if e.filename.lower().endswith('.jar')]
if match_single_jar and len(jar_entries) == 1 and len(zf.infolist()) == 1:
return True
matching_prefix = [e for e in jar_entries if
not jar_path_prefix or e.filename.lower().startswith(jar_path_prefix)]
return len(matching_prefix) > 0
except Exception:
return False
def is_jar_archive(content):
""" Determine whether `content` contains valid zip bytes representing a JAR archive
that contains at least one *.class file and a META-INF/MANIFEST.MF file. """
try:
with tempfile.NamedTemporaryFile() as tf:
tf.write(content)
tf.flush()
with zipfile.ZipFile(tf.name, 'r') as zf:
class_files = [e for e in zf.infolist() if e.filename.endswith('.class')]
manifest_file = [e for e in zf.infolist() if e.filename.upper() == 'META-INF/MANIFEST.MF']
if not class_files or not manifest_file:
return False
except Exception:
return False
return True
def is_root():
out = run('whoami').strip()
return out == 'root'
def cleanup_resources():
cleanup_tmp_files()
cleanup_threads_and_processes()
@synchronized(lock=SSL_CERT_LOCK)
def generate_ssl_cert(target_file=None, overwrite=False, random=False, return_content=False, serial_number=None):
# Note: Do NOT import "OpenSSL" at the root scope
# (Our test Lambdas are importing this file but don't have the module installed)
from OpenSSL import crypto
def all_exist(*files):
return all([os.path.exists(f) for f in files])
if target_file and not overwrite and os.path.exists(target_file):
key_file_name = '%s.key' % target_file
cert_file_name = '%s.crt' % target_file
if all_exist(key_file_name, cert_file_name):
return target_file, cert_file_name, key_file_name
if random and target_file:
if '.' in target_file:
target_file = target_file.replace('.', '.%s.' % short_uid(), 1)
else:
target_file = '%s.%s' % (target_file, short_uid())
# create a key pair
k = crypto.PKey()
k.generate_key(crypto.TYPE_RSA, 2048)
# create a self-signed cert
cert = crypto.X509()
subj = cert.get_subject()
subj.C = 'AU'
subj.ST = 'Some-State'
subj.L = 'Some-Locality'
subj.O = 'LocalStack Org' # noqa
subj.OU = 'Testing'
subj.CN = 'localhost'
# Note: new requirements for recent OSX versions: https://support.apple.com/en-us/HT210176
# More details: https://www.iol.unh.edu/blog/2019/10/10/macos-catalina-and-chrome-trust
serial_number = serial_number or 1001
cert.set_version(2)
cert.set_serial_number(serial_number)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(2 * 365 * 24 * 60 * 60)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(k)
alt_names = b'DNS:localhost,DNS:test.localhost.atlassian.io,IP:127.0.0.1'
cert.add_extensions([
crypto.X509Extension(b'subjectAltName', False, alt_names),
crypto.X509Extension(b'basicConstraints', True, b'CA:false'),
crypto.X509Extension(b'keyUsage', True, b'nonRepudiation,digitalSignature,keyEncipherment'),
crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth')
])
cert.sign(k, 'SHA256')
cert_file = StringIO()
key_file = StringIO()
cert_file.write(to_str(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)))
key_file.write(to_str(crypto.dump_privatekey(crypto.FILETYPE_PEM, k)))
cert_file_content = cert_file.getvalue().strip()
key_file_content = key_file.getvalue().strip()
file_content = '%s\n%s' % (key_file_content, cert_file_content)
if target_file:
key_file_name = '%s.key' % target_file
cert_file_name = '%s.crt' % target_file
# check existence to avoid permission denied issues:
# https://github.com/localstack/localstack/issues/1607
if not all_exist(target_file, key_file_name, cert_file_name):
for i in range(2):
try:
save_file(target_file, file_content)
save_file(key_file_name, key_file_content)
save_file(cert_file_name, cert_file_content)
break
except Exception as e:
if i > 0:
raise
LOG.info('Unable to store certificate file under %s, using tmp file instead: %s' % (target_file, e))
# Fix for https://github.com/localstack/localstack/issues/1743
target_file = '%s.pem' % new_tmp_file()
key_file_name = '%s.key' % target_file
cert_file_name = '%s.crt' % target_file
TMP_FILES.append(target_file)
TMP_FILES.append(key_file_name)
TMP_FILES.append(cert_file_name)
if not return_content:
return target_file, cert_file_name, key_file_name
return file_content
def run_safe(_python_lambda, print_error=False, **kwargs):
try:
return _python_lambda(**kwargs)
except Exception as e:
if print_error:
LOG.warning('Unable to execute function: %s' % e)
def run_cmd_safe(**kwargs):
return run_safe(run, print_error=False, **kwargs)
def run(cmd, cache_duration_secs=0, **kwargs):
def do_run(cmd):
return bootstrap.run(cmd, **kwargs)
if cache_duration_secs <= 0:
return do_run(cmd)
hash = md5(cmd)
cache_file = CACHE_FILE_PATTERN.replace('*', hash)
mkdir(os.path.dirname(CACHE_FILE_PATTERN))
if os.path.isfile(cache_file):
# check file age
mod_time = os.path.getmtime(cache_file)
time_now = now()
if mod_time > (time_now - cache_duration_secs):
            with open(cache_file) as f:
                result = f.read()
            return result
    result = do_run(cmd)
    with open(cache_file, 'w+') as f:
        f.write(result)
clean_cache()
return result
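# Editor's illustrative sketch (not part of the original module): with
# cache_duration_secs set, a repeated command inside the window is answered
# from the cache file instead of being executed again.
def _example_cached_run():
    cmd = 'python -c "import random; print(random.random())"'  # assumes a `python` on PATH
    first = run(cmd, cache_duration_secs=60)
    second = run(cmd, cache_duration_secs=60)
    assert first == second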
def clone(item):
return json.loads(json.dumps(item))
def clone_safe(item):
return clone(json_safe(item))
def remove_non_ascii(text):
# text = unicode(text, "utf-8")
text = text.decode('utf-8', CODEC_HANDLER_UNDERSCORE)
# text = unicodedata.normalize('NFKD', text)
text = text.encode('ascii', CODEC_HANDLER_UNDERSCORE)
return text
class NetrcBypassAuth(requests.auth.AuthBase):
def __call__(self, r):
return r
class _RequestsSafe(type):
""" Wrapper around requests library, which can prevent it from verifying
SSL certificates or reading credentials from ~/.netrc file """
verify_ssl = True
def __getattr__(self, name):
method = requests.__dict__.get(name.lower())
if not method:
return method
def _wrapper(*args, **kwargs):
if 'auth' not in kwargs:
kwargs['auth'] = NetrcBypassAuth()
if not self.verify_ssl and args[0].startswith('https://') and 'verify' not in kwargs:
kwargs['verify'] = False
return method(*args, **kwargs)
return _wrapper
# Create a class via the metaclass, so attribute lookups on `safe_requests`
# are routed through _RequestsSafe.__getattr__
class safe_requests(with_metaclass(_RequestsSafe)):
pass
def make_http_request(url, data=None, headers=None, method='GET'):
if is_string(method):
method = requests.__dict__[method.lower()]
return method(url, headers=headers, data=data, auth=NetrcBypassAuth(), verify=False)
class SafeStringIO(io.StringIO):
""" Safe StringIO implementation that doesn't fail if str is passed in Python 2. """
def write(self, obj):
if six.PY2 and isinstance(obj, str):
obj = obj.decode('unicode-escape')
return super(SafeStringIO, self).write(obj)
def clean_cache(file_pattern=CACHE_FILE_PATTERN,
last_clean_time=last_cache_clean_time, max_age=CACHE_MAX_AGE):
mutex_clean.acquire()
time_now = now()
try:
if last_clean_time['time'] > time_now - CACHE_CLEAN_TIMEOUT:
return
for cache_file in set(glob.glob(file_pattern)):
mod_time = os.path.getmtime(cache_file)
if time_now > mod_time + max_age:
rm_rf(cache_file)
last_clean_time['time'] = time_now
finally:
mutex_clean.release()
return time_now
def truncate(data, max_length=100):
return (data[:max_length] + '...') if len(data) > max_length else data
def parallelize(func, list, size=None):
if not size:
size = len(list)
if size <= 0:
return None
pool = Pool(size)
result = pool.map(func, list)
pool.close()
pool.join()
return result
def isoformat_milliseconds(t):
try:
return t.isoformat(timespec='milliseconds')
except TypeError:
return t.isoformat()[:-3]
# Code that requires util functions from above
CACHE_FILE_PATTERN = CACHE_FILE_PATTERN.replace('_random_dir_', short_uid())
| [
[
[
7,
9
],
[
7369,
7371
],
[
33696,
33698
]
],
[
[
17,
19
],
[
1018,
1020
],
[
2524,
2526
],
[
16546,
16548
],
[
16595,
16597
],
[
17054,
17056
],
[
17220,
17222
],
[
17274,
17276
],
[
17330,
17332
],
[
17339,
17341
],
[
17421,
17423
],
[
17430,
17432
],
[
17529,
17531
],
[
17587,
17589
],
[
17647,
17649
],
[
17656,
17658
],
[
17738,
17740
],
[
17747,
17749
],
[
17885,
17887
],
[
18321,
18323
],
[
18350,
18352
],
[
18377,
18379
],
[
18429,
18431
],
[
18556,
18558
],
[
19147,
19149
],
[
19162,
19164
],
[
19198,
19200
],
[
19210,
19212
],
[
19815,
19817
],
[
19838,
19840
],
[
21107,
21109
],
[
22666,
22668
],
[
24156,
24158
],
[
25503,
25505
],
[
25595,
25597
],
[
27828,
27830
],
[
31764,
31766
],
[
31808,
31810
],
[
31880,
31882
],
[
34315,
34317
],
[
27751,
27753
]
],
[
[
27,
29
],
[
10631,
10633
],
[
20430,
20432
]
],
[
[
37,
40
],
[
17154,
17157
]
],
[
[
48,
51
],
[
17190,
17193
]
],
[
[
59,
62
],
[
7208,
7211
],
[
7237,
7240
],
[
7268,
7271
],
[
7303,
7306
],
[
3489,
3492
],
[
3524,
3527
],
[
8027,
8030
],
[
8096,
8099
],
[
8165,
8168
],
[
8238,
8241
]
],
[
[
70,
74
],
[
1650,
1654
],
[
6092,
6096
],
[
6364,
6368
],
[
6449,
6453
],
[
21789,
21793
],
[
21800,
21804
],
[
21909,
21913
],
[
21920,
21924
],
[
32257,
32261
],
[
32268,
32272
]
],
[
[
82,
86
],
[
21634,
21638
]
],
[
[
94,
98
],
[
13965,
13969
],
[
14156,
14160
]
],
[
[
106,
110
],
[
34266,
34270
]
],
[
[
118,
124
],
[
15844,
15850
]
],
[
[
132,
138
],
[
11435,
11441
],
[
11508,
11514
],
[
11616,
11622
],
[
11630,
11636
],
[
11730,
11736
],
[
12435,
12441
],
[
13386,
13392
],
[
13400,
13406
],
[
13416,
13422
],
[
24373,
24379
],
[
24427,
24433
]
],
[
[
146,
153
],
[
10677,
10684
]
],
[
[
161,
168
],
[
1807,
1814
],
[
6209,
6216
]
],
[
[
176,
183
],
[
1397,
1404
]
],
[
[
191,
198
],
[
25719,
25726
]
],
[
[
206,
213
],
[
24531,
24538
],
[
24859,
24866
],
[
26007,
26014
],
[
26839,
26846
]
],
[
[
221,
229
],
[
15827,
15835
]
],
[
[
237,
245
],
[
16475,
16483
]
],
[
[
253,
261
],
[
1031,
1039
],
[
24133,
24141
],
[
25900,
25908
],
[
26732,
26740
]
],
[
[
269,
278
],
[
1136,
1145
],
[
1610,
1619
],
[
9394,
9403
],
[
14235,
14244
]
],
[
[
286,
296
],
[
3333,
3343
],
[
21191,
21201
],
[
21308,
21318
]
],
[
[
304,
307
],
[
2018,
2021
],
[
10192,
10195
],
[
10338,
10341
],
[
10461,
10464
],
[
11254,
11257
],
[
13731,
13734
],
[
22482,
22485
],
[
23083,
23086
],
[
23359,
23362
],
[
33836,
33839
]
],
[
[
315,
321
],
[
18463,
18469
],
[
18585,
18591
],
[
18625,
18631
]
],
[
[
329,
337
],
[
32615,
32623
],
[
18864,
18872
],
[
32930,
32938
],
[
33551,
33559
]
],
[
[
345,
357
],
[
11919,
11922
]
],
[
[
365,
374
],
[
9926,
9935
]
],
[
[
390,
397
],
[
24503,
24510
]
],
[
[
421,
428
],
[
11608,
11615
]
],
[
[
450,
458
],
[
1956,
1964
],
[
13689,
13697
],
[
13778,
13786
],
[
16373,
16381
],
[
16423,
16431
]
],
[
[
475,
489
],
[
33397,
33411
]
],
[
[
512,
533
],
[
29529,
29537
],
[
29555,
29563
]
],
[
[
569,
577
],
[
11287,
11295
]
],
[
[
612,
616
],
[
34776,
34780
]
],
[
[
640,
646
],
[
10777,
10783
],
[
13577,
13583
]
],
[
[
677,
693
],
[
22860,
22876
],
[
23138,
23154
]
],
[
[
727,
734
],
[
23413,
23420
]
],
[
[
764,
773
],
[
10960,
10969
],
[
20908,
20917
],
[
20959,
20968
],
[
31588,
31597
]
],
[
[
813,
823
],
[
2212,
2222
],
[
2681,
2691
]
],
[
[
868,
877
],
[
23873,
23882
],
[
24022,
24031
],
[
24179,
24188
],
[
30959,
30968
],
[
31001,
31010
],
[
31045,
31054
]
],
[
[
883,
894
],
[
23539,
23550
],
[
23732,
23743
]
],
[
[
900,
913
],
[
23593,
23606
],
[
23760,
23773
]
],
[
[
944,
963
],
[
34196,
34215
]
],
[
[
973,
986
],
[
34075,
34088
]
],
[
[
997,
1015
],
[
34000,
34018
],
[
35092,
35110
]
],
[
[
1086,
1107
],
[
34044,
34065
]
],
[
[
1122,
1133
],
[
34096,
34107
],
[
34488,
34499
]
],
[
[
1178,
1194
],
[
13638,
13654
]
],
[
[
1217,
1240
]
],
[
[
1267,
1291
],
[
32442,
32466
],
[
32549,
32573
]
],
[
[
1340,
1359
],
[
19420,
19439
]
],
[
[
1391,
1394
],
[
4498,
4501
],
[
4647,
4650
],
[
5485,
5488
],
[
5877,
5880
],
[
14657,
14660
],
[
16963,
16966
],
[
19241,
19244
],
[
19595,
19598
],
[
19712,
19715
],
[
19878,
19881
],
[
20010,
20013
],
[
20128,
20131
],
[
24921,
24924
],
[
30584,
30587
],
[
31368,
31371
]
],
[
[
1498,
1511
],
[
4823,
4836
]
],
[
[
1544,
1549
],
[
21040,
21045
],
[
21059,
21064
],
[
21258,
21263
],
[
21371,
21376
]
],
[
[
1594,
1607
],
[
27404,
27417
]
],
[
[
1636,
1649
],
[
2103,
2116
],
[
21821,
21834
],
[
21941,
21954
]
],
[
[
2193,
2211
]
],
[
[
5948,
5958
],
[
6533,
6543
]
],
[
[
6972,
6985
],
[
7543,
7556
]
],
[
[
9723,
9735
],
[
27386,
27398
]
],
[
[
10113,
10122
],
[
10431,
10440
],
[
10614,
10623
],
[
33515,
33524
]
],
[
[
10397,
10415
]
],
[
[
10509,
10518
]
],
[
[
10656,
10659
],
[
31690,
31693
]
],
[
[
10753,
10762
]
],
[
[
10802,
10812
]
],
[
[
10919,
10945
]
],
[
[
11005,
11017
],
[
13187,
13199
]
],
[
[
12868,
12886
]
],
[
[
13355,
13372
]
],
[
[
13531,
13551
]
],
[
[
13610,
13619
]
],
[
[
13846,
13851
],
[
13301,
13306
]
],
[
[
14202,
14218
]
],
[
[
14336,
14351
],
[
14548,
14563
]
],
[
[
14832,
14843
]
],
[
[
15254,
15268
],
[
15543,
15557
],
[
15734,
15748
]
],
[
[
15789,
15802
]
],
[
[
15880,
15890
],
[
16161,
16171
],
[
16275,
16285
]
],
[
[
16344,
16351
]
],
[
[
16398,
16401
],
[
31928,
31931
],
[
34133,
34136
]
],
[
[
16445,
16451
],
[
16366,
16372
],
[
16416,
16422
]
],
[
[
16520,
16525
],
[
24297,
24302
],
[
31758,
31763
]
],
[
[
16763,
16778
]
],
[
[
17095,
17102
]
],
[
[
17476,
17483
],
[
18209,
18216
]
],
[
[
17789,
17794
],
[
23909,
23914
],
[
24279,
24284
],
[
34406,
34411
]
],
[
[
18489,
18493
]
],
[
[
18657,
18665
],
[
19951,
19959
]
],
[
[
20233,
20251
]
],
[
[
20734,
20743
]
],
[
[
20884,
20893
],
[
21417,
21426
]
],
[
[
20936,
20944
],
[
21502,
21510
]
],
[
[
20986,
20995
],
[
18021,
18030
],
[
21458,
21467
],
[
24612,
24621
]
],
[
[
21398,
21406
]
],
[
[
21606,
21615
],
[
35135,
35144
],
[
28189,
28198
],
[
28269,
28278
]
],
[
[
21659,
21668
],
[
32327,
32336
]
],
[
[
21963,
21976
],
[
21874,
21887
],
[
22191,
22204
],
[
22332,
22345
]
],
[
[
22376,
22385
],
[
30274,
30283
],
[
30331,
30340
],
[
30394,
30403
]
],
[
[
22608,
22617
]
],
[
[
22839,
22845
],
[
2055,
2061
],
[
21184,
21190
],
[
22319,
22325
],
[
29586,
29592
],
[
29665,
29671
],
[
2826,
2832
]
],
[
[
23115,
23123
],
[
10704,
10712
]
],
[
[
23389,
23396
]
],
[
[
23483,
23512
],
[
27351,
27380
]
],
[
[
23781,
23791
],
[
23721,
23731
],
[
23749,
23759
]
],
[
[
23837,
23854
],
[
23457,
23474
],
[
27327,
27344
]
],
[
[
24041,
24053
],
[
24260,
24272
],
[
30813,
30825
]
],
[
[
24232,
24243
]
],
[
[
24335,
24348
]
],
[
[
24468,
24479
]
],
[
[
24564,
24569
]
],
[
[
25307,
25324
],
[
25213,
25230
]
],
[
[
25635,
25640
]
],
[
[
25798,
25822
]
],
[
[
26512,
26526
]
],
[
[
27228,
27235
]
],
[
[
27302,
27319
]
],
[
[
27423,
27440
]
],
[
[
31201,
31209
],
[
31459,
31467
]
],
[
[
31424,
31436
]
],
[
[
31508,
31511
],
[
3096,
3099
],
[
10833,
10836
],
[
14285,
14288
],
[
18066,
18069
],
[
24778,
24781
],
[
27249,
27252
],
[
31468,
31471
]
],
[
[
32233,
32238
],
[
32321,
32326
]
],
[
[
32292,
32302
]
],
[
[
32350,
32366
]
],
[
[
32599,
32614
],
[
33642,
33657
],
[
33125,
33140
]
],
[
[
32692,
32705
],
[
33412,
33425
]
],
[
[
33383,
33396
],
[
12732,
12745
]
],
[
[
33444,
33461
]
],
[
[
33683,
33695
],
[
33938,
33950
]
],
[
[
33975,
33986
],
[
32195,
32206
]
],
[
[
34536,
34544
]
],
[
[
34649,
34660
]
],
[
[
34878,
34900
]
],
[
[
35071,
35089
],
[
31716,
31734
],
[
31780,
31798
]
]
] |
# Copyright 2017 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Logical sessions for ordering sequential operations.
Requires MongoDB 3.6.
.. versionadded:: 3.6
Causally Consistent Reads
=========================
.. code-block:: python
with client.start_session(causal_consistency=True) as session:
collection = client.db.collection
collection.update_one({'_id': 1}, {'$set': {'x': 10}}, session=session)
secondary_c = collection.with_options(
read_preference=ReadPreference.SECONDARY)
# A secondary read waits for replication of the write.
secondary_c.find_one({'_id': 1}, session=session)
If `causal_consistency` is True (the default), read operations that use
the session are causally ordered after previous reads and writes. Using a
causally consistent session, an application can read its own writes and is
guaranteed monotonic reads, even when reading from replica set secondaries.
.. mongodoc:: causal-consistency
.. _transactions-ref:
Transactions
============
MongoDB 4.0 adds support for transactions on replica set primaries. A
transaction is associated with a :class:`ClientSession`. To start a transaction
on a session, use :meth:`ClientSession.start_transaction` in a with-statement.
Then, execute an operation within the transaction by passing the session to the
operation:
.. code-block:: python
orders = client.db.orders
inventory = client.db.inventory
with client.start_session() as session:
with session.start_transaction():
orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}},
{"$inc": {"qty": -100}}, session=session)
Upon normal completion of the ``with session.start_transaction()`` block, the
transaction automatically calls :meth:`ClientSession.commit_transaction`.
If the block exits with an exception, the transaction automatically calls
:meth:`ClientSession.abort_transaction`.
In general, multi-document transactions only support read/write (CRUD)
operations on existing collections. However, MongoDB 4.4 adds support for
creating collections and indexes inside a transaction, including implicitly
via an insert operation that would result in the creation of a new collection.
For a complete description of all the supported and unsupported operations
see the `MongoDB server's documentation for transactions
<http://dochub.mongodb.org/core/transactions>`_.
A session may only have a single active transaction at a time; multiple
transactions on the same session can be executed in sequence.
.. versionadded:: 3.7
Sharded Transactions
^^^^^^^^^^^^^^^^^^^^
PyMongo 3.9 adds support for transactions on sharded clusters running MongoDB
4.2. Sharded transactions have the same API as replica set transactions.
When running a transaction against a sharded cluster, the session is
pinned to the mongos server selected for the first operation in the
transaction. All subsequent operations that are part of the same transaction
are routed to the same mongos server. When the transaction is completed, by
running either commitTransaction or abortTransaction, the session is unpinned.
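For example (an illustrative sketch, not taken from the driver documentation;
``orders`` and ``inventory`` are the collections from the example above, and
the calls are identical because the driver handles mongos pinning transparently):
.. code-block:: python
    with client.start_session() as session:
        with session.start_transaction():
            orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
            inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}},
                                 {"$inc": {"qty": -100}}, session=session)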
.. versionadded:: 3.9
.. mongodoc:: transactions
Classes
=======
"""
import collections
import os
import sys
import uuid
from bson.binary import Binary
from bson.int64 import Int64
from bson.py3compat import abc, integer_types, reraise_instance
from bson.son import SON
from bson.timestamp import Timestamp
from pymongo import monotonic
from pymongo.errors import (ConfigurationError,
ConnectionFailure,
InvalidOperation,
OperationFailure,
PyMongoError,
ServerSelectionTimeoutError,
WTimeoutError)
from pymongo.helpers import _RETRYABLE_ERROR_CODES
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import ReadPreference, _ServerMode
from pymongo.write_concern import WriteConcern
class SessionOptions(object):
"""Options for a new :class:`ClientSession`.
:Parameters:
- `causal_consistency` (optional): If True (the default), read
operations are causally ordered within the session.
- `default_transaction_options` (optional): The default
TransactionOptions to use for transactions started on this session.
"""
def __init__(self,
causal_consistency=True,
default_transaction_options=None):
self._causal_consistency = causal_consistency
if default_transaction_options is not None:
if not isinstance(default_transaction_options, TransactionOptions):
raise TypeError(
"default_transaction_options must be an instance of "
"pymongo.client_session.TransactionOptions, not: %r" %
(default_transaction_options,))
self._default_transaction_options = default_transaction_options
@property
def causal_consistency(self):
"""Whether causal consistency is configured."""
return self._causal_consistency
@property
def default_transaction_options(self):
"""The default TransactionOptions to use for transactions started on
this session.
.. versionadded:: 3.7
"""
return self._default_transaction_options
class TransactionOptions(object):
"""Options for :meth:`ClientSession.start_transaction`.
:Parameters:
- `read_concern` (optional): The
:class:`~pymongo.read_concern.ReadConcern` to use for this transaction.
      If ``None`` (the default) the :attr:`read_concern` of
the :class:`MongoClient` is used.
- `write_concern` (optional): The
:class:`~pymongo.write_concern.WriteConcern` to use for this
      transaction. If ``None`` (the default) the :attr:`write_concern` of
the :class:`MongoClient` is used.
- `read_preference` (optional): The read preference to use. If
``None`` (the default) the :attr:`read_preference` of this
:class:`MongoClient` is used. See :mod:`~pymongo.read_preferences`
for options. Transactions which read must use
:attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
- `max_commit_time_ms` (optional): The maximum amount of time to allow a
single commitTransaction command to run. This option is an alias for
maxTimeMS option on the commitTransaction command. If ``None`` (the
default) maxTimeMS is not used.
.. versionchanged:: 3.9
Added the ``max_commit_time_ms`` option.
.. versionadded:: 3.7
"""
def __init__(self, read_concern=None, write_concern=None,
read_preference=None, max_commit_time_ms=None):
self._read_concern = read_concern
self._write_concern = write_concern
self._read_preference = read_preference
self._max_commit_time_ms = max_commit_time_ms
if read_concern is not None:
if not isinstance(read_concern, ReadConcern):
raise TypeError("read_concern must be an instance of "
"pymongo.read_concern.ReadConcern, not: %r" %
(read_concern,))
if write_concern is not None:
if not isinstance(write_concern, WriteConcern):
raise TypeError("write_concern must be an instance of "
"pymongo.write_concern.WriteConcern, not: %r" %
(write_concern,))
if not write_concern.acknowledged:
raise ConfigurationError(
"transactions do not support unacknowledged write concern"
": %r" % (write_concern,))
if read_preference is not None:
if not isinstance(read_preference, _ServerMode):
raise TypeError("%r is not valid for read_preference. See "
"pymongo.read_preferences for valid "
"options." % (read_preference,))
if max_commit_time_ms is not None:
if not isinstance(max_commit_time_ms, integer_types):
raise TypeError(
"max_commit_time_ms must be an integer or None")
@property
def read_concern(self):
"""This transaction's :class:`~pymongo.read_concern.ReadConcern`."""
return self._read_concern
@property
def write_concern(self):
"""This transaction's :class:`~pymongo.write_concern.WriteConcern`."""
return self._write_concern
@property
def read_preference(self):
"""This transaction's :class:`~pymongo.read_preferences.ReadPreference`.
"""
return self._read_preference
@property
def max_commit_time_ms(self):
"""The maxTimeMS to use when running a commitTransaction command.
.. versionadded:: 3.9
"""
return self._max_commit_time_ms
def _validate_session_write_concern(session, write_concern):
"""Validate that an explicit session is not used with an unack'ed write.
Returns the session to use for the next operation.
"""
if session:
if write_concern is not None and not write_concern.acknowledged:
# For unacknowledged writes without an explicit session,
# drivers SHOULD NOT use an implicit session. If a driver
# creates an implicit session for unacknowledged writes
# without an explicit session, the driver MUST NOT send the
# session ID.
if session._implicit:
return None
else:
raise ConfigurationError(
'Explicit sessions are incompatible with '
'unacknowledged write concern: %r' % (
write_concern,))
return session
class _TransactionContext(object):
"""Internal transaction context manager for start_transaction."""
def __init__(self, session):
self.__session = session
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.__session.in_transaction:
if exc_val is None:
self.__session.commit_transaction()
else:
self.__session.abort_transaction()
class _TxnState(object):
NONE = 1
STARTING = 2
IN_PROGRESS = 3
COMMITTED = 4
COMMITTED_EMPTY = 5
ABORTED = 6
class _Transaction(object):
"""Internal class to hold transaction information in a ClientSession."""
def __init__(self, opts):
self.opts = opts
self.state = _TxnState.NONE
self.sharded = False
self.pinned_address = None
self.recovery_token = None
def active(self):
return self.state in (_TxnState.STARTING, _TxnState.IN_PROGRESS)
def reset(self):
self.state = _TxnState.NONE
self.sharded = False
self.pinned_address = None
self.recovery_token = None
def _reraise_with_unknown_commit(exc):
"""Re-raise an exception with the UnknownTransactionCommitResult label."""
exc._add_error_label("UnknownTransactionCommitResult")
reraise_instance(exc, trace=sys.exc_info()[2])
def _max_time_expired_error(exc):
"""Return true if exc is a MaxTimeMSExpired error."""
return isinstance(exc, OperationFailure) and exc.code == 50
# From the transactions spec, all the retryable writes errors plus
# WriteConcernFailed.
_UNKNOWN_COMMIT_ERROR_CODES = _RETRYABLE_ERROR_CODES | frozenset([
64, # WriteConcernFailed
50, # MaxTimeMSExpired
])
# From the Convenient API for Transactions spec, with_transaction must
# halt retries after 120 seconds.
# This limit is non-configurable and was chosen to be twice the 60 second
# default value of MongoDB's `transactionLifetimeLimitSeconds` parameter.
_WITH_TRANSACTION_RETRY_TIME_LIMIT = 120
def _within_time_limit(start_time):
"""Are we within the with_transaction retry limit?"""
return monotonic.time() - start_time < _WITH_TRANSACTION_RETRY_TIME_LIMIT
class ClientSession(object):
"""A session for ordering sequential operations."""
def __init__(self, client, server_session, options, authset, implicit):
# A MongoClient, a _ServerSession, a SessionOptions, and a set.
self._client = client
self._server_session = server_session
self._options = options
self._authset = authset
self._cluster_time = None
self._operation_time = None
# Is this an implicitly created session?
self._implicit = implicit
self._transaction = _Transaction(None)
def end_session(self):
"""Finish this session. If a transaction has started, abort it.
It is an error to use the session after the session has ended.
"""
self._end_session(lock=True)
def _end_session(self, lock):
if self._server_session is not None:
try:
if self.in_transaction:
self.abort_transaction()
finally:
self._client._return_server_session(self._server_session, lock)
self._server_session = None
def _check_ended(self):
if self._server_session is None:
raise InvalidOperation("Cannot use ended session")
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._end_session(lock=True)
@property
def client(self):
"""The :class:`~pymongo.mongo_client.MongoClient` this session was
created from.
"""
return self._client
@property
def options(self):
"""The :class:`SessionOptions` this session was created with."""
return self._options
@property
def session_id(self):
"""A BSON document, the opaque server session identifier."""
self._check_ended()
return self._server_session.session_id
@property
def cluster_time(self):
"""The cluster time returned by the last operation executed
in this session.
"""
return self._cluster_time
@property
def operation_time(self):
"""The operation time returned by the last operation executed
in this session.
"""
return self._operation_time
def _inherit_option(self, name, val):
"""Return the inherited TransactionOption value."""
if val:
return val
txn_opts = self.options.default_transaction_options
val = txn_opts and getattr(txn_opts, name)
if val:
return val
return getattr(self.client, name)
def with_transaction(self, callback, read_concern=None, write_concern=None,
read_preference=None, max_commit_time_ms=None):
"""Execute a callback in a transaction.
This method starts a transaction on this session, executes ``callback``
once, and then commits the transaction. For example::
def callback(session):
orders = session.client.db.orders
inventory = session.client.db.inventory
orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}},
{"$inc": {"qty": -100}}, session=session)
with client.start_session() as session:
session.with_transaction(callback)
To pass arbitrary arguments to the ``callback``, wrap your callable
with a ``lambda`` like this::
def callback(session, custom_arg, custom_kwarg=None):
# Transaction operations...
with client.start_session() as session:
session.with_transaction(
lambda s: callback(s, "custom_arg", custom_kwarg=1))
In the event of an exception, ``with_transaction`` may retry the commit
or the entire transaction, therefore ``callback`` may be invoked
multiple times by a single call to ``with_transaction``. Developers
        should be mindful of this possibility when writing a ``callback`` that
modifies application state or has any other side-effects.
Note that even when the ``callback`` is invoked multiple times,
``with_transaction`` ensures that the transaction will be committed
at-most-once on the server.
The ``callback`` should not attempt to start new transactions, but
should simply run operations meant to be contained within a
transaction. The ``callback`` should also not commit the transaction;
this is handled automatically by ``with_transaction``. If the
``callback`` does commit or abort the transaction without error,
however, ``with_transaction`` will return without taking further
action.
When ``callback`` raises an exception, ``with_transaction``
automatically aborts the current transaction. When ``callback`` or
:meth:`~ClientSession.commit_transaction` raises an exception that
includes the ``"TransientTransactionError"`` error label,
``with_transaction`` starts a new transaction and re-executes
the ``callback``.
When :meth:`~ClientSession.commit_transaction` raises an exception with
the ``"UnknownTransactionCommitResult"`` error label,
``with_transaction`` retries the commit until the result of the
transaction is known.
This method will cease retrying after 120 seconds has elapsed. This
timeout is not configurable and any exception raised by the
``callback`` or by :meth:`ClientSession.commit_transaction` after the
timeout is reached will be re-raised. Applications that desire a
different timeout duration should not use this method.
:Parameters:
- `callback`: The callable ``callback`` to run inside a transaction.
          The callable must accept a single argument: this session. Note that
under certain error conditions the callback may be run multiple
times.
- `read_concern` (optional): The
:class:`~pymongo.read_concern.ReadConcern` to use for this
transaction.
- `write_concern` (optional): The
:class:`~pymongo.write_concern.WriteConcern` to use for this
transaction.
- `read_preference` (optional): The read preference to use for this
transaction. If ``None`` (the default) the :attr:`read_preference`
of this :class:`Database` is used. See
:mod:`~pymongo.read_preferences` for options.
:Returns:
The return value of the ``callback``.
.. versionadded:: 3.9
"""
start_time = monotonic.time()
while True:
self.start_transaction(
read_concern, write_concern, read_preference,
max_commit_time_ms)
try:
ret = callback(self)
except Exception as exc:
if self.in_transaction:
self.abort_transaction()
if (isinstance(exc, PyMongoError) and
exc.has_error_label("TransientTransactionError") and
_within_time_limit(start_time)):
# Retry the entire transaction.
continue
raise
if not self.in_transaction:
# Assume callback intentionally ended the transaction.
return ret
while True:
try:
self.commit_transaction()
except PyMongoError as exc:
if (exc.has_error_label("UnknownTransactionCommitResult")
and _within_time_limit(start_time)
and not _max_time_expired_error(exc)):
# Retry the commit.
continue
if (exc.has_error_label("TransientTransactionError") and
_within_time_limit(start_time)):
# Retry the entire transaction.
break
raise
# Commit succeeded.
return ret
def start_transaction(self, read_concern=None, write_concern=None,
read_preference=None, max_commit_time_ms=None):
"""Start a multi-statement transaction.
Takes the same arguments as :class:`TransactionOptions`.
.. versionchanged:: 3.9
Added the ``max_commit_time_ms`` option.
.. versionadded:: 3.7
"""
self._check_ended()
if self.in_transaction:
raise InvalidOperation("Transaction already in progress")
read_concern = self._inherit_option("read_concern", read_concern)
write_concern = self._inherit_option("write_concern", write_concern)
read_preference = self._inherit_option(
"read_preference", read_preference)
if max_commit_time_ms is None:
opts = self.options.default_transaction_options
if opts:
max_commit_time_ms = opts.max_commit_time_ms
self._transaction.opts = TransactionOptions(
read_concern, write_concern, read_preference, max_commit_time_ms)
self._transaction.reset()
self._transaction.state = _TxnState.STARTING
self._start_retryable_write()
return _TransactionContext(self)
def commit_transaction(self):
"""Commit a multi-statement transaction.
.. versionadded:: 3.7
"""
self._check_ended()
retry = False
state = self._transaction.state
if state is _TxnState.NONE:
raise InvalidOperation("No transaction started")
elif state in (_TxnState.STARTING, _TxnState.COMMITTED_EMPTY):
# Server transaction was never started, no need to send a command.
self._transaction.state = _TxnState.COMMITTED_EMPTY
return
elif state is _TxnState.ABORTED:
raise InvalidOperation(
"Cannot call commitTransaction after calling abortTransaction")
elif state is _TxnState.COMMITTED:
# We're explicitly retrying the commit, move the state back to
# "in progress" so that in_transaction returns true.
self._transaction.state = _TxnState.IN_PROGRESS
retry = True
try:
self._finish_transaction_with_retry("commitTransaction", retry)
except ConnectionFailure as exc:
# We do not know if the commit was successfully applied on the
# server or if it satisfied the provided write concern, set the
# unknown commit error label.
exc._remove_error_label("TransientTransactionError")
_reraise_with_unknown_commit(exc)
except WTimeoutError as exc:
# We do not know if the commit has satisfied the provided write
# concern, add the unknown commit error label.
_reraise_with_unknown_commit(exc)
except OperationFailure as exc:
if exc.code not in _UNKNOWN_COMMIT_ERROR_CODES:
                # The server reports errorLabels in this case.
raise
# We do not know if the commit was successfully applied on the
# server or if it satisfied the provided write concern, set the
# unknown commit error label.
_reraise_with_unknown_commit(exc)
finally:
self._transaction.state = _TxnState.COMMITTED
def abort_transaction(self):
"""Abort a multi-statement transaction.
.. versionadded:: 3.7
"""
self._check_ended()
state = self._transaction.state
if state is _TxnState.NONE:
raise InvalidOperation("No transaction started")
elif state is _TxnState.STARTING:
# Server transaction was never started, no need to send a command.
self._transaction.state = _TxnState.ABORTED
return
elif state is _TxnState.ABORTED:
raise InvalidOperation("Cannot call abortTransaction twice")
elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY):
raise InvalidOperation(
"Cannot call abortTransaction after calling commitTransaction")
try:
self._finish_transaction_with_retry("abortTransaction", False)
except (OperationFailure, ConnectionFailure):
# The transactions spec says to ignore abortTransaction errors.
pass
finally:
self._transaction.state = _TxnState.ABORTED
    def _finish_transaction_with_retry(self, command_name, explicit_retry):
        """Run commit or abort with one retry after any retryable error.
        :Parameters:
          - `command_name`: Either "commitTransaction" or "abortTransaction".
          - `explicit_retry`: True when this is an explicit commit retry attempt,
            i.e. the application called session.commit_transaction() twice.
        """
        # This can be refactored with MongoClient._retry_with_session.
        try:
            return self._finish_transaction(command_name, explicit_retry)
except ServerSelectionTimeoutError:
raise
except ConnectionFailure as exc:
try:
return self._finish_transaction(command_name, True)
except ServerSelectionTimeoutError:
# Raise the original error so the application can infer that
# an attempt was made.
raise exc
except OperationFailure as exc:
if exc.code not in _RETRYABLE_ERROR_CODES:
raise
try:
return self._finish_transaction(command_name, True)
except ServerSelectionTimeoutError:
# Raise the original error so the application can infer that
# an attempt was made.
raise exc
def _finish_transaction(self, command_name, retrying):
opts = self._transaction.opts
wc = opts.write_concern
cmd = SON([(command_name, 1)])
if command_name == "commitTransaction":
if opts.max_commit_time_ms:
cmd['maxTimeMS'] = opts.max_commit_time_ms
# Transaction spec says that after the initial commit attempt,
# subsequent commitTransaction commands should be upgraded to use
# w:"majority" and set a default value of 10 seconds for wtimeout.
if retrying:
wc_doc = wc.document
wc_doc["w"] = "majority"
wc_doc.setdefault("wtimeout", 10000)
wc = WriteConcern(**wc_doc)
if self._transaction.recovery_token:
cmd['recoveryToken'] = self._transaction.recovery_token
with self._client._socket_for_writes(self) as sock_info:
return self._client.admin._command(
sock_info,
cmd,
session=self,
write_concern=wc,
parse_write_concern_error=True)
def _advance_cluster_time(self, cluster_time):
"""Internal cluster time helper."""
if self._cluster_time is None:
self._cluster_time = cluster_time
elif cluster_time is not None:
if cluster_time["clusterTime"] > self._cluster_time["clusterTime"]:
self._cluster_time = cluster_time
def advance_cluster_time(self, cluster_time):
"""Update the cluster time for this session.
:Parameters:
- `cluster_time`: The
:data:`~pymongo.client_session.ClientSession.cluster_time` from
another `ClientSession` instance.
"""
if not isinstance(cluster_time, abc.Mapping):
raise TypeError(
"cluster_time must be a subclass of collections.Mapping")
if not isinstance(cluster_time.get("clusterTime"), Timestamp):
raise ValueError("Invalid cluster_time")
self._advance_cluster_time(cluster_time)
def _advance_operation_time(self, operation_time):
"""Internal operation time helper."""
if self._operation_time is None:
self._operation_time = operation_time
elif operation_time is not None:
if operation_time > self._operation_time:
self._operation_time = operation_time
def advance_operation_time(self, operation_time):
"""Update the operation time for this session.
:Parameters:
- `operation_time`: The
:data:`~pymongo.client_session.ClientSession.operation_time` from
another `ClientSession` instance.
"""
if not isinstance(operation_time, Timestamp):
raise TypeError("operation_time must be an instance "
"of bson.timestamp.Timestamp")
self._advance_operation_time(operation_time)
def _process_response(self, reply):
"""Process a response to a command that was run with this session."""
self._advance_cluster_time(reply.get('$clusterTime'))
self._advance_operation_time(reply.get('operationTime'))
if self.in_transaction and self._transaction.sharded:
recovery_token = reply.get('recoveryToken')
if recovery_token:
self._transaction.recovery_token = recovery_token
@property
def has_ended(self):
"""True if this session is finished."""
return self._server_session is None
@property
def in_transaction(self):
"""True if this session has an active multi-statement transaction.
.. versionadded:: 3.10
"""
return self._transaction.active()
@property
def _pinned_address(self):
"""The mongos address this transaction was created on."""
if self._transaction.active():
return self._transaction.pinned_address
return None
def _pin_mongos(self, server):
"""Pin this session to the given mongos Server."""
self._transaction.sharded = True
self._transaction.pinned_address = server.description.address
def _unpin_mongos(self):
"""Unpin this session from any pinned mongos address."""
self._transaction.pinned_address = None
def _txn_read_preference(self):
"""Return read preference of this transaction or None."""
if self.in_transaction:
return self._transaction.opts.read_preference
return None
def _apply_to(self, command, is_retryable, read_preference):
self._check_ended()
self._server_session.last_use = monotonic.time()
command['lsid'] = self._server_session.session_id
if not self.in_transaction:
self._transaction.reset()
if is_retryable:
command['txnNumber'] = self._server_session.transaction_id
return
if self.in_transaction:
if read_preference != ReadPreference.PRIMARY:
raise InvalidOperation(
'read preference in a transaction must be primary, not: '
'%r' % (read_preference,))
if self._transaction.state == _TxnState.STARTING:
# First command begins a new transaction.
self._transaction.state = _TxnState.IN_PROGRESS
command['startTransaction'] = True
if self._transaction.opts.read_concern:
rc = self._transaction.opts.read_concern.document
else:
rc = {}
if (self.options.causal_consistency
and self.operation_time is not None):
rc['afterClusterTime'] = self.operation_time
if rc:
command['readConcern'] = rc
command['txnNumber'] = self._server_session.transaction_id
command['autocommit'] = False
def _start_retryable_write(self):
self._check_ended()
self._server_session.inc_transaction_id()
class _ServerSession(object):
def __init__(self, pool_id):
# Ensure id is type 4, regardless of CodecOptions.uuid_representation.
self.session_id = {'id': Binary(uuid.uuid4().bytes, 4)}
self.last_use = monotonic.time()
self._transaction_id = 0
self.dirty = False
self.pool_id = pool_id
def mark_dirty(self):
"""Mark this session as dirty.
A server session is marked dirty when a command fails with a network
error. Dirty sessions are later discarded from the server session pool.
"""
self.dirty = True
def timed_out(self, session_timeout_minutes):
idle_seconds = monotonic.time() - self.last_use
# Timed out if we have less than a minute to live.
return idle_seconds > (session_timeout_minutes - 1) * 60
@property
def transaction_id(self):
"""Positive 64-bit integer."""
return Int64(self._transaction_id)
def inc_transaction_id(self):
self._transaction_id += 1
class _ServerSessionPool(collections.deque):
"""Pool of _ServerSession objects.
This class is not thread-safe, access it while holding the Topology lock.
"""
def __init__(self, *args, **kwargs):
super(_ServerSessionPool, self).__init__(*args, **kwargs)
self.pool_id = 0
def reset(self):
self.pool_id += 1
self.clear()
def pop_all(self):
ids = []
while self:
ids.append(self.pop().session_id)
return ids
def get_server_session(self, session_timeout_minutes):
# Although the Driver Sessions Spec says we only clear stale sessions
# in return_server_session, PyMongo can't take a lock when returning
# sessions from a __del__ method (like in Cursor.__die), so it can't
# clear stale sessions there. In case many sessions were returned via
# __del__, check for stale sessions here too.
self._clear_stale(session_timeout_minutes)
# The most recently used sessions are on the left.
while self:
s = self.popleft()
if not s.timed_out(session_timeout_minutes):
return s
return _ServerSession(self.pool_id)
def return_server_session(self, server_session, session_timeout_minutes):
self._clear_stale(session_timeout_minutes)
if not server_session.timed_out(session_timeout_minutes):
self.return_server_session_no_lock(server_session)
def return_server_session_no_lock(self, server_session):
# Discard sessions from an old pool to avoid duplicate sessions in the
# child process after a fork.
if server_session.pool_id == self.pool_id and not server_session.dirty:
self.appendleft(server_session)
def _clear_stale(self, session_timeout_minutes):
# Clear stale sessions. The least recently used are on the right.
while self:
if self[-1].timed_out(session_timeout_minutes):
self.pop()
else:
# The remaining sessions also haven't timed out.
break
| [
[
[
3780,
3791
],
[
33958,
33969
]
],
[
[
3799,
3801
]
],
[
[
3809,
3812
],
[
11849,
11852
]
],
[
[
3820,
3824
],
[
33085,
33089
]
],
[
[
3850,
3856
],
[
33078,
33084
]
],
[
[
3880,
3885
],
[
33834,
33839
]
],
[
[
3913,
3916
],
[
28598,
28601
]
],
[
[
3918,
3931
],
[
8773,
8786
]
],
[
[
3933,
3949
],
[
11821,
11837
]
],
[
[
3971,
3974
],
[
26923,
26926
]
],
[
[
4002,
4011
],
[
28774,
28783
],
[
29574,
29583
]
],
[
[
4033,
4042
],
[
12651,
12660
],
[
19457,
19466
],
[
31483,
31492
],
[
33133,
33142
],
[
33577,
33586
]
],
[
[
4071,
4089
],
[
8222,
8240
],
[
10285,
10303
]
],
[
[
4119,
4136
],
[
23293,
23310
],
[
25251,
25268
],
[
26086,
26103
]
],
[
[
4166,
4182
],
[
13929,
13945
],
[
21437,
21453
],
[
22487,
22503
],
[
22822,
22838
],
[
24587,
24603
],
[
24885,
24901
],
[
25030,
25046
],
[
31862,
31878
]
],
[
[
4212,
4228
],
[
11989,
12005
],
[
23856,
23872
],
[
25233,
25249
],
[
26402,
26418
]
],
[
[
4258,
4270
],
[
19840,
19852
],
[
20349,
20361
]
],
[
[
4300,
4327
],
[
26024,
26051
],
[
26216,
26243
],
[
26608,
26635
]
],
[
[
4357,
4370
],
[
23638,
23651
]
],
[
[
4400,
4422
],
[
12147,
12169
],
[
26458,
26480
]
],
[
[
4456,
4467
],
[
7641,
7652
]
],
[
[
4505,
4519
],
[
31816,
31830
]
],
[
[
4521,
4532
],
[
8455,
8466
]
],
[
[
4567,
4579
],
[
7936,
7948
],
[
27505,
27517
]
],
[
[
4588,
4602
]
],
[
[
5965,
5983
],
[
5236,
5254
],
[
21952,
21970
]
],
[
[
9591,
9622
]
],
[
[
10495,
10514
],
[
22190,
22209
]
],
[
[
10961,
10970
],
[
11271,
11280
],
[
11438,
11447
],
[
11458,
11467
],
[
11524,
11533
],
[
22118,
22127
],
[
22453,
22462
],
[
22553,
22562
],
[
22573,
22582
],
[
22718,
22727
],
[
22785,
22794
],
[
22942,
22951
],
[
23141,
23150
],
[
24319,
24328
],
[
24553,
24562
],
[
24652,
24661
],
[
24789,
24798
],
[
24848,
24857
],
[
24963,
24972
],
[
24984,
24993
],
[
25419,
25428
],
[
32048,
32057
],
[
32168,
32177
]
],
[
[
11096,
11108
],
[
13274,
13286
]
],
[
[
11644,
11672
],
[
23589,
23617
],
[
23807,
23835
],
[
24230,
24258
]
],
[
[
11874,
11897
],
[
20547,
20570
]
],
[
[
12117,
12144
],
[
23912,
23939
]
],
[
[
12503,
12537
],
[
12683,
12717
]
],
[
[
12550,
12568
],
[
19959,
19977
],
[
20480,
20498
],
[
20761,
20779
]
],
[
[
12726,
12739
]
],
[
[
32909,
32923
],
[
35115,
35129
]
],
[
[
33939,
33957
],
[
34159,
34177
]
]
] |
#ARC027f
def main():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
if __name__ == '__main__':
main() | [
[
[
13,
17
],
[
130,
134
]
]
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import sys
import bigram
# usage: ./parseJson.py "corpus.json path" "output directory"
def concatString(path):
corpus = ""
with open(path, 'r', encoding='UTF-8') as f:
for line in f.readlines():
corpus += line
return corpus
corpus = concatString(sys.argv[1])
data = json.loads(corpus)
output_path = sys.argv[2]
topics = {}
for doc in data:
string = bigram.get_words(doc["body"])
topic_num = doc["topic"]
if topic_num not in topics:
topics[topic_num] = []
topics[topic_num].append(string + "\n")
print("Finish traversing corpus.json")
for topic_index in topics.keys():
path = "%s/%d.txt" % (output_path, topic_index)
with open(path, 'w', encoding='UTF-8') as f:
f.writelines(topics[topic_index])
print("Generated %d files." % len(topics))
| [
[
[
54,
58
],
[
362,
366
]
],
[
[
66,
69
],
[
342,
345
],
[
395,
398
]
],
[
[
77,
83
],
[
449,
455
]
],
[
[
153,
165
],
[
329,
341
]
],
[
[
320,
326
],
[
373,
379
]
],
[
[
355,
359
],
[
430,
434
]
],
[
[
381,
392
],
[
716,
727
]
],
[
[
407,
413
],
[
532,
538
],
[
548,
554
],
[
575,
581
],
[
675,
681
],
[
812,
818
],
[
868,
874
]
],
[
[
423,
426
],
[
466,
469
],
[
495,
498
]
],
[
[
440,
446
],
[
600,
606
]
],
[
[
483,
492
],
[
515,
524
],
[
555,
564
],
[
582,
591
]
],
[
[
660,
671
],
[
729,
740
],
[
819,
830
]
],
[
[
694,
698
],
[
756,
760
]
],
[
[
788,
789
],
[
799,
800
]
]
] |
import torch.nn as nn
from ..functions import F_affine2d, F_affine3d
class STN2d(nn.Module):
def __init__(self, local_net):
super(STN2d, self).__init__()
self.local_net = local_net
def forward(self, x):
params = self.local_net(x)
x_transformed = F_affine2d(x[0], params.view(2,3))
return x_transformed
class STN3d(nn.Module):
def __init__(self, local_net):
super(STN3d, self).__init__()  # nn.Module setup; required before assigning submodules
self.local_net = local_net
def forward(self, x):
params = self.local_net(x)
x_transformed = F_affine3d(x, params.view(3,4))
return x_transformed
| [
[
[
8,
22
],
[
85,
87
],
[
370,
372
]
],
[
[
48,
58
],
[
292,
302
]
],
[
[
60,
70
],
[
539,
549
]
],
[
[
79,
84
],
[
147,
152
]
],
[
[
364,
369
]
]
] |
#!/usr/bin/env python3
"""Set up file for running tests."""
import unittest
def test():
loader = unittest.TestLoader()
testSuite = loader.discover('linkograph.tests')
runner = unittest.TextTestRunner()
runner.run(testSuite)
| [
[
[
69,
77
],
[
104,
112
],
[
191,
199
]
],
[
[
83,
87
]
]
] |
from frazzl import Service
from ariadne import QueryType
schema = """
type Query {
getTest2: Test2
}
type Test2 {
test1: String
}
"""
query = QueryType()
def resolve_getTest2(*args, **kwargs):
return
query.set_field("getTest2", resolve_getTest2)
testService = Service("testService2", schema, query)
| [
[
[
19,
26
],
[
276,
283
]
],
[
[
47,
56
],
[
152,
161
]
],
[
[
57,
63
],
[
300,
306
]
],
[
[
144,
149
],
[
215,
220
],
[
308,
313
]
],
[
[
168,
184
],
[
243,
259
]
],
[
[
262,
273
]
]
] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_py3 import Resource
class VirtualMachineScaleSet(Resource):
"""Describes a Virtual Machine Scale Set.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id
:vartype id: str
:ivar name: Resource name
:vartype name: str
:ivar type: Resource type
:vartype type: str
:param location: Required. Resource location
:type location: str
:param tags: Resource tags
:type tags: dict[str, str]
:param sku: The virtual machine scale set sku.
:type sku: ~azure.mgmt.compute.v2018_06_01.models.Sku
:param plan: Specifies information about the marketplace image used to
create the virtual machine. This element is only used for marketplace
images. Before you can use a marketplace image from an API, you must
enable the image for programmatic use. In the Azure portal, find the
marketplace image that you want to use and then click **Want to deploy
programmatically, Get Started ->**. Enter any required information and
then click **Save**.
:type plan: ~azure.mgmt.compute.v2018_06_01.models.Plan
:param upgrade_policy: The upgrade policy.
:type upgrade_policy: ~azure.mgmt.compute.v2018_06_01.models.UpgradePolicy
:param virtual_machine_profile: The virtual machine profile.
:type virtual_machine_profile:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineScaleSetVMProfile
:ivar provisioning_state: The provisioning state, which only appears in
the response.
:vartype provisioning_state: str
:param overprovision: Specifies whether the Virtual Machine Scale Set
should be overprovisioned.
:type overprovision: bool
:ivar unique_id: Specifies the ID which uniquely identifies a Virtual
Machine Scale Set.
:vartype unique_id: str
:param single_placement_group: When true this limits the scale set to a
single placement group, of max size 100 virtual machines.
:type single_placement_group: bool
:param zone_balance: Whether to force strictly even Virtual Machine
distribution cross x-zones in case there is zone outage.
:type zone_balance: bool
:param platform_fault_domain_count: Fault Domain count for each placement
group.
:type platform_fault_domain_count: int
:param identity: The identity of the virtual machine scale set, if
configured.
:type identity:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineScaleSetIdentity
:param zones: The virtual machine scale set zones.
:type zones: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'provisioning_state': {'readonly': True},
'unique_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'Sku'},
'plan': {'key': 'plan', 'type': 'Plan'},
'upgrade_policy': {'key': 'properties.upgradePolicy', 'type': 'UpgradePolicy'},
'virtual_machine_profile': {'key': 'properties.virtualMachineProfile', 'type': 'VirtualMachineScaleSetVMProfile'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'overprovision': {'key': 'properties.overprovision', 'type': 'bool'},
'unique_id': {'key': 'properties.uniqueId', 'type': 'str'},
'single_placement_group': {'key': 'properties.singlePlacementGroup', 'type': 'bool'},
'zone_balance': {'key': 'properties.zoneBalance', 'type': 'bool'},
'platform_fault_domain_count': {'key': 'properties.platformFaultDomainCount', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'VirtualMachineScaleSetIdentity'},
'zones': {'key': 'zones', 'type': '[str]'},
}
def __init__(self, *, location: str, tags=None, sku=None, plan=None, upgrade_policy=None, virtual_machine_profile=None, overprovision: bool=None, single_placement_group: bool=None, zone_balance: bool=None, platform_fault_domain_count: int=None, identity=None, zones=None, **kwargs) -> None:
super(VirtualMachineScaleSet, self).__init__(location=location, tags=tags, **kwargs)
self.sku = sku
self.plan = plan
self.upgrade_policy = upgrade_policy
self.virtual_machine_profile = virtual_machine_profile
self.provisioning_state = None
self.overprovision = overprovision
self.unique_id = None
self.single_placement_group = single_placement_group
self.zone_balance = zone_balance
self.platform_fault_domain_count = platform_fault_domain_count
self.identity = identity
self.zones = zones
| [
[
[
500,
508
],
[
540,
548
]
],
[
[
517,
539
],
[
4940,
4962
]
]
] |
#!/usr/bin/env python3
from itertools import chain
from setuptools import setup
from snakeoil.dist import distutils_extensions as pkgdist
pkgdist_setup, pkgdist_cmds = pkgdist.setup()
setup(**dict(
pkgdist_setup,
license='BSD',
author='Tim Harder',
author_email='[email protected]',
description='collection of tools for Gentoo development',
url='https://github.com/pkgcore/pkgdev',
data_files=list(chain(
pkgdist.data_mapping('share/bash-completion/completions', 'completion/bash'),
pkgdist.data_mapping('share/zsh/site-functions', 'completion/zsh'),
)),
classifiers=[
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
],
))
| [
[
[
46,
51
],
[
433,
438
]
],
[
[
76,
81
],
[
189,
194
]
],
[
[
108,
139
],
[
171,
178
],
[
448,
455
],
[
534,
541
]
],
[
[
141,
154
],
[
207,
220
]
],
[
[
156,
168
]
]
] |
'''
* File: settings.py
* Author: George Ungureanu <[email protected]>
* Purpose: This file contains methods for collecting configuration options
and initializing the settings object which holds the parameters
throughout the program execution.
* License: BSD3
'''
'''
Copyright (c) 2014, George Ungureanu
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import __init__
import os
import re
import utils
import logging
## Model class for storing configuration parameters
#
# This class is a container for the configuration settings and
# provides methods to gather or parse from two main sources: the
# configuration file and the command-line arguments
class Settings:
## Class constructor
# @param Settings $self
# The object pointer
# @param ArgumentParser $args
# The command-line arguments
def __init__(self, args):
self.logger = logging.getLogger('f2dot.settings')
self.logger.debug('Configuring the runtime execution...')
self.runPath = os.path.dirname(os.path.abspath(__file__))
self.configFileName = args.mode + '.conf'
# if -g option chosen
if args.generate_config:
path = args.output
if not path:
path = os.getcwd()
self.createConfFile(path, force=True)
self.logger.info('Generated config file in ' + path)
os._exit(1)
# set paths & names
self.inPathAndFile = os.path.abspath(args.input)
self.inPath, self.inFile = os.path.split(self.inPathAndFile)
if args.output:
self.outPath = os.path.abspath(args.output)
else:
self.outPath = self.inPath
# resolve config file
if args.config:
self.confFile = os.path.abspath(args.config)
else:
self.confFile = self.createConfFile(self.inPath, force=False)
self.logger.info("Using the configuration in %s", self.confFile)
for line in open(self.confFile):
if line.strip().startswith("# works with : f2dot"):
confVer = line.strip().split("# works with : f2dot-",1)[1]
if not confVer == __init__.__version__:
self.logger.warn('The config file was created by another version '
+ 'of the tool. Errors may occur.')
self.settingDict = {}
self.constraintDict = {}
# loading default settings & constraints
for line in utils.getConfigInSection(os.path.join(self.runPath,'config','general.conf'), '[default settings]'):
tag, value = utils.strBeforeAfter(line,"=")
self.settingDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config',self.configFileName), '[default settings]'):
tag, value = utils.strBeforeAfter(line,"=")
self.settingDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config','general.conf'), '[setting constraints]'):
tag, value = utils.strBeforeAfter(line,"=")
self.constraintDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config',self.configFileName), '[setting constraints]'):
tag, value = utils.strBeforeAfter(line,"=")
self.constraintDict[tag] = value
# loading custom settings and comparing them against the constraints
for line in utils.getConfigInSection(self.confFile):
tag, value = utils.strBeforeAfter(line,"=")
if tag in self.constraintDict:
if self.constraintDict[tag]:
pattern=re.compile(self.constraintDict[tag])
if not pattern.match(value):
self.logger.warn("The value for %s (%s) does not match pattern %s. Choosing the default value: %s",
tag, value, self.constraintDict[tag], self.settingDict[tag])
continue
self.settingDict[tag] = value
if args.format:
self.settingDict['FORMAT'] = args.format
if args.prog:
self.settingDict['PROG'] = args.prog
self.outPathAndFile = os.path.join(self.outPath, utils.getFileName(self.inFile) + '.' + self.settingDict['FORMAT'])
self.logger.debug('Runtime configuration successful')
## Creates a config file in the specified path.
# @param str $path
# The directory where the configuration file should be
# @param bool $force
# \c True to overwrite existing configuration file
# @return A string with the absolute path to the config file
def createConfFile(self, path, force=False):
confFile=os.path.join(path, self.configFileName)
if (os.path.isfile(confFile)) and not force:
return confFile
with open(confFile,'w') as f:
header = '' +\
'# file : ' + self.configFileName + ' \n' +\
'# description : automatically generated configuration file\n' +\
'# usage : change the right-hand values as suggested \n' +\
'# works with : f2dot-' + __init__.__version__ + '\n' +\
'# ####################################################################\n'
f.write(header)
utils.copySection(os.path.join(self.runPath,'config','general.conf'), confFile, '[default settings]')
utils.copySection(os.path.join(self.runPath,'config',self.configFileName), confFile, '[default settings]')
return confFile
## Method to enable treating a Settings object as a dictionary.
# @param str $key
# the setting name, as defined in the .conf file
# @return The value of the config parameter with the name 'key'
def __getitem__(self, key):
return self.settingDict[key]
## Prints the current settings
# @param Settings $self The object pointer
def printSettings(self):
msg = 'The current settings are:\n' \
+ '\t* runPath : ' + self.runPath + '\n' \
+ '\t* inPathAndFile : ' + self.inPathAndFile + '\n' \
+ '\t* inPath : ' + self.inPath + '\n' \
+ '\t* inFile : ' + self.inFile + '\n' \
+ '\t* outPath : ' + self.outPath + '\n' \
+ '\t* outPathAndFile : ' + self.outPathAndFile + '\n' \
+ '\t* confFileName : ' + self.configFileName + '\n' \
+ '\t* confFile : ' + self.confFile + '\n'
for key, value in self.settingDict.iteritems():
msg = msg + '\t* ' + key + " : " + value + '\n'
return msg
## @var logger
# Logger (logging object)
## @var runPath
# The path where the runnable is located (str)
## @var inPathAndFile
# The full path to the input file (str)
## @var inFile
# Input file name (str)
## @var outPath
# Absolute path to the output directory (str)
## @var configFileName
# Name of the configuration file based on the parse mode (str)
## @var confFile
# Absolute path to the configuration file (str)
## @var outPathAndFile
# Absolute path to the output file (str)
## @var settingDict
# Dictionary containing all other settings (dict)
## @var constraintDict
# Dictionary containing lists with allowed values for the same keys in settingDict
| [
[
[
1809,
1817
],
[
3387,
3395
],
[
6015,
6023
]
],
[
[
1826,
1828
],
[
2427,
2429
],
[
2443,
2445
],
[
2615,
2617
],
[
2727,
2729
],
[
2785,
2787
],
[
2842,
2844
],
[
2912,
2914
],
[
3041,
3043
],
[
3661,
3663
],
[
3856,
3858
],
[
4056,
4058
],
[
4257,
4259
],
[
5150,
5152
],
[
5636,
5638
],
[
5682,
5684
],
[
6164,
6166
],
[
6268,
6270
]
],
[
[
1836,
1838
],
[
4678,
4680
]
],
[
[
1846,
1851
],
[
3636,
3641
],
[
3752,
3757
],
[
3831,
3836
],
[
3952,
3957
],
[
4031,
4036
],
[
4150,
4155
],
[
4232,
4237
],
[
4356,
4361
],
[
4509,
4514
],
[
4566,
4571
],
[
5177,
5182
],
[
6146,
6151
],
[
6250,
6255
]
],
[
[
1859,
1866
],
[
2314,
2321
]
],
[
[
2111,
2119
]
]
] |
from __future__ import print_function
from functools import wraps
import logging
try:
import ujson as json
except ImportError:
import json
from flask import Flask as _Flask
from flask.globals import _request_ctx_stack
from werkzeug.wrappers import Response
from werkzeug.datastructures import Headers
from werkzeug.exceptions import HTTPException
_Request = _Flask.request_class
class cached_property(object):
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
class ApiError(Exception):
status_code = 500
error = 'internal-error'
def __init__(self, error=None, status_code=None, **kwargs):
self.status_code = status_code or self.status_code
self.error = error or self.error
self.details = kwargs
def to_json(self):
data = {'error': self.error}
self.details and data.update(self.details)
return data
class Request(_Request):
def __init__(self, *args, **kwargs):
_Request.__init__(self, *args, **kwargs)
self._response = None
@cached_property
def response(self):
self._response = HeaderResponse()
return self._response
def process_response(self, response):
headers = self._response and self._response.headers
if headers:
response.headers._list.extend(headers)
return response
class HeaderResponse(Response):
def __init__(self):
self.headers = Headers()
class Flask(_Flask):
request_class = Request
def __init__(self, *args, **kwargs):
_Flask.__init__(self, *args, **kwargs)
self.url_map.strict_slashes = False
self.endpoint_counter = 0
self._logger = logging.getLogger(self.logger_name)
def route(self, rule, endpoint=None, weight=None, **options):
if weight is not None:
weight = False, -9999, weight
def decorator(func):
lendpoint = endpoint
if not lendpoint:
lendpoint = '{}_{}'.format(func.__name__, self.endpoint_counter)
self.endpoint_counter += 1
self.add_url_rule(rule, lendpoint, func, **options)
if weight:
self.url_map._rules[-1].match_compare_key = lambda: weight
return func
return decorator
def api(self, *args, **kwargs):
def decorator(func):
@wraps(func)
def inner(*args, **kwargs):
try:
result = func(*args, **kwargs)
except ApiError as e:
result = e
except HTTPException as e:
result = e
except Exception:
self.logger.exception('Unhandled error')
result = ApiError()
if isinstance(result, Response):
return result
elif isinstance(result, ApiError):
code = result.status_code
result = result.to_json()
else:
code = 200
return self.response_class(json.dumps(result, ensure_ascii=False), code,
content_type='application/json')
return self.route(*args, **kwargs)(inner)
return decorator
def process_response(self, response):
response = _request_ctx_stack.top.request.process_response(response)
return _Flask.process_response(self, response)
def print_routes(self, sort=False):
rules = self.url_map.iter_rules()
if sort:
rules = sorted(rules, key=lambda r: r.rule)
for rule in rules:
func = self.view_functions[rule.endpoint]
print('{:10} {}\t{}.{}'.format(
','.join(rule.methods),
rule.rule,
func.__module__,
func.__name__))
| [
[
[
23,
37
]
],
[
[
60,
65
],
[
2580,
2585
]
],
[
[
73,
80
],
[
1897,
1904
]
],
[
[
98,
111
],
[
3305,
3309
]
],
[
[
143,
147
],
[
3305,
3309
]
],
[
[
167,
182
],
[
369,
375
],
[
1670,
1676
],
[
1757,
1763
],
[
3641,
3647
]
],
[
[
209,
227
],
[
3568,
3586
]
],
[
[
258,
266
],
[
1588,
1596
],
[
3021,
3029
]
],
[
[
303,
310
],
[
1646,
1653
]
],
[
[
343,
356
],
[
2796,
2809
]
],
[
[
358,
366
],
[
1118,
1126
],
[
1178,
1186
]
],
[
[
398,
413
],
[
1255,
1270
]
],
[
[
703,
711
],
[
2727,
2735
],
[
2971,
2979
],
[
3106,
3114
]
],
[
[
1110,
1117
],
[
1699,
1706
]
],
[
[
1573,
1587
],
[
1320,
1334
]
],
[
[
1664,
1669
]
]
] |
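The Flask subclass in the row above folds ApiError and HTTPException handling plus JSON serialization into a single api() decorator. A hypothetical usage sketch, assuming the row's definitions are importable and a pre-1.0 Flask (the snippet's __init__ reads self.logger_name, which newer Flask releases removed); the routes and payloads are invented:
# Hypothetical usage of the Flask subclass defined above.
app = Flask(__name__)

@app.api('/ping')
def ping():
    return {'ok': True}          # plain dicts are JSON-encoded by api()

@app.api('/teapot')
def teapot():
    raise ApiError(error='teapot', status_code=418)  # returned as a JSON error

if __name__ == '__main__':
    app.run()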
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
def index(request):
return HttpResponse('TEST URL')
| [
[
[
29,
35
]
],
[
[
88,
100
],
[
133,
145
]
],
[
[
106,
111
]
]
] |
from pyrepo import correlations as corrs
from scipy.stats import pearsonr
import unittest
import numpy as np
# Test for Spearman rank correlation coefficient
class Test_Spearman(unittest.TestCase):
def test_spearman(self):
"""Test based on paper Sałabun, W., & Urbaniak, K. (2020, June). A new coefficient of rankings similarity
in decision-making problems. In International Conference on Computational Science
(pp. 632-645). Springer, Cham."""
R = np.array([1, 2, 3, 4, 5])
Q = np.array([1, 3, 2, 4, 5])
test_result = corrs.spearman(R, Q)
real_result = 0.9
self.assertEqual(test_result, real_result)
# Test for Weighted Spearman rank correlation coefficient
class Test_Weighted_Spearman(unittest.TestCase):
def test_weighted_spearman(self):
"""Test based on paper Sałabun, W., & Urbaniak, K. (2020, June). A new coefficient of rankings similarity
in decision-making problems. In International Conference on Computational Science
(pp. 632-645). Springer, Cham."""
R = np.array([1, 2, 3, 4, 5])
Q = np.array([1, 3, 2, 4, 5])
test_result = corrs.weighted_spearman(R, Q)
real_result = 0.8833
self.assertEqual(np.round(test_result, 4), real_result)
# Test for Similarity rank coefficient WS
class Test_WS(unittest.TestCase):
def test_ws(self):
"""Test based on paper Sałabun, W., & Urbaniak, K. (2020, June). A new coefficient of rankings similarity
in decision-making problems. In International Conference on Computational Science
(pp. 632-645). Springer, Cham."""
R = np.array([1, 2, 3, 4, 5])
Q = np.array([1, 3, 2, 4, 5])
test_result = corrs.WS_coeff(R, Q)
real_result = 0.8542
self.assertEqual(np.round(test_result, 4), real_result)
# Test for Pearson correlation coefficient
class Test_Pearson(unittest.TestCase):
def test_pearson(self):
"""Test based on paper Sałabun, W., & Urbaniak, K. (2020, June). A new coefficient of rankings similarity
in decision-making problems. In International Conference on Computational Science
(pp. 632-645). Springer, Cham."""
R = np.array([1, 2, 3, 4, 5])
Q = np.array([1, 3, 2, 4, 5])
test_result = corrs.pearson_coeff(R, Q)
real_result, _ = pearsonr(R, Q)
self.assertEqual(test_result, real_result)
def main():
test_spearman_coeff = Test_Spearman()
test_spearman_coeff.test_spearman()
test_weighted_spearman_coeff = Test_Weighted_Spearman()
test_weighted_spearman_coeff.test_weighted_spearman()
test_pearson_coeff = Test_Pearson()
test_pearson_coeff.test_pearson()
test_ws = Test_WS()
test_ws.test_ws()
if __name__ == '__main__':
main() | [
[
[
19,
40
],
[
577,
582
],
[
1170,
1175
],
[
1742,
1747
],
[
2316,
2321
]
],
[
[
65,
73
],
[
2367,
2375
]
],
[
[
81,
89
],
[
180,
188
],
[
764,
772
],
[
1351,
1359
],
[
1920,
1928
]
],
[
[
97,
108
],
[
491,
493
],
[
529,
531
],
[
1084,
1086
],
[
1122,
1124
],
[
1254,
1256
],
[
1656,
1658
],
[
1694,
1696
],
[
1817,
1819
],
[
2230,
2232
],
[
2268,
2270
]
],
[
[
166,
179
],
[
2473,
2486
]
],
[
[
741,
763
],
[
2565,
2587
]
],
[
[
1343,
1350
],
[
2742,
2749
]
],
[
[
1907,
1919
],
[
2674,
2686
]
],
[
[
2439,
2443
],
[
2807,
2811
]
]
] |
import numpy as np
import pandas as pd
from sqlalchemy import create_engine  # assumed source of create_engine, used in get_nominal_cop_database
""" Contains core classes and methods for initializing an Assembly System; the inputs are provided in the assemblyconfig file in utilities"""
class AssemblySystem:
"""Assembly System Class
:param assembly_type: Type of assembly: Single-Station/Multi-Station
:type assembly_type: str (required)
:param assembly_kccs: Number of KCCs for the assembly
:type assembly_kccs: int (required)
:param assembly_kpis: Number of KPIs for the assembly
:type assembly_kpis: int (required)
"""
def __init__(self,assembly_type,assembly_kccs,assembly_kpis):
self.assembly_type=assembly_type
self.assembly_kccs=assembly_kccs
self.assembly_kpis=assembly_kpis
class PartType(AssemblySystem):
"""Part System Class, inherits the Assembly System Class, additional parameters for this class include
:param voxel_dim: Dimension of the voxel
:type voxel_dim: int (required)
:param voxel_channels: Dimension of the voxel channel; single-channel output - 1, or multi-channel - 2,3 (use 1 for deviations in one direction, 2 or 3 if data for multiple deviation directions are present)
:type voxel_channels: int (required)
:param point_dim: Number of points in the cloud-of-point
:type point_dim: int (required)
The class contains two functions - get_nominal_cop and get_nominal_cop_database
"""
def __init__(self,assembly_type,assembly_kccs,assembly_kpis,part_name,part_type,voxel_dim,voxel_channels,point_dim):
super().__init__(assembly_type,assembly_kccs,assembly_kpis)
self.part_name=part_name
self.part_type=part_type
self.voxel_dim=voxel_dim
self.voxel_channels=voxel_channels
self.point_dim=point_dim
def get_nominal_cop(self,file_name):
"""Import nominal cloud-of-point of the assembly from a text/csv file
:param file_name: Name of the input file
:type file_name: str (required)
:returns: numpy array of nominal COP
:rtype: numpy.array [point_dim,3]
"""
df=pd.read_csv(file_name, sep=',',header=None)
nominal_cop=df.values
return nominal_cop
def get_nominal_cop_database(self,conn_str,table_name):
"""Import nominal cloud-of-point of the assembly from a SQL database assumes the table only contains three columns of the nominal COPs in order of the Node IDs
:param conn_str: Connection String for Database
:type conn_str: str (required)
:param table_name: Name of table in the database
:type table_name: str (required)
:returns: numpy array of dim points * 3
:rtype: numpy.array [point_dim,3]
"""
engine = create_engine(conn_str)
squery ='select * from '+table_name
df_nom = pd.read_sql_query(squery,con=engine)
df_nom = df_nom.values
return df_nom
class VRMSimulationModel(PartType):
"""VRM Simulation Model class inherits the part type class, additional parameters of this class include
:param noise_level: The level of artificial noise to be added to simulated data, typically set to 0.1 mm from the measurement system class depending on the scanner
:type noise_level: float (required)
:param noise_type: The type of noise to be added, can be Gaussian or uniform , for Gaussian noise_level is set as standard deviation and mean as zero for uniform the min and max are set -noise_level and +noise_level respectively
:type noise_type: str (optional)
:param convergency_flag: Flag to denote if the simulation model had converged while simulating, is set to 1 by default
:type convergency_flag: int (optional)
The class contains one function kpi_calculator that needs to be defined by the user depending on the assembly output
"""
def __init__(self,assembly_type,assembly_kccs,assembly_kpis,part_name,part_type,voxel_dim,voxel_channels,point_dim,noise_level,noise_type='uniform',convergency_flag=1):
super().__init__(assembly_type,assembly_kccs,assembly_kpis,part_name,part_type,voxel_dim,voxel_channels,point_dim)
self.noise_level=noise_level
self.noise_type=noise_type
self.convergency_flag=convergency_flag
def kpi_calculator(self,cop_data,kpi_params=[]):
""" User defined function to calculate KPI from Cloud of Point Data [KPI]=f(Cop)
:param cop_data: CoP data for a given sample
:type cop_data: np_array [point_dim,3] (required)
:param kpi_params: Various parameters required to calculate the KPI, can be blank if no parameters are required to calculate KPI from CoP
:type kpi_params: list (optional)
:returns: list of multivariate KPIs for the given CoP
:rtype: list
"""
kpi=[None]*self.assembly_kpis
#define function here
return kpi | [
[
[
7,
18
]
],
[
[
26,
38
],
[
1948,
1950
],
[
2608,
2610
]
],
[
[
183,
197
],
[
716,
730
]
],
[
[
707,
715
],
[
2712,
2720
]
],
[
[
2693,
2711
]
]
] |
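A hypothetical instantiation of the PartType class from the row above; every value below is invented for illustration, and the CSV is expected to hold point_dim rows of x,y,z nominal coordinates:
# Hypothetical usage of PartType; the file name and dimensions are made up.
part = PartType(assembly_type='Single-Station', assembly_kccs=6, assembly_kpis=2,
                part_name='door_inner', part_type='sheet_metal',
                voxel_dim=64, voxel_channels=1, point_dim=8047)
nominal_cop = part.get_nominal_cop('nominal_cop.csv')  # numpy array [point_dim, 3]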
# automatically generated, do not modify
# namespace: NamespaceA
import flatbuffers
class SecondTableInA(object):
__slots__ = ['_tab']
# SecondTableInA
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# SecondTableInA
def ReferToC(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .TableInC import TableInC
obj = TableInC()
obj.Init(self._tab.Bytes, x)
return obj
return None
def SecondTableInAStart(builder): builder.StartObject(1)
def SecondTableInAAddReferToC(builder, referToC): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(referToC), 0)
def SecondTableInAEnd(builder): return builder.EndObject()
| [
[
[
74,
85
],
[
214,
225
],
[
306,
317
],
[
750,
761
]
],
[
[
93,
107
]
],
[
[
608,
627
]
],
[
[
665,
690
]
],
[
[
815,
832
]
]
] |
from setuptools import setup, find_packages
setup(
name='w3lib',
version='1.12.0',
license='BSD',
description='Library of web-related functions',
author='Scrapy project',
author_email='[email protected]',
url='https://github.com/scrapy/w3lib',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
platforms=['Any'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP',
],
install_requires=['six >= 1.4.1'],
)
| [
[
[
23,
28
],
[
46,
51
]
],
[
[
30,
43
],
[
285,
298
]
]
] |
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib
# Look pretty...
# matplotlib.style.use('ggplot')
plt.style.use('ggplot')
#
# TODO: Load up the Seeds Dataset into a Dataframe
# It's located at 'Datasets/wheat.data'
#
wheat_df = pd.read_csv('/home/dipanjan/DAT210x/Module3/Datasets/wheat.data', index_col=0);
#
# TODO: Create a 2d scatter plot that graphs the
# area and perimeter features
#
# .. your code here ..
wheat_df.plot.scatter(x='area', y='perimeter')
#
# TODO: Create a 2d scatter plot that graphs the
# groove and asymmetry features
#
# .. your code here ..
wheat_df.plot.scatter(x='groove', y='asymmetry')
#
# TODO: Create a 2d scatter plot that graphs the
# compactness and width features
#
# .. your code here ..
wheat_df.plot.scatter(x='compactness', y='width')
# BONUS TODO:
# After completing the above, go ahead and run your program
# Check out the results, and see what happens when you add
# in the optional display parameter marker with values of
# either '^', '.', or 'o'.
wheat_df.plot.scatter(x='compactness', y='width', marker='o')
plt.show()
| [
[
[
7,
19
],
[
254,
256
]
],
[
[
27,
51
],
[
121,
124
],
[
1093,
1096
]
],
[
[
59,
69
]
],
[
[
243,
251
],
[
443,
451
],
[
600,
608
],
[
760,
768
],
[
1030,
1038
]
]
] |
import numpy as np
import os, sys
sys.path.append(os.path.dirname(__file__))
from diis_solver import diis_solver, diis_solver_uhf
sys.path.pop()
import jk
import xform
def homo_lumo_mix(C, nocc, beta):
"""
Mix a portion of LUMO to HOMO.
Used when generating spin-unrestricted guess.
"""
if beta < 0. or beta > 1.:
raise Exception("Mixing beta must be in [0, 1]")
Cb = C.copy()
homo = C[:, nocc - 1]
lumo = C[:, nocc]
Cb[:, nocc - 1] = (1. - beta) ** 0.5 * homo + beta ** 0.5 * lumo
return Cb
def get_dm(C, nel):
D = C[:, :nel]
D = D @ D.T
return D
def get_JK(is_fitted, g, D):
if(is_fitted):
# FINISH LATER
X = np.einsum("Pls,ls->P", g, D)
J = np.einsum("mnP,P->mn", np.swapaxes(g, 0, 2), X)
Z = np.einsum("Pns,ls->Pnl", g, D)
K = np.einsum('mlP,Pnl->mn', np.swapaxes(g, 0, 2), Z)
return (J, K)
else:
#J = np.einsum("pqrs,rs->pq", g, D)
#K = np.einsum("prqs,rs->pq", g, D)
J, K = jk.getJK_np_Dshift(g, D - np.diag(np.diag(D) * 0.5))
return (J, K)
def get_JK_uhf(is_fitted, g, Ds):
"""
Ds = [Da, Db]
"""
Da, Db = Ds[0], Ds[1]
Dtot = Da + Db
if (is_fitted == True):
X = np.einsum("Pls,ls->P", g, Dtot)
Jtot = np.einsum("mnP,P->mn", np.swapaxes(g, 0, 2), X)
Za = np.einsum("Pns,ls->Pnl", g, Da)
Ka = np.einsum('mlP,Pnl->mn', np.swapaxes(g, 0, 2), Za)
Zb = np.einsum("Pns,ls->Pnl", g, Db)
Kb = np.einsum('mlP,Pnl->mn', np.swapaxes(g, 0, 2), Zb)
return Jtot, Ka, Kb
else:
Jtot = np.einsum("pqrs, rs -> pq", g, Dtot)
Ka = np.einsum("prqs, rs -> pq", g, Da)
Kb = np.einsum("prqs, rs -> pq", g, Db)
return Jtot, Ka, Kb
def get_fock(H, g, D):
J, K = get_JK(len(g.shape) == 3, g, D)
return H + 2 * J - K
def diis_update(F_prev_list, r_prev_list):
c = diis_solver(r_prev_list) # GET THE COEFFICIENTS!!
out = 0 * F_prev_list[0]
for i, element in enumerate(F_prev_list):
out += c[i] * element
return out
def oda_update(dF, dD, dE):
"""
ODA update:
lbd = 0.5 * (1 - dE / E_deriv)
"""
E_deriv = np.sum(dF * dD)
lbd = 0.5 * (1. - dE / E_deriv)
if lbd < 0 or lbd > 1:
lbd = 0.9999 if dE < 0 else 1.e-4
return lbd
def get_fock_uhf(H, g, Ds):
"""
Build the UHF Fock matrices from H, the integrals and the alpha/beta
densities: Fa = H + Jtot - Ka, Fb = H + Jtot - Kb.
"""
Jtot, Ka, Kb = get_JK_uhf(len(g.shape) == 3, g, Ds)
return H + Jtot - Ka, H + Jtot - Kb
def diis_update_uhf(F_prev_lists, r_prev_lists):
c = diis_solver_uhf(r_prev_lists[0], r_prev_lists[1])
Fa = 0 * F_prev_lists[0][0]
for i, element in enumerate(F_prev_lists[0]):
Fa += c[i] * element
Fb = 0 * F_prev_lists[0][0]
for i, element in enumerate(F_prev_lists[1]):
Fb += c[i] * element
return Fa, Fb
def oda_update_uhf(dFs, dDs, dE):
"""
ODA update:
lbd = 0.5 * (1 - dE / E_deriv)
"""
if type(dFs) is not list:
raise Exception("arg1 and arg2 are list of alpha/beta matrices.")
E_deriv = np.sum(dFs[0] * dDs[0] + dFs[1] * dDs[1])
lbd = 0.5 * (1. - dE / E_deriv)
if lbd < 0 or lbd > 1:
lbd = 0.9999 if dE < 0 else 1.e-4
return lbd
def diag(F, A):
Fp = A.T @ F @ A
eps, Cp = np.linalg.eigh(Fp)
C = A @ Cp
return eps, C
def get_SCF_err(S, D, F):
err_v = S @ D @ F - F @ D @ S
err = np.mean(err_v ** 2) ** 0.5
return err, err_v
def get_SCF_energy(H, F, D, unrestricted):
"""
Calculates the energy.
"""
if unrestricted == True:
if type(F) is not list or type(D) is not list:
raise Exception("For UHF, F and D must have type list.")
Fa, Fb = F[0], F[1]
Da, Db = D[0], D[1]
Dtot = Da + Db
return np.sum(Dtot * H + Da * Fa + Db * Fb) * 0.5
else:
return np.sum((H + F) * D)
def xform_2(H, A):
"""
Basis xform for 2-tensor
"""
if len(H.shape) != 2:
raise Exception("Dimension error: arg1 should be a matrix")
return A.T @ H @ A
def xform_4(g, A):
"""
Basis xform for 4-tensor
"""
if len(g.shape) != 4:
raise Exception("""
Dimension error: arg1 should be a four-tensor.
Note that you should set is_fitted to be False.
""")
#return np.einsum("pi, qj, pqrs, rk, sl -> ijkl", A, A, g, A, A, optimize=True)
return xform.xform_4_np(g, A)
| [
[
[
7,
18
],
[
697,
699
],
[
738,
740
],
[
761,
763
],
[
798,
800
],
[
841,
843
],
[
866,
868
],
[
1052,
1054
],
[
1060,
1062
],
[
1256,
1258
],
[
1303,
1305
],
[
1326,
1328
],
[
1364,
1366
],
[
1409,
1411
],
[
1434,
1436
],
[
1473,
1475
],
[
1518,
1520
],
[
1543,
1545
],
[
1622,
1624
],
[
1672,
1674
],
[
1720,
1722
],
[
2208,
2210
],
[
3181,
3183
],
[
3396,
3398
],
[
3520,
3522
],
[
3904,
3906
],
[
3972,
3974
]
],
[
[
26,
28
],
[
50,
52
]
],
[
[
30,
33
],
[
34,
37
],
[
130,
133
]
],
[
[
101,
112
],
[
1929,
1940
]
],
[
[
114,
129
],
[
2672,
2687
]
],
[
[
152,
154
],
[
1026,
1028
]
],
[
[
162,
167
],
[
4524,
4529
]
],
[
[
174,
187
]
],
[
[
548,
554
]
],
[
[
618,
624
],
[
1819,
1825
]
],
[
[
1107,
1117
],
[
2536,
2546
]
],
[
[
1789,
1797
]
],
[
[
1882,
1893
]
],
[
[
2105,
2115
]
],
[
[
2350,
2362
]
],
[
[
2619,
2634
]
],
[
[
2968,
2982
]
],
[
[
3349,
3353
]
],
[
[
3454,
3465
]
],
[
[
3575,
3589
]
],
[
[
3998,
4005
]
],
[
[
4182,
4189
]
]
] |
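The oda_update() helper in the row above damps SCF steps with lbd = 0.5 * (1 - dE / E_deriv), where E_deriv = sum(dF * dD), falling back to 0.9999 or 1e-4 when lbd leaves (0, 1). A tiny numeric check with invented matrices:
# Numeric check of the oda_update() formula; dF, dD and dE are invented.
import numpy as np

dF = np.array([[0.2, 0.0], [0.0, 0.1]])
dD = np.array([[0.5, 0.0], [0.0, 0.3]])
dE = -0.05
E_deriv = np.sum(dF * dD)           # 0.2*0.5 + 0.1*0.3 = 0.13
lbd = 0.5 * (1.0 - dE / E_deriv)    # 0.5 * (1 + 0.05/0.13) ~ 0.69
print(lbd)                          # inside (0, 1), so no clamping needed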
import pytest
from .funcs import assert_query
QUERY = '''
{
__schema {
types {
kind
name
fields {
name
}
}
queryType {
fields {
name
}
}
mutationType {
fields {
name
}
}
subscriptionType {
fields {
name
}
}
}
}
'''
##__________________________________________________________________||
params = [
pytest.param(
{"query": QUERY},
{"Authorization": "Bearer 90b2ee5fed25506df04fd37343bb68d1803dd97f"},
id="admin",
),
pytest.param(
{"query": QUERY},
{"Authorization": "Bearer 0fb8c9e16d6f7c4961c4c49212bf197d79f14080"},
id="private",
),
pytest.param(
{"query": QUERY},
{"Authorization": "Bearer 1a2d18f270df3abacfb85c5413b668f97794b4ce"},
id="public-wrong-token",
),
pytest.param(
{"query": QUERY},
{},
id="public-no-token",
),
]
@pytest.mark.parametrize("data, headers", params)
@pytest.mark.asyncio
async def test_schema(app_users, snapshot, data, headers):
await assert_query(app_users, snapshot, data, headers)
##__________________________________________________________________||
| [
[
[
7,
13
],
[
433,
439
],
[
582,
588
],
[
733,
739
],
[
895,
901
],
[
989,
995
],
[
1039,
1045
]
],
[
[
34,
46
],
[
1128,
1140
]
],
[
[
48,
53
],
[
465,
470
],
[
614,
619
],
[
765,
770
],
[
927,
932
]
],
[
[
418,
424
],
[
1030,
1036
]
],
[
[
1059,
1176
]
]
] |
"""
MIT License
Copyright 2021 Hannes Holey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from setuptools import setup, find_packages
with open("requirements.txt", "r") as fh:
requirements = [line.strip() for line in fh]
setup(name='hans',
description='Height-Averaged Navier-Stokes (HANS) solver for 2D lubrication problems',
author='Hannes Holey',
author_email='[email protected]',
url='http://github.com/hannes-holey/hans',
license="MIT",
packages=find_packages(),
package_data={'': ['ChangeLog.md']},
include_package_data=True,
scripts=['cli/plot1D_evolution.py',
'cli/plot1D_last.py',
'cli/plot2D_last.py',
'cli/plot_scalar.py',
'cli/read_config.py',
'cli/animate1D.py',
'cli/animate2D.py'],
test_suite='tests',
tests_require=["pytest>=4"],
install_requires=requirements,
python_requires=">=3.6",
use_scm_version=True,
setup_requires=['setuptools_scm>=3.5.0'],
zip_safe=False)
| [
[
[
1098,
1103
],
[
1212,
1217
]
],
[
[
1105,
1118
],
[
1481,
1494
]
],
[
[
1158,
1160
],
[
1207,
1209
]
],
[
[
1166,
1178
],
[
1919,
1931
]
]
] |
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from .format_converter import FileBasedAnnotationConverter, ConverterReturn
from ..representation import MultiLabelRecognitionAnnotation
from ..utils import read_xml, check_file_existence
from ..config import StringField, PathField, ConfigError
class CVATMultilabelAttributesRecognitionConverter(FileBasedAnnotationConverter):
__provider__ = 'cvat_multilabel_binary_attributes_recognition'
annotation_types = (MultiLabelRecognitionAnnotation, )
@classmethod
def parameters(cls):
configuration_parameters = super().parameters()
configuration_parameters.update({
'label': StringField(description='specific label for attribute collection'),
'images_dir': PathField(
is_directory=True, optional=True,
description='path to dataset images, used only for content existence check'
)
})
return configuration_parameters
def configure(self):
super().configure()
self.label = self.get_value_from_config('label')
self.images_dir = self.get_value_from_config('images_dir') or self.annotation_file.parent
def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
annotation = read_xml(self.annotation_file)
meta = annotation.find('meta')
size = int(meta.find('task').find('size').text)
label = self.select_label(meta)
label_to_id = {attribute.find('name').text: idx for idx, attribute in enumerate(label.iter('attribute'))}
num_attributes = len(label_to_id)
annotations = []
content_errors = None if not check_content else []
for image_id, image in enumerate(annotation.iter('image')):
identifier = image.attrib['name'].split('/')[-1]
if check_content:
if not check_file_existence(self.images_dir / identifier):
content_errors.append('{}: does not exist'.format(self.images_dir / identifier))
for bbox in image:
if 'label' not in bbox.attrib.keys() or bbox.attrib['label'] != self.label:
continue
bbox_rect = [
float(bbox.attrib['xtl']), float(bbox.attrib['ytl']),
float(bbox.attrib['xbr']), float(bbox.attrib['ybr'])
]
attributes = -np.ones(num_attributes)
for attribute in bbox.iter('attribute'):
attribute_name = attribute.attrib['name']
attribute_label = label_to_id[attribute_name]
attributes[attribute_label] = 1 if attribute.text == 'T' else 0
attributes_annotation = MultiLabelRecognitionAnnotation(identifier, attributes)
attributes_annotation.metadata['rect'] = bbox_rect
annotations.append(attributes_annotation)
if progress_callback is not None and image_id % progress_interval == 0:
progress_callback(image_id * 100 / size)
return ConverterReturn(annotations, self.generate_meta(label_to_id), content_errors)
@staticmethod
def generate_meta(attribute_values_mapping):
return {'label_map': {value: key for key, value in attribute_values_mapping.items()}}
def select_label(self, meta):
label = [label for label in meta.iter('label') if label.find('name').text == self.label]
if not label:
raise ConfigError('{} is not present in the annotation'.format(self.label))
return label[0]
| [
[
[
585,
596
],
[
2977,
2979
]
],
[
[
627,
655
],
[
895,
923
]
],
[
[
657,
672
],
[
3657,
3672
]
],
[
[
702,
733
],
[
1017,
1048
],
[
3310,
3341
]
],
[
[
754,
762
],
[
1858,
1866
]
],
[
[
764,
784
],
[
2447,
2467
]
],
[
[
806,
817
],
[
1214,
1225
]
],
[
[
819,
828
],
[
1308,
1317
]
],
[
[
830,
841
],
[
4069,
4080
]
],
[
[
850,
894
]
]
] |
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
import json
import sys
import xml.parsers.expat
import xml.dom.minidom
import colorama
from awscli.compat import six
from awscli.customizations.history.commands import HistorySubcommand
from awscli.customizations.history.filters import RegexFilter
class Formatter(object):
def __init__(self, output=None, include=None, exclude=None):
"""Formats and outputs CLI history events
:type output: File-like obj
:param output: The stream to write the formatted event to. By default
sys.stdout is used.
:type include: list
:param include: A filter specifying which event to only be displayed.
This parameter is mutually exclusive with exclude.
:type exclude: list
:param exclude: A filter specifying which events to exclude from being
displayed. This parameter is mutually exclusive with include.
"""
self._output = output
if self._output is None:
self._output = sys.stdout
if include and exclude:
raise ValueError(
'Either include or exclude can be provided but not both')
self._include = include
self._exclude = exclude
def display(self, event_record):
"""Displays a formatted version of the event record
:type event_record: dict
:param event_record: The event record to format and display.
"""
if self._should_display(event_record):
self._display(event_record)
def _display(self, event_record):
raise NotImplementedError('_display()')
def _should_display(self, event_record):
if self._include:
return event_record['event_type'] in self._include
elif self._exclude:
return event_record['event_type'] not in self._exclude
else:
return True
class DetailedFormatter(Formatter):
_SIG_FILTER = RegexFilter(
'Signature=([a-z0-9]{4})[a-z0-9]{60}',
r'Signature=\1...',
)
_SECTIONS = {
'CLI_VERSION': {
'title': 'AWS CLI command entered',
'values': [
{'description': 'with AWS CLI version'}
]
},
'CLI_ARGUMENTS': {
'values': [
{'description': 'with arguments'}
]
},
'API_CALL': {
'title': 'API call made',
'values': [
{
'description': 'to service',
'payload_key': 'service'
},
{
'description': 'using operation',
'payload_key': 'operation'
},
{
'description': 'with parameters',
'payload_key': 'params',
'value_format': 'dictionary'
}
]
},
'HTTP_REQUEST': {
'title': 'HTTP request sent',
'values': [
{
'description': 'to URL',
'payload_key': 'url'
},
{
'description': 'with method',
'payload_key': 'method'
},
{
'description': 'with headers',
'payload_key': 'headers',
'value_format': 'dictionary',
'filters': [_SIG_FILTER]
},
{
'description': 'with body',
'payload_key': 'body',
'value_format': 'http_body'
}
]
},
'HTTP_RESPONSE': {
'title': 'HTTP response received',
'values': [
{
'description': 'with status code',
'payload_key': 'status_code'
},
{
'description': 'with headers',
'payload_key': 'headers',
'value_format': 'dictionary'
},
{
'description': 'with body',
'payload_key': 'body',
'value_format': 'http_body'
}
]
},
'PARSED_RESPONSE': {
'title': 'HTTP response parsed',
'values': [
{
'description': 'parsed to',
'value_format': 'dictionary'
}
]
},
'CLI_RC': {
'title': 'AWS CLI command exited',
'values': [
{'description': 'with return code'}
]
},
}
_COMPONENT_COLORS = {
'title': colorama.Style.BRIGHT,
'description': colorama.Fore.CYAN
}
def __init__(self, output=None, include=None, exclude=None, colorize=True):
super(DetailedFormatter, self).__init__(output, include, exclude)
self._request_id_to_api_num = {}
self._num_api_calls = 0
self._colorize = colorize
self._value_pformatter = SectionValuePrettyFormatter()
if self._colorize:
colorama.init(autoreset=True, strip=False)
def _display(self, event_record):
section_definition = self._SECTIONS.get(event_record['event_type'])
if section_definition is not None:
self._display_section(event_record, section_definition)
def _display_section(self, event_record, section_definition):
if 'title' in section_definition:
self._display_title(section_definition['title'], event_record)
for value_definition in section_definition['values']:
self._display_value(value_definition, event_record)
def _display_title(self, title, event_record):
formatted_title = self._format_section_title(title, event_record)
self._write_output(formatted_title)
def _display_value(self, value_definition, event_record):
value_description = value_definition['description']
event_record_payload = event_record['payload']
value = event_record_payload
if 'payload_key' in value_definition:
value = event_record_payload[value_definition['payload_key']]
formatted_value = self._format_description(value_description)
formatted_value += self._format_value(
value, event_record, value_definition.get('value_format')
)
if 'filters' in value_definition:
for text_filter in value_definition['filters']:
formatted_value = text_filter.filter_text(formatted_value)
self._write_output(formatted_value)
def _write_output(self, content):
if isinstance(content, six.text_type):
content = content.encode('utf-8')
self._output.write(content)
def _format_section_title(self, title, event_record):
formatted_title = title
api_num = self._get_api_num(event_record)
if api_num is not None:
formatted_title = ('[%s] ' % api_num) + formatted_title
formatted_title = self._color_if_configured(formatted_title, 'title')
formatted_title += '\n'
formatted_timestamp = self._format_description('at time')
formatted_timestamp += self._format_value(
event_record['timestamp'], event_record, value_format='timestamp')
return '\n' + formatted_title + formatted_timestamp
def _get_api_num(self, event_record):
request_id = event_record['request_id']
if request_id:
if request_id not in self._request_id_to_api_num:
self._request_id_to_api_num[
request_id] = self._num_api_calls
self._num_api_calls += 1
return self._request_id_to_api_num[request_id]
def _format_description(self, value_description):
return self._color_if_configured(
value_description + ': ', 'description')
def _format_value(self, value, event_record, value_format=None):
if value_format:
formatted_value = self._value_pformatter.pformat(
value, value_format, event_record)
else:
formatted_value = str(value)
return formatted_value + '\n'
def _color_if_configured(self, text, component):
if self._colorize:
color = self._COMPONENT_COLORS[component]
return color + text + colorama.Style.RESET_ALL
return text
class SectionValuePrettyFormatter(object):
def pformat(self, value, value_format, event_record):
return getattr(self, '_pformat_' + value_format)(value, event_record)
def _pformat_timestamp(self, event_timestamp, event_record=None):
return datetime.datetime.fromtimestamp(
event_timestamp/1000.0).strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
def _pformat_dictionary(self, obj, event_record=None):
return json.dumps(obj=obj, sort_keys=True, indent=4)
def _pformat_http_body(self, body, event_record):
if not body:
return 'There is no associated body'
elif event_record['payload'].get('streaming', False):
return 'The body is a stream and will not be displayed'
elif self._is_xml(body):
# TODO: Figure out a way to minimize the number of times we have
# to parse the XML. Currently, at worst, it will parse three times:
# once to determine if it is XML, once to strip whitespace, and a
# third time to make it pretty. This is an issue because it can be
# costly when there are large XML payloads such as an s3
# ListObjects call.
return self._get_pretty_xml(body)
elif self._is_json_structure(body):
return self._get_pretty_json(body)
else:
return body
def _get_pretty_xml(self, body):
# The body is parsed and whitespace is stripped because some services
# like ec2 already return pretty XML and if toprettyxml() was applied
# to it, it will add even more newlines and spaces on top of it.
# So this just removes all whitespace from the start to prevent the
# chance of adding too many newlines and spaces when toprettyxml()
# is called.
stripped_body = self._strip_whitespace(body)
xml_dom = xml.dom.minidom.parseString(stripped_body)
return xml_dom.toprettyxml(indent=' '*4, newl='\n')
def _get_pretty_json(self, body):
# The json body is loaded so it can be dumped in a format that
# is desired.
obj = json.loads(body)
return self._pformat_dictionary(obj)
def _is_xml(self, body):
try:
xml.dom.minidom.parseString(body)
except xml.parsers.expat.ExpatError:
return False
return True
def _strip_whitespace(self, xml_string):
xml_dom = xml.dom.minidom.parseString(xml_string)
return ''.join(
[line.strip() for line in xml_dom.toxml().splitlines()]
)
def _is_json_structure(self, body):
if body.startswith('{'):
try:
json.loads(body)
return True
except json.decoder.JSONDecodeError:
return False
return False
class ShowCommand(HistorySubcommand):
NAME = 'show'
DESCRIPTION = (
'Shows the various events related to running a specific CLI command. '
'If this command is run without any positional arguments, it will '
'display the events for the last CLI command ran.'
)
FORMATTERS = {
'detailed': DetailedFormatter
}
ARG_TABLE = [
{'name': 'command_id', 'nargs': '?', 'default': 'latest',
'positional_arg': True,
'help_text': (
'The ID of the CLI command to show. If this positional argument '
'is omitted, it will show the last CLI command ran.')},
{'name': 'include', 'nargs': '+',
'help_text': (
'Specifies which events to **only** include when showing the '
'CLI command. This argument is mutually exclusive with '
'``--exclude``.')},
{'name': 'exclude', 'nargs': '+',
'help_text': (
'Specifies which events to exclude when showing the '
'CLI command. This argument is mutually exclusive with '
'``--include``.')},
{'name': 'format', 'choices': FORMATTERS.keys(),
'default': 'detailed', 'help_text': (
'Specifies which format to use in showing the events for '
'the specified CLI command. The following formats are '
'supported:\n\n'
'<ul>'
'<li> detailed - This the default format. It prints out a '
'detailed overview of the CLI command ran. It displays all '
'of the key events in the command lifecycle where each '
'important event has a title and its important values '
'underneath. The events are ordered by timestamp and events of '
'the same API call are associated together with the '
'[``api_id``] notation where events that share the same '
'``api_id`` belong to the lifecycle of the same API call.'
'</li>'
'</ul>'
)
}
]
def _run_main(self, parsed_args, parsed_globals):
self._connect_to_history_db()
try:
self._validate_args(parsed_args)
with self._get_output_stream() as output_stream:
formatter = self._get_formatter(
parsed_args, parsed_globals, output_stream)
for record in self._get_record_iterator(parsed_args):
formatter.display(record)
finally:
self._close_history_db()
return 0
def _validate_args(self, parsed_args):
if parsed_args.exclude and parsed_args.include:
raise ValueError(
'Either --exclude or --include can be provided but not both')
def _get_formatter(self, parsed_args, parsed_globals, output_stream):
format_type = parsed_args.format
formatter_kwargs = {
'include': parsed_args.include,
'exclude': parsed_args.exclude,
'output': output_stream
}
if format_type == 'detailed':
formatter_kwargs['colorize'] = self._should_use_color(
parsed_globals)
return self.FORMATTERS[format_type](**formatter_kwargs)
def _get_record_iterator(self, parsed_args):
if parsed_args.command_id == 'latest':
return self._db_reader.iter_latest_records()
else:
return self._db_reader.iter_records(parsed_args.command_id)
| [
[
[
572,
580
],
[
9367,
9375
]
],
[
[
588,
592
],
[
9549,
9553
],
[
11234,
11238
],
[
11788,
11792
],
[
11852,
11856
]
],
[
[
600,
603
],
[
1575,
1578
]
],
[
[
611,
628
]
],
[
[
636,
651
],
[
10985,
10988
],
[
11351,
11354
],
[
11400,
11403
],
[
11539,
11542
]
],
[
[
660,
668
],
[
5346,
5354
],
[
5392,
5400
],
[
5781,
5789
],
[
9055,
9063
]
],
[
[
696,
699
],
[
7353,
7356
]
],
[
[
751,
768
],
[
11952,
11969
]
],
[
[
819,
830
],
[
2495,
2506
]
],
[
[
839,
848
],
[
2465,
2474
]
],
[
[
2447,
2464
],
[
12269,
12286
],
[
5512,
5529
]
],
[
[
9108,
9135
],
[
5712,
5739
]
],
[
[
11940,
11951
]
]
] |
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import unicode_literals
import platform as pf
from . import core
class PlatformCollector(object):
"""Collector for python platform information"""
def __init__(self, registry=core.REGISTRY, platform=None):
self._platform = pf if platform is None else platform
info = self._info()
system = self._platform.system()
if system == "Java":
info.update(self._java())
self._metrics = [
self._add_metric("python_info", "Python platform information", info)
]
if registry:
registry.register(self)
def collect(self):
return self._metrics
@staticmethod
def _add_metric(name, documentation, data):
labels = data.keys()
values = [data[k] for k in labels]
g = core.GaugeMetricFamily(name, documentation, labels=labels)
g.add_metric(values, 1)
return g
def _info(self):
major, minor, patchlevel = self._platform.python_version_tuple()
return {
"version": self._platform.python_version(),
"implementation": self._platform.python_implementation(),
"major": major,
"minor": minor,
"patchlevel": patchlevel
}
def _java(self):
java_version, _, vminfo, osinfo = self._platform.java_ver()
vm_name, vm_release, vm_vendor = vminfo
return {
"jvm_version": java_version,
"jvm_release": vm_release,
"jvm_vendor": vm_vendor,
"jvm_name": vm_name
}
PLATFORM_COLLECTOR = PlatformCollector()
"""PlatformCollector in default Registry REGISTRY"""
| [
[
[
65,
81
]
],
[
[
90,
104
],
[
301,
303
]
],
[
[
120,
124
],
[
245,
249
],
[
852,
856
]
],
[
[
133,
150
],
[
1638,
1655
]
],
[
[
1617,
1635
]
]
] |
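A hypothetical probe of the PlatformCollector in the row above; passing registry=None skips the self-registration branch, and metric families are assumed to expose name and samples in the usual Prometheus client-library fashion:
# Hypothetical probe; assumes the row's module and its core dependency import cleanly.
collector = PlatformCollector(registry=None)    # registry is falsy: no register()
for family in collector.collect():
    print(family.name, family.samples)          # expect a single python_info gauge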
from client_database_connection import mycursor
import os
sql = "INSERT INTO free_node (node_id) VALUES (%s)"
val = (node_id)
mycursor.execute(sql, val)
command = 'python get_code_when_free.py'
os.system(command) | [
[
[
39,
47
],
[
134,
142
]
],
[
[
56,
58
],
[
204,
206
]
],
[
[
64,
67
],
[
151,
154
]
],
[
[
117,
120
],
[
156,
159
]
],
[
[
162,
169
],
[
214,
221
]
]
] |