Dataset columns:
  code        string (length 5 to 1M)
  repo_name   string (length 5 to 109)
  path        string (length 6 to 208)
  language    string (1 class)
  license     string (15 classes)
  size        int64 (5 to 1M)
/*
 * Copyright 2017-2020 47 Degrees Open Source <https://www.47deg.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package higherkindness.mu.rpc
package testing

import io.grpc._

object interceptors {

  class NoopInterceptor extends ClientInterceptor {
    def interceptCall[ReqT, RespT](
        method: MethodDescriptor[ReqT, RespT],
        callOptions: CallOptions,
        next: Channel
    ): ClientCall[ReqT, RespT] = next.newCall(method, callOptions)
  }

}
frees-io/freestyle-rpc
modules/testing/src/main/scala/higherkindness/mu/rpc/testing/interceptors.scala
Scala
apache-2.0
989
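A no-op interceptor like this is typically attached when wiring a test client. A minimal sketch, assuming a grpc-java channel built elsewhere (the host and port are placeholders):

import io.grpc.{ClientInterceptors, ManagedChannelBuilder}
import higherkindness.mu.rpc.testing.interceptors.NoopInterceptor

// Wrap an existing channel so every call passes through the interceptor,
// which simply forwards to the next channel in the chain.
val channel     = ManagedChannelBuilder.forAddress("localhost", 50051).usePlaintext().build()
val intercepted = ClientInterceptors.intercept(channel, new NoopInterceptor)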
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author  John Miller
 *  @version 1.2
 *  @date    Mon Feb  2 16:43:07 EST 2015
 *  @see     LICENSE (MIT style license file).
 */

// FIX - currently only works for Inverse

package scalation.analytics.par

import math.pow

import scalation.linalgebra.{MatriD, VectorD}
import scalation.linalgebra.par.{Fac_Cholesky, Fac_QR, MatrixD}
import scalation.plot.Plot
import scalation.util.{Error, time}

import scalation.analytics.Predictor
import scalation.analytics.RegTechnique._

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RidgeRegression` class supports multiple linear regression.  In this
 *  case, 'x' is multi-dimensional [x_1, ... x_k].  Both the input matrix 'x'
 *  and the response vector 'y' are centered (zero mean).  Fit the parameter
 *  vector 'b' in the regression equation
 *  <p>
 *      y = b dot x + e = b_1 * x_1 + ... b_k * x_k + e
 *  <p>
 *  where 'e' represents the residuals (the part not explained by the model).
 *  Use Least-Squares (minimizing the residuals) to fit the parameter vector
 *  <p>
 *      b = x_pinv * y   [ alternative: b = solve (y) ]
 *  <p>
 *  where 'x_pinv' is the pseudo-inverse.  Three techniques are provided:
 *  <p>
 *      Fac_QR         // QR Factorization: slower, more stable (default)
 *      Fac_Cholesky   // Cholesky Factorization: faster, less stable (reasonable choice)
 *      Inverse        // Inverse/Gaussian Elimination, classical textbook technique (outdated)
 *  <p>
 *  This version uses parallel processing to speed up execution.
 *  @see http://statweb.stanford.edu/~tibs/ElemStatLearn/
 *  @param x          the centered input/design m-by-n matrix NOT augmented with a first column of ones
 *  @param y          the centered response vector
 *  @param lambda     the shrinkage parameter (0 => OLS) in the penalty term 'lambda * b dot b'
 *  @param technique  the technique used to solve for b in x.t*x*b = x.t*y
 */
class RidgeRegression (x: MatrixD, y: VectorD, lambda: Double = 0.1, technique: RegTechnique = Inverse)
      extends Predictor with Error
{
    if (y != null && x.dim1 != y.dim) flaw ("constructor", "dimensions of x and y are incompatible")
    if (x.dim1 <= x.dim2) flaw ("constructor", "not enough data rows in matrix to use regression")

    private val DEBUG    = true                    // debug flag
    private val k        = x.dim2 - 1              // number of variables (k = n-1)
    private val m        = x.dim1.toDouble         // number of data points (rows)
    private val r_df     = (m-1.0) / (m-k-1.0)     // ratio of degrees of freedom
    private var rSquared = -1.0                    // coefficient of determination (quality of fit)
    private var rBarSq   = -1.0                    // adjusted R-squared
    private var fStat    = -1.0                    // F statistic (quality of fit)

    private val fac = technique match {                      // select the factorization technique
        case Fac_QR       => new Fac_QR (x)                  // QR Factorization
        case Fac_Cholesky => new Fac_Cholesky (x.t * x)      // Cholesky Factorization
        case _            => null                            // don't factor, use inverse
    } // match

    private val x_pinv = technique match {                   // pseudo-inverse of x
        case Fac_QR       => val (q, r) = fac.factor (); r.inverse * q.t
        case Fac_Cholesky => fac.factor (); null             // don't compute it directly
        case _            => (xtx).inverse * x.t             // classic textbook technique
    } // match

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute x.t * x and add lambda to the diagonal.
     */
    def xtx: MatrixD = { val a = x.t * x; for (i <- 0 until a.dim1) a(i, i) += lambda; a }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Train the predictor by fitting the parameter vector (b-vector) in the
     *  multiple regression equation
     *      y = b dot x + e = [b_1, ... b_k] dot [x_1, ... x_k] + e
     *  using the least squares method.
     */
    def train ()
    {
        b        = if (x_pinv == null) fac.solve (y) else x_pinv * y   // parameter vector [b_1, ... b_k]
        val e    = y - x * b                         // residual/error vector
        val sse  = e dot e                           // residual/error sum of squares
        val sst  = (y dot y) - pow (y.sum, 2) / m    // total sum of squares
        val ssr  = sst - sse                         // regression sum of squares
        rSquared = ssr / sst                         // coefficient of determination (R-squared)
        rBarSq   = 1.0 - (1.0-rSquared) * r_df       // adjusted R-squared
        fStat    = ssr * (m-k-1.0) / (sse * k)       // F statistic (msr / mse)
    } // train

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Retrain the predictor by fitting the parameter vector (b-vector) in the
     *  multiple regression equation
     *      yy = b dot x + e = [b_1, ... b_k] dot [x_1, ... x_k] + e
     *  using the least squares method.
     *  @param yy  the new response vector
     */
    def train (yy: VectorD)
    {
        b        = if (x_pinv == null) fac.solve (yy) else x_pinv * yy   // parameter vector [b_1, ... b_k]
        val e    = yy - x * b                        // residual/error vector
        val sse  = e dot e                           // residual/error sum of squares
        val sst  = (yy dot yy) - pow (yy.sum, 2) / m // total sum of squares
        val ssr  = sst - sse                         // regression sum of squares
        rSquared = ssr / sst                         // coefficient of determination
        rBarSq   = 1.0 - (1.0-rSquared) * r_df       // adjusted R-squared
        fStat    = ssr * (m-k-1.0) / (sse * k)       // F statistic (msr / mse)
    } // train

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the fit (parameter vector b, quality of fit including rSquared).
     */
    def fit: VectorD = VectorD (rSquared, rBarSq, fStat)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula below.
     *  @param z  the new vector to predict
     */
    def predict (z: VectorD): Double = b dot z

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula y = b dot z for
     *  each row of matrix z.
     *  @param z  the new matrix to predict
     */
    def predict (z: MatriD): VectorD = z * b
/*  {
        val ypp = new VectorD (z.dim1)
        for (i <- 0 until z.dim1) ypp(i) = predict (z(i))
        ypp
    } // predict
*/

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform backward elimination to remove the least predictive variable
     *  from the model, returning the variable to eliminate, the new parameter
     *  vector, the new R-squared value and the new F statistic.
     */
    def backElim (): Tuple3 [Int, VectorD, VectorD] =
    {
        var j_max = -1                               // index of variable to eliminate
        var b_max: VectorD = null                    // parameter values for best solution
        var ft_max = VectorD (3); ft_max.set (-1.0)  // optimize on quality of fit (ft(0) is rSquared)

        for (j <- 1 to k) {
            val keep = m.toInt                       // i-value large enough to not exclude any rows in slice
            val rg_j = new RidgeRegression (x.sliceExclude (keep, j), y)   // regress with x_j removed
            rg_j.train ()
            val b  = rg_j.coefficient
            val ft = rg_j.fit
            if (ft(0) > ft_max(0)) { j_max = j; b_max = b; ft_max = ft }
        } // for
        (j_max, b_max, ft_max)
    } // backElim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Variance Inflation Factor (VIF) for each variable to test
     *  for multi-collinearity by regressing x_j against the rest of the variables.
     *  A VIF over 10 indicates that over 90% of the variance of x_j can be predicted
     *  from the other variables, so x_j is a candidate for removal from the model.
     */
    def vif: VectorD =
    {
        val vifV = new VectorD (k)                   // VIF vector
        for (j <- 1 to k) {
            val keep = m.toInt                       // i-value large enough to not exclude any rows in slice
            val x_j  = x.col(j)                      // x_j is jth column in x
            val rg_j = new RidgeRegression (x.sliceExclude (keep, j), x_j)   // regress with x_j removed
            rg_j.train ()
            vifV(j-1) = 1.0 / (1.0 - rg_j.fit(0))    // store vif for x_1 in vifV(0)
        } // for
        vifV
    } // vif

} // RidgeRegression class

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RidgeRegression` companion object is used to center the input matrix 'x'.
 *  This is done by subtracting the column means from each value.
 */
object RidgeRegression
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Center the input matrix 'x' to zero mean, columnwise, by subtracting the mean.
     *  @param x     the input matrix to center
     *  @param mu_x  the vector of column means of matrix x
     */
    def center (x: MatrixD, mu_x: VectorD): MatrixD =
    {
        val x_c = new MatrixD (x.dim1, x.dim2)
        for (j <- 0 until x.dim2) {
            x_c.setCol (j, x.col(j) - mu_x(j))       // subtract column means
        } // for
        x_c
    } // center

} // RidgeRegression object

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RidgeRegressionTest` object tests `RidgeRegression` class using the following
 *  regression equation.
 *  <p>
 *      y = b dot x = b_1*x_1 + b_2*x_2.
 *  <p>
 *  Test regression and backward elimination.
 *  @see http://statmaster.sdu.dk/courses/st111/module03/index.html
 */
object RidgeRegressionTest extends App
{
    import RidgeRegression.center

    // 5 data points: x_1 coordinate, x_2 coordinate
    val x = new MatrixD ((5, 2), 36.0,  66.0,        // 5-by-2 matrix
                                 37.0,  68.0,
                                 47.0,  64.0,
                                 32.0,  53.0,
                                  1.0, 101.0)
    val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0)
    val z = VectorD (20.0, 80.0)

    println ("x = " + x + "\ny = " + y + "\nz = " + z)

    // Compute centered (zero mean) versions of x, y and z
    val mu_x = x.mean                                // columnwise mean of x
    val mu_y = y.mean                                // mean of y
    val mu_z = z.mean                                // mean of z
    val x_c  = center (x, mu_x)                      // centered x (columnwise)
    val y_c  = y - mu_y                              // centered y
    val z_c  = z - mu_z                              // centered z

    println ("x_c = " + x_c + "\ny_c = " + y_c + "\nz_c = " + z_c)

    val rrg = new RidgeRegression (x_c, y_c)
    rrg.train ()
    println ("fit = " + rrg.fit)

    val yp = rrg.predict (z_c) + mu_y                // predict y for one point
    println ("predict (" + z + ") = " + yp)

    val yyp = rrg.predict (x_c) + mu_y               // predict y for several points
    println ("predict (" + x + ") = " + yyp)

    new Plot (x.col(0), y, yyp)
    new Plot (x.col(1), y, yyp)

    println ("reduced model: fit = " + rrg.backElim ())   // eliminate least predictive variable

} // RidgeRegressionTest object

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RidgeRegressionTest2` object tests `RidgeRegression` class using the following
 *  regression equation.
 *  <p>
 *      y = b dot x = b_1*x_1 + b_2*x_2.
 *  <p>
 *  Test regression using QR Decomposition and Gaussian Elimination for computing
 *  the pseudo-inverse.
 */
object RidgeRegressionTest2 extends App
{
    // 4 data points: x_1 coordinate, x_2 coordinate
    val x = new MatrixD ((4, 2), 1.0, 1.0,           // 4-by-2 matrix
                                 1.0, 2.0,
                                 2.0, 1.0,
                                 2.0, 2.0)
    val y = VectorD (6.0, 8.0, 7.0, 9.0)
    val z = VectorD (2.0, 3.0)
    var rrg: RidgeRegression = null

    println ("x = " + x)
    println ("y = " + y)

    println ("-------------------------------------------------")
    println ("Fit the parameter vector b using QR Factorization")
    rrg = new RidgeRegression (x, y)
    rrg.train ()
    println ("fit = " + rrg.fit)

    val yp = rrg.predict (z)                         // predict y for one point
    println ("predict (" + z + ") = " + yp)

    val yyp = rrg.predict (x)                        // predict y for several points
    println ("predict (" + x + ") = " + yyp)

    new Plot (x.col(0), y, yyp)
    new Plot (x.col(1), y, yyp)

} // RidgeRegressionTest2 object

//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RidgeRegressionTest3` object tests the multi-collinearity method in the
 *  `RidgeRegression` class using the following regression equation.
 *  <p>
 *      y = b dot x = b_1*x_1 + b_2*x_2 + b_3*x_3 + b_4*x_4
 *  <p>
 *  @see online.stat.psu.edu/online/development/stat501/12multicollinearity/05multico_vif.html
 *  @see online.stat.psu.edu/online/development/stat501/data/bloodpress.txt
 */
object RidgeRegressionTest3 extends App
{
    // 20 data points:     x_1     x_2    x_3     x_4
    //                     Age  Weight    Dur  Stress
    val x = new MatrixD ((20, 4), 47.0,  85.4,  5.1, 33.0,
                                  49.0,  94.2,  3.8, 14.0,
                                  49.0,  95.3,  8.2, 10.0,
                                  50.0,  94.7,  5.8, 99.0,
                                  51.0,  89.4,  7.0, 95.0,
                                  48.0,  99.5,  9.3, 10.0,
                                  49.0,  99.8,  2.5, 42.0,
                                  47.0,  90.9,  6.2,  8.0,
                                  49.0,  89.2,  7.1, 62.0,
                                  48.0,  92.7,  5.6, 35.0,
                                  47.0,  94.4,  5.3, 90.0,
                                  49.0,  94.1,  5.6, 21.0,
                                  50.0,  91.6, 10.2, 47.0,
                                  45.0,  87.1,  5.6, 80.0,
                                  52.0, 101.3, 10.0, 98.0,
                                  46.0,  94.5,  7.4, 95.0,
                                  46.0,  87.0,  3.6, 18.0,
                                  46.0,  94.5,  4.3, 12.0,
                                  48.0,  90.5,  9.0, 99.0,
                                  56.0,  95.7,  7.0, 99.0)
    // response BP
    val y = VectorD (105.0, 115.0, 116.0, 117.0, 112.0, 121.0, 121.0, 110.0, 110.0, 114.0,
                     114.0, 115.0, 114.0, 106.0, 125.0, 114.0, 106.0, 113.0, 110.0, 122.0)

    val rrg = new RidgeRegression (x, y)
    time { rrg.train () }

    println ("fit = " + rrg.fit)    // fit model y = b_1*x_1 + b_2*x_2 + b_3*x_3 + b_4*x_4
    println ("vif = " + rrg.vif)    // test multi-collinearity (VIF)

} // RidgeRegressionTest3 object
mvnural/scalation
src/main/scala/scalation/analytics/par/RidgeRegression.scala
Scala
mit
16,101
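The `xtx` method above implements the ridge normal equations, b = (XᵀX + λI)⁻¹ Xᵀy. A minimal self-contained sketch of the same closed form for a single centered predictor, in plain Scala with made-up sample data (no scalation dependency):

object RidgeOneVariable extends App {
  // Centered data: one predictor x and response y (illustrative values only).
  val x = Array(-2.0, -1.0, 0.0, 1.0, 2.0)
  val y = Array(-3.9, -2.1, 0.1, 1.8, 4.1)
  val lambda = 0.1

  // 1-by-1 case of b = (X'X + lambda I)^-1 X'y: adding lambda to x.x
  // shrinks the coefficient toward zero relative to OLS.
  val xy = x.zip(y).map { case (a, b) => a * b }.sum
  val xx = x.map(v => v * v).sum
  println(f"ols   b = ${xy / xx}%.4f")
  println(f"ridge b = ${xy / (xx + lambda)}%.4f")
}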
// See LICENSE.txt for license details.

package examples

import chisel3._
import chisel3.util.Enum

class Parity extends Module {
  val io = IO(new Bundle {
    val in  = Input(Bool())
    val out = Output(Bool())
  })
  val s_even :: s_odd :: Nil = Enum(2)
  val state = RegInit(s_even)
  when (io.in) {
    when (state === s_even) { state := s_odd }
    .otherwise { state := s_even }
  }
  io.out := (state === s_odd)
}
timtian090/Playground
chiselTutorial/src/main/scala/examples/Parity.scala
Scala
mit
439
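The module above is a two-state FSM whose registered state toggles whenever `in` is asserted. A plain-Scala model of the next-state parity (a sketch, not a Chisel testbench; the input sequence is made up):

object ParityModel extends App {
  val inputs = Seq(true, false, true, true)
  // Fold the toggle rule over the inputs: the parity flips on each asserted 'in'.
  val parity = inputs.scanLeft(false)((odd, in) => if (in) !odd else odd).tail
  println(parity)   // List(true, true, false, true)
}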
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.example.loadmodel

import java.nio.file.Paths

import com.intel.analytics.bigdl.models.inception.Inception_v1_NoAuxClassifier
import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.optim.{Top1Accuracy, Top5Accuracy, Validator}
import com.intel.analytics.bigdl.utils.Engine
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import scopt.OptionParser

import scala.language.existentials

/**
 * ModelValidator provides an integrated example to load models,
 * and test over imagenet validation dataset
 * (running as a local Java program, or a standard Spark program).
 */
object ModelValidator {

  val logger = Logger.getLogger(getClass)

  /**
   * This is a trait meaning the model type.
   * There are three sorts of model type, which
   * are torch model [[TorchModel]], caffe model
   * [[CaffeModel]] and BigDL model [[BigDlModel]].
   */
  sealed trait ModelType

  case object TorchModel extends ModelType

  case object CaffeModel extends ModelType

  case object BigDlModel extends ModelType

  case class TestLocalParams(
    folder: String = "./",
    modelType: ModelType = null,
    modelName: String = "",
    caffeDefPath: Option[String] = None,
    modelPath: String = "",
    batchSize: Int = 32,
    meanFile: Option[String] = None)

  val testLocalParser = new OptionParser[TestLocalParams]("BigDL Load Model Example") {
    head("BigDL Load Model Example")
    opt[String]('f', "folder")
      .text("where you put your local image files")
      .action((x, c) => c.copy(folder = x))
    opt[String]('m', "modelName")
      .text("the model name you want to test")
      .required()
      .action((x, c) => c.copy(modelName = x.toLowerCase()))
    opt[String]('t', "modelType")
      .text("torch, caffe or bigdl")
      .required()
      .action((x, c) =>
        x.toLowerCase() match {
          case "torch" => c.copy(modelType = TorchModel)
          case "caffe" => c.copy(modelType = CaffeModel)
          case "bigdl" => c.copy(modelType = BigDlModel)
          case _ => throw new IllegalArgumentException("only torch, caffe or bigdl supported")
        })
    opt[String]("caffeDefPath")
      .text("caffe define path")
      .action((x, c) => c.copy(caffeDefPath = Some(x)))
    opt[String]("modelPath")
      .text("model path")
      .action((x, c) => c.copy(modelPath = x))
    opt[Int]('b', "batchSize")
      .text("batch size")
      .action((x, c) => c.copy(batchSize = x))
    opt[String]("meanFile")
      .text("mean file")
      .action((x, c) => c.copy(meanFile = Some(x)))
  }

  def main(args: Array[String]): Unit = {
    testLocalParser.parse(args, TestLocalParams()).foreach(param => {
      val conf = Engine.createSparkConf()
      conf.setAppName("BigDL Image Classifier Example")
      val sc = new SparkContext(conf)
      Engine.init

      val valPath = param.folder

      val (model, validateDataSet) = param.modelType match {
        case CaffeModel =>
          param.modelName match {
            case "alexnet" =>
              (Module.loadCaffe[Float](AlexNet(1000),
                param.caffeDefPath.get, param.modelPath),
                AlexNetPreprocessor.rdd(valPath, param.batchSize, param.meanFile.get, sc))
            case "inception" =>
              (Module.loadCaffe[Float](Inception_v1_NoAuxClassifier(1000),
                param.caffeDefPath.get, param.modelPath),
                InceptionPreprocessor.rdd(valPath, param.batchSize, sc))
          }
        case TorchModel =>
          param.modelName match {
            case "resnet" =>
              (Module.loadTorch[Float](param.modelPath),
                ResNetPreprocessor.rdd(valPath, param.batchSize, sc))
          }
        case _ => throw new IllegalArgumentException(s"${param.modelType} is not " +
          s"supported in this example, please use alexnet/inception/resnet")
      }

      println(model)
      val result = model.evaluate(
        validateDataSet,
        Array(new Top1Accuracy[Float](), new Top5Accuracy[Float]()),
        Some(param.batchSize))

      result.foreach(r => {
        logger.info(s"${r._2} is ${r._1}")
      })
    })
  }
}
psyyz10/BigDL
spark/dl/src/main/scala/com/intel/analytics/bigdl/example/loadmodel/ModelValidator.scala
Scala
apache-2.0
4,810
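The scopt parser above defines the full command-line surface. A hypothetical exercise of it in isolation (the folder and model paths are placeholders, not paths from the example):

object ParseArgsDemo extends App {
  import com.intel.analytics.bigdl.example.loadmodel.ModelValidator

  // scopt's parse returns Option[TestLocalParams]; None on invalid input.
  val demoArgs = Array(
    "-f", "/data/imagenet-val",           // placeholder image folder
    "-m", "resnet",
    "-t", "torch",
    "--modelPath", "/models/resnet.t7",   // placeholder model file
    "-b", "32")
  ModelValidator.testLocalParser.parse(demoArgs, ModelValidator.TestLocalParams()) match {
    case Some(p) => println(s"model=${p.modelName}, type=${p.modelType}, batch=${p.batchSize}")
    case None    => println("invalid arguments")
  }
}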
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.github.sclearn.dataset.spark.sql.types

import java.util.Objects

import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._

/**
 * The data type for User Defined Types (UDTs).
 *
 * This interface allows a user to make their own classes more interoperable with SparkSQL;
 * e.g., by creating a [[UserDefinedType]] for a class X, it becomes possible to create
 * a `DataFrame` which has class X in the schema.
 *
 * For SparkSQL to recognize UDTs, the UDT must be annotated with
 * [[SQLUserDefinedType]].
 *
 * The conversion via `serialize` occurs when instantiating a `DataFrame` from another RDD.
 * The conversion via `deserialize` occurs when reading from a `DataFrame`.
 *
 * Note: This was previously a developer API in Spark 1.x. We are making this private in Spark 2.0
 * because we will very likely create a new version of this that works better with Datasets.
 */
private[spark] abstract class UserDefinedType[UserType >: Null] extends DataType with Serializable {

  /** Underlying storage type for this UDT */
  def sqlType: DataType

  /** Paired Python UDT class, if exists. */
  def pyUDT: String = null

  /** Serialized Python UDT class, if exists. */
  def serializedPyClass: String = null

  /**
   * Convert the user type to a SQL datum
   */
  def serialize(obj: UserType): Any

  /** Convert a SQL datum to the user type */
  def deserialize(datum: Any): UserType

  override private[sql] def jsonValue: JValue = {
    ("type" -> "udt") ~
      ("class" -> this.getClass.getName) ~
      ("pyClass" -> pyUDT) ~
      ("sqlType" -> sqlType.jsonValue)
  }

  /**
   * Class object for the UserType
   */
  def userClass: java.lang.Class[UserType]

  override def defaultSize: Int = sqlType.defaultSize

  /**
   * For UDT, asNullable will not change the nullability of its internal sqlType and just returns
   * itself.
   */
  override private[spark] def asNullable: UserDefinedType[UserType] = this

  override private[sql] def acceptsType(dataType: DataType) = dataType match {
    case other: UserDefinedType[_] =>
      this.getClass == other.getClass ||
        this.userClass.isAssignableFrom(other.userClass)
    case _ => false
  }

  override def sql: String = sqlType.sql

  override def hashCode(): Int = getClass.hashCode()

  override def equals(other: Any): Boolean = other match {
    case that: UserDefinedType[_] => this.acceptsType(that)
    case _ => false
  }

  override def catalogString: String = sqlType.simpleString
}

/**
 * The user defined type in Python.
 *
 * Note: This can only be accessed via Python UDF, or accessed as serialized object.
 */
private[sql] class PythonUserDefinedType(
    val sqlType: DataType,
    override val pyUDT: String,
    override val serializedPyClass: String) extends UserDefinedType[Any] {

  /* The serialization is handled by UDT class in Python */
  override def serialize(obj: Any): Any = obj
  override def deserialize(datum: Any): Any = datum

  /* There is no Java class for Python UDT */
  override def userClass: java.lang.Class[Any] = null

  override private[sql] def jsonValue: JValue = {
    ("type" -> "udt") ~
      ("pyClass" -> pyUDT) ~
      ("serializedClass" -> serializedPyClass) ~
      ("sqlType" -> sqlType.jsonValue)
  }

  override def equals(other: Any): Boolean = other match {
    case that: PythonUserDefinedType => pyUDT == that.pyUDT
    case _ => false
  }

  override def hashCode(): Int = Objects.hashCode(pyUDT)
}
sclearn/sclearn
sc/src/main/scala/io/github/sclearn/dataset/spark/sql/types/UserDefinedType.scala
Scala
apache-2.0
4,251
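A minimal sketch of a concrete subclass under the contract above. The `Point` class, the array-based encoding, and the use of `ArrayType`/`DoubleType` are illustrative assumptions; a real Spark UDT also needs the `SQLUserDefinedType` annotation and an internal-row encoding:

class Point(val x: Double, val y: Double)

// Hypothetical UDT: stores a Point as an array of doubles (illustration only).
class PointUDT extends UserDefinedType[Point] {
  override def sqlType: DataType = ArrayType(DoubleType, containsNull = false)
  override def serialize(obj: Point): Any = Array(obj.x, obj.y)
  override def deserialize(datum: Any): Point = datum match {
    case a: Array[Double] => new Point(a(0), a(1))
  }
  override def userClass: Class[Point] = classOf[Point]
}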
package model

import collection.mutable.{HashMap, MultiMap, Set}

class RouteMap {
  val map: MultiMap[Int, String] = new HashMap[Int, Set[String]] with MultiMap[Int, String]

  def routes(port: Int): Set[String] = {
    map.get(port).getOrElse(Set[String]())
  }
}
brendanobra/haproxy-rest-config
app/model/RouteMap.scala
Scala
mit
269
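A quick usage sketch of the class above; `addBinding` comes from the mixed-in `MultiMap` trait (the service names are illustrative):

val routeMap = new RouteMap
routeMap.map.addBinding(80, "frontend")   // adds to the Set stored under key 80
routeMap.map.addBinding(80, "api")
println(routeMap.routes(80))    // Set(frontend, api)
println(routeMap.routes(443))   // Set() for unknown ports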
/***********************************************************************
 * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.tools.status

import java.util
import java.util.Collections

import com.beust.jcommander._
import org.geotools.data.DataStore
import org.locationtech.geomesa.tools.status.GetSftConfigCommand.{Spec, TypeSafe}
import org.locationtech.geomesa.tools.{Command, DataStoreCommand, RequiredTypeNameParam}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType

trait GetSftConfigCommand[DS <: DataStore] extends DataStoreCommand[DS] {

  override val name: String = "get-sft-config"

  override def params: GetSftConfigParams

  override def execute(): Unit = {
    import scala.collection.JavaConversions._
    Command.user.info(s"Retrieving SFT for type name '${params.featureName}'")
    val sft = withDataStore(getSchema)
    if (sft == null) {
      throw new ParameterException(s"Schema '${params.featureName}' does not exist in the provided datastore")
    }
    params.format.map(_.toLowerCase).foreach {
      case TypeSafe => Command.output.info(SimpleFeatureTypes.toConfigString(sft, !params.excludeUserData, params.concise))
      case Spec => Command.output.info(SimpleFeatureTypes.encodeType(sft, !params.excludeUserData))
      // shouldn't happen due to parameter validation
      case f => throw new ParameterException(s"Invalid format '$f'. Valid values are '$TypeSafe' and '$Spec'")
    }
  }

  def getSchema(ds: DS): SimpleFeatureType = ds.getSchema(params.featureName)
}

object GetSftConfigCommand {
  val Spec = "spec"
  val TypeSafe = "config"
}

// @Parameters(commandDescription = "Get the SimpleFeatureType of a feature")
trait GetSftConfigParams extends RequiredTypeNameParam {
  @Parameter(names = Array("--concise"), description = "Render in concise format", required = false)
  var concise: Boolean = false

  @Parameter(names = Array("--format"), description = "Output formats (allowed values are spec or config)", required = false, validateValueWith = classOf[FormatValidator])
  var format: java.util.List[String] = Collections.singletonList(Spec)

  @Parameter(names = Array("--exclude-user-data"), description = "Exclude user data", required = false)
  var excludeUserData: Boolean = false
}

class FormatValidator extends IValueValidator[java.util.List[String]] {
  override def validate(name: String, value: util.List[String]): Unit = {
    import scala.collection.JavaConversions._
    if (value == null || value.isEmpty || value.map(_.toLowerCase).exists(v => v != Spec && v != TypeSafe)) {
      throw new ParameterException(s"Invalid value for format: ${Option(value).map(_.mkString(",")).orNull}")
    }
  }
}
aheyne/geomesa
geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/status/GetSftConfigCommand.scala
Scala
apache-2.0
3,111
/*
 * Copyright 2014 porter <https://github.com/eikek/porter>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package porter.model

import scala.language.implicitConversions
import scala.annotation.tailrec
import java.util.UUID

/**
 * An identifier is a non-empty string consisting of only letters (a-z and A-Z),
 * digits (0-9) and the characters '_', '-', '@', '.', ':' and '/'. All other
 * characters are not allowed.
 */
sealed trait Ident extends Serializable {
  def name: String
  def is(otherName: String) = name == otherName
}

object Ident {

  private val chars = ('a' to 'z').toSet ++ ('A' to 'Z') ++ ('0' to '9') ++ Set('_', '-', '@', '.', ':', '/')

  @SerialVersionUID(20131121)
  private case class Impl(name: String) extends Ident {
    override lazy val toString = s"Ident($name)"
  }

  implicit def apply(name: String): Ident = {
    require(isValid(name), s"Invalid identifier: '$name'")
    Impl(name)
  }

  def unapply(s: Ident): Option[String] = Some(s.name)

  /**
   * Checks whether the given string is a valid identifier.
   *
   * @param s
   * @return
   */
  def isValid(s: String) = {
    @tailrec
    def loop(i: Int): Boolean = {
      if (i >= s.length) i > 0
      else if (chars contains s.charAt(i)) loop(i + 1)
      else false
    }
    loop(0)
  }

  /**
   * Creates an [[porter.model.Ident]] from the given string. If the
   * string is not a valid identifier, [[scala.None]] is returned.
   *
   * @param id
   * @return
   */
  def fromString(id: String): Option[Ident] =
    if (isValid(id)) Some(Impl(id)) else None

  /**
   * Converts the given string to a valid identifier by removing all
   * invalid characters from the string. If the resulting string is
   * empty, [[scala.None]] is returned, otherwise the [[porter.model.Ident]]
   * is created from the resulting string.
   *
   * @param s
   * @return
   */
  def convertString(s: String): Option[Ident] = {
    val conv = s filter chars.contains
    if (conv.isEmpty) None else Some(Impl(conv))
  }

  /**
   * Creates some random identifier.
   * @return
   */
  def randomIdent = Ident(UUID.randomUUID().toString.replace("-", ""))
}

object ValidIdent {
  def unapply(s: String) = Ident.fromString(s) map (_.name)
}
eikek/porter
api/src/main/scala/porter/model/Ident.scala
Scala
apache-2.0
2,715
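A short usage sketch of the API above (the identifiers themselves are made up):

import porter.model.Ident

val id: Ident = Ident("user_42")            // implicit apply; throws on invalid input
println(Ident.isValid("bad name"))          // false: space is not an allowed character
println(Ident.fromString("bad name"))       // None
println(Ident.convertString("bad name"))    // Some(Ident(badname)): invalid chars dropped
println(Ident.randomIdent)                  // e.g. Ident(<32 hex chars>)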
package breeze.stats.distributions

import breeze.numerics.{exp, log}

/**
 * http://en.wikipedia.org/wiki/Laplace_distribution
 *
 * @author dlwh
 **/
case class Laplace(location: Double, scale: Double)(implicit rand: RandBasis = Rand)
    extends ContinuousDistr[Double] with Moments[Double, Double] with HasCdf {

  def mean: Double = location
  def mode: Double = location
  def variance: Double = 2 * scale * scale
  def entropy: Double = 1 + log(2 * scale)
  def logNormalizer: Double = log(2 * scale)   // log of the normalizing constant 2 * scale

  /**
   * Gets one sample from the distribution. Equivalent to sample()
   */
  def draw(): Double = {
    // inverse-CDF sampling, from numpy
    val u = rand.uniform.draw()
    if (u < 0.5) {
      location + scale * log(2 * u)
    } else {
      location - scale * log(2 * (1 - u))
    }
  }

  def unnormalizedLogPdf(x: Double): Double = {
    -math.abs(x - location) / scale
  }

  def probability(x: Double, y: Double): Double = {
    cdf(y) - cdf(x)
  }

  def cdf(x: Double) = x match {
    case Double.NegativeInfinity => 0.0
    case Double.PositiveInfinity => 1.0
    case x if x < location => 0.5 * exp(unnormalizedLogPdf(x))
    case x => 1 - 0.5 * exp(unnormalizedLogPdf(x))
  }
}
dlwh/breeze
math/src/main/scala/breeze/stats/distributions/Laplace.scala
Scala
apache-2.0
1,184
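A quick numerical check of the CDF and the inverse-CDF sampler above (a sketch; relies on breeze's default implicit RandBasis):

val dist = Laplace(location = 0.0, scale = 1.0)
println(dist.cdf(0.0))                 // 0.5: half the mass lies below the location
println(dist.probability(-1.0, 1.0))   // cdf(1) - cdf(-1) = 1 - exp(-1) ≈ 0.632
println(dist.draw())                   // one sample via the inverse CDF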
package org.elasticmq

import org.elasticmq.actor.queue.InternalMessage
import org.elasticmq.util.NowProvider
import org.joda.time.DateTime
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.mutable

class FifoDeduplicationIdsHistoryTest extends AnyFunSuite with Matchers {

  test("Should store added deduplication IDs") {
    val history = FifoDeduplicationIdsHistory.newHistory()
    val updatedHistory = history
      .addNew(newInternalMessage(Some(DeduplicationId("1")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("2")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("3")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("4")), DateTime.now()))

    updatedHistory.messagesByDeduplicationId.keys shouldBe Set(
      DeduplicationId("1"),
      DeduplicationId("2"),
      DeduplicationId("3"),
      DeduplicationId("4")
    )
    updatedHistory.deduplicationIdsByCreationDate
      .map(_.id) shouldBe List(DeduplicationId("1"), DeduplicationId("2"), DeduplicationId("3"), DeduplicationId("4"))
  }

  test("History should not override given deduplication ID entry if such one already exists") {
    val history = FifoDeduplicationIdsHistory.newHistory()
    val dateTimeInPast = DateTime.now().minusMinutes(10)
    val updatedHistory = history
      .addNew(newInternalMessage(Some(DeduplicationId("1")), dateTimeInPast))
      .addNew(newInternalMessage(Some(DeduplicationId("1")), DateTime.now()))

    updatedHistory.messagesByDeduplicationId.keys shouldBe Set(
      DeduplicationId("1")
    )
    updatedHistory.deduplicationIdsByCreationDate shouldBe List(
      DeduplicationIdWithCreationDate(DeduplicationId("1"), dateTimeInPast)
    )
  }

  test("History should show if given deduplication ID was already used or not") {
    val history = FifoDeduplicationIdsHistory.newHistory()
    val updatedHistory = history
      .addNew(newInternalMessage(Some(DeduplicationId("1")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("2")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("3")), DateTime.now()))
      .addNew(newInternalMessage(Some(DeduplicationId("4")), DateTime.now()))

    updatedHistory.wasRegistered(Some(DeduplicationId("1"))) should be(defined)
    updatedHistory.wasRegistered(Some(DeduplicationId("4"))) should be(defined)
    updatedHistory.wasRegistered(Some(DeduplicationId("7"))) should not be defined
    updatedHistory.wasRegistered(None) should not be defined
  }

  test("History should erase all entries that were created 5 or more minutes ago") {
    val history = FifoDeduplicationIdsHistory.newHistory()
    val now = DateTime.now()
    val updatedHistory = history
      .addNew(newInternalMessage(Some(DeduplicationId("1")), now.minusMinutes(20)))
      .addNew(newInternalMessage(Some(DeduplicationId("2")), now.minusMinutes(10)))
      .addNew(newInternalMessage(Some(DeduplicationId("3")), now.minusMinutes(5).minusSeconds(1)))
      .addNew(newInternalMessage(Some(DeduplicationId("4")), now.minusMinutes(4).minusSeconds(59)))
      .addNew(newInternalMessage(Some(DeduplicationId("5")), now))
      .cleanOutdatedMessages(new NowProvider)

    updatedHistory.messagesByDeduplicationId.keys shouldBe Set(DeduplicationId("4"), DeduplicationId("5"))
    updatedHistory.deduplicationIdsByCreationDate shouldBe List(
      DeduplicationIdWithCreationDate(DeduplicationId("4"), now.minusMinutes(4).minusSeconds(59)),
      DeduplicationIdWithCreationDate(DeduplicationId("5"), now)
    )
  }

  test("Cleaning outdated IDs should stop at first ID which was created in last 5 minutes") {
    val history = FifoDeduplicationIdsHistory.newHistory()
    val now = DateTime.now()
    val updatedHistory = history
      .addNew(newInternalMessage(Some(DeduplicationId("1")), now.minusMinutes(20)))
      .addNew(newInternalMessage(Some(DeduplicationId("2")), now.minusMinutes(4)))
      .addNew(newInternalMessage(Some(DeduplicationId("3")), now.minusMinutes(5)))
      .addNew(newInternalMessage(Some(DeduplicationId("4")), now.minusMinutes(4).minusSeconds(59)))
      .addNew(newInternalMessage(Some(DeduplicationId("5")), now))
      .cleanOutdatedMessages(new NowProvider)

    updatedHistory.messagesByDeduplicationId.keys shouldBe Set(
      DeduplicationId("2"),
      DeduplicationId("3"),
      DeduplicationId("4"),
      DeduplicationId("5")
    )
    updatedHistory.deduplicationIdsByCreationDate shouldBe List(
      DeduplicationIdWithCreationDate(DeduplicationId("2"), now.minusMinutes(4)),
      DeduplicationIdWithCreationDate(DeduplicationId("3"), now.minusMinutes(5)),
      DeduplicationIdWithCreationDate(DeduplicationId("4"), now.minusMinutes(4).minusSeconds(59)),
      DeduplicationIdWithCreationDate(DeduplicationId("5"), now)
    )
  }

  def newInternalMessage(maybeDeduplicationId: Option[DeduplicationId], created: DateTime): InternalMessage =
    InternalMessage(
      id = "1",
      deliveryReceipts = mutable.Buffer.empty,
      nextDelivery = 100L,
      content = "",
      messageAttributes = Map.empty,
      created = created,
      orderIndex = 0,
      firstReceive = NeverReceived,
      receiveCount = 0,
      isFifo = true,
      messageGroupId = None,
      messageDeduplicationId = maybeDeduplicationId,
      tracingId = None,
      sequenceNumber = None
    )
}
adamw/elasticmq
core/src/test/scala/org/elasticmq/FifoDeduplicationIdsHistoryTest.scala
Scala
apache-2.0
5,440
package chana.jpql

import akka.actor.ActorSystem
import akka.testkit.ImplicitSender
import akka.testkit.TestKit
import chana.jpql.JPQLReducer.AskReducedResult
import chana.jpql.nodes.JPQLParser
import chana.jpql.nodes.Statement
import chana.jpql.rats.JPQLGrammar
import chana.schema.SchemaBoard
import com.typesafe.config.ConfigFactory
import java.io.StringReader
import org.apache.avro.Schema
import org.apache.avro.generic.GenericData.Record
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import xtc.tree.Node
import scala.concurrent.duration._

class JPQLReducerEvaluatorSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll {

  def this() = this(ActorSystem("ChanaSystem", ConfigFactory.parseString("""
      akka.actor {
        provider = "akka.cluster.ClusterActorRefProvider"
      }
      akka.remote.netty.tcp.hostname = "localhost"
      # set port to random to by pass the ports that will be occupied by ChanaClusterSpec test
      akka.remote.netty.tcp.port = 0
      """)))

  import chana.avro.AvroRecords._

  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }

  val schemaBoard = new SchemaBoard {
    val entityToSchema = Map("account" -> schema)
    def schemaOf(entityName: String) = entityToSchema.get(entityName)
  }

  def records() = {
    for (id <- 0 to 9) yield {
      val record = initAccount()
      record.put("registerTime", id.toLong)
      record.put("lastLoginTime", (id % 3).toLong)
      record.put("id", id.toString)

      val chargeRecord = chargeRecordBuilder.build()
      chargeRecord.put("time", id * 1000L)
      chargeRecord.put("amount", id * 100.0)
      record.put("lastChargeRecord", chargeRecord)

      record
    }
  }

  def parse(query: String) = {
    val reader = new StringReader(query)
    val grammar = new JPQLGrammar(reader, "<current>")
    val r = grammar.pJPQL(0)
    val rootNode = r.semanticValue[Node]
    info("\n\n## " + query + " ##")

    val parser = new JPQLParser(rootNode)
    val stmt = parser.visitRoot()
    //info("\nParsed:\n" + stmt)
    val metaEval = new JPQLMetadataEvaluator("2", schemaBoard)
    val projectionSchema = metaEval.collectMetadata(stmt, null).head
    info("Projection Schema:\n" + projectionSchema)
    (stmt, projectionSchema)
  }

  def gatherProjection(entityId: String, stmt: Statement, projectionSchema: Schema, record: Record) = {
    val e = new JPQLMapperEvaluator(record.getSchema, projectionSchema)
    val projection = e.gatherProjection(entityId, stmt, record)
    projection match {
      case x: BinaryProjection =>
        info("\nCollected: " + x.id + ", " + RecordProjection(chana.avro.avroDecode[Record](x.projection, projectionSchema).get))
      case x: RemoveProjection => info("\nCollected: " + x)
    }
    projection
  }

  "JPQLReduceEvaluator" must {

    "query fields" in {
      val q = "SELECT a.registerTime FROM account a " +
        "WHERE a.registerTime >= 5 ORDER BY a.registerTime"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] => result should be(Array(List(5), List(6), List(7), List(8), List(9)))
      }
    }

    "query deep fields" in {
      val q = "SELECT a.registerTime, a.lastChargeRecord.time FROM account a " +
        "WHERE a.registerTime >= 5 ORDER BY a.registerTime"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] =>
          result should be(Array(List(5, 5000), List(6, 6000), List(7, 7000), List(8, 8000), List(9, 9000)))
      }
    }

    "query fields order desc" in {
      val q = "SELECT a.registerTime FROM account a " +
        "WHERE a.registerTime >= 5 ORDER BY a.registerTime DESC"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] => result should be(Array(List(9), List(8), List(7), List(6), List(5)))
      }
    }

    "query fields order by string desc" in {
      val q = "SELECT a.id, a.registerTime FROM account a " +
        "WHERE a.registerTime >= 5 ORDER BY a.id DESC"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] =>
          result should be(Array(List("9", 9), List("8", 8), List("7", 7), List("6", 6), List("5", 5)))
      }
    }

    "query aggregate functions" in {
      val q = "SELECT COUNT(a.id), AVG(a.registerTime), SUM(a.registerTime), MAX(a.registerTime), MIN(a.registerTime) FROM account a " +
        "WHERE a.registerTime >= 5 ORDER BY a.id DESC"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] =>
          result should be(Array(
            List(5.0, 7.0, 35.0, 9.0, 0.0),
            List(5.0, 7.0, 35.0, 9.0, 0.0),
            List(5.0, 7.0, 35.0, 9.0, 0.0),
            List(5.0, 7.0, 35.0, 9.0, 0.0),
            List(5.0, 7.0, 35.0, 9.0, 0.0)))
      }
    }

    "query with groupby" in {
      val q = "SELECT AVG(a.registerTime) FROM account a " +
        "WHERE a.registerTime > 1 GROUP BY a.lastLoginTime ORDER BY a.id"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] => result should be(Array(List(5.5), List(5.0), List(6.0)))
      }
    }

    "query with groupby and having" in {
      val q = "SELECT AVG(a.registerTime) FROM account a " +
        "WHERE a.registerTime > 1 GROUP BY a.lastLoginTime HAVING a.registerTime > 5 ORDER BY a.id"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] => result should be(Array(List(5.5), List(5.0), List(6.0)))
      }
    }

    "query with join" in {
      val q = "SELECT a.registerTime, a.chargeRecords FROM account a JOIN a.chargeRecords c " +
        "WHERE a.registerTime >= 5 AND c.time > 1 ORDER BY a.registerTime"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      import scala.collection.JavaConversions._
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] =>
          result map { xs =>
            xs.map {
              case chargeRecords: java.util.Collection[Record] @unchecked =>
                chargeRecords map { x => (x.get("time"), x.get("amount")) }
              case x => x
            }
          } should be(Array(
            List(5, List((2, -50.0))),
            List(6, List((2, -50.0))),
            List(7, List((2, -50.0))),
            List(8, List((2, -50.0))),
            List(9, List((2, -50.0)))))
      }
    }

    "query with join and INDEX function" in {
      val q = "SELECT a.registerTime, a.chargeRecords FROM account a JOIN a.chargeRecords c " +
        "WHERE a.registerTime >= 5 AND c.time > 0 AND INDEX(c) = 2 ORDER BY a.registerTime"
      val (stmt, projectionSchema) = parse(q)

      val reducer = system.actorOf(JPQLReducer.props("test", stmt, projectionSchema))

      records() foreach { record =>
        reducer ! gatherProjection(record.get("id").asInstanceOf[String], stmt, projectionSchema, record)
      }

      reducer ! AskReducedResult
      import scala.collection.JavaConversions._
      expectMsgPF(2.seconds) {
        case result: Array[List[_]] =>
          result map { xs =>
            xs.map {
              case chargeRecords: java.util.Collection[Record] @unchecked =>
                chargeRecords map { x => (x.get("time"), x.get("amount")) }
              case x => x
            }
          } should be(Array(
            List(5, List((2, -50.0))),
            List(6, List((2, -50.0))),
            List(7, List((2, -50.0))),
            List(8, List((2, -50.0))),
            List(9, List((2, -50.0)))))
      }
    }
  }
}
matthewtt/chana
src/test/scala/chana/jpql/JPQLReducerEvaluatorSpec.scala
Scala
apache-2.0
9,605
package org.jetbrains.plugins.scala.actions

import java.lang.String
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import com.intellij.openapi.project.Project
import com.intellij.openapi.project.DumbAware
import com.intellij.ide.actions.{CreateFileFromTemplateDialog, CreateTemplateInPackageAction}
import org.jetbrains.plugins.scala.icons.Icons
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import com.intellij.psi._
import codeStyle.CodeStyleManager
import org.jetbrains.annotations.NonNls
import com.intellij.openapi.module.Module
import com.intellij.openapi.actionSystem._
import com.intellij.openapi.util.text.StringUtil
import org.jetbrains.plugins.scala.{ScalaFileType, ScalaBundle}
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.ide.IdeView
import java.util.Properties
import org.jetbrains.plugins.scala.config.ScalaFacet
import com.intellij.ide.fileTemplates.{FileTemplateManager, FileTemplate, JavaTemplateUtil}
import com.intellij.openapi.fileTypes.ex.FileTypeManagerEx
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.ui.InputValidatorEx

/**
 * User: Alexander Podkhalyuzin
 * Date: 15.09.2009
 */
class NewScalaTypeDefinitionAction extends CreateTemplateInPackageAction[ScTypeDefinition](
  ScalaBundle.message("newclass.menu.action.text"),
  ScalaBundle.message("newclass.menu.action.description"),
  Icons.CLASS, true) with DumbAware {

  protected def buildDialog(project: Project, directory: PsiDirectory, builder: CreateFileFromTemplateDialog.Builder) {
    builder.addKind("Class", Icons.CLASS, "Scala Class")
    builder.addKind("Object", Icons.OBJECT, "Scala Object")
    builder.addKind("Trait", Icons.TRAIT, "Scala Trait")
    for (template <- FileTemplateManager.getInstance.getAllTemplates) {
      if (isScalaTemplate(template) && checkPackageExists(directory)) {
        builder.addKind(template.getName, Icons.FILE_TYPE_LOGO, template.getName)
      }
    }
    builder.setTitle("Create New Scala Class")
    builder.setValidator(new InputValidatorEx {
      def getErrorText(inputString: String): String = {
        if (inputString.length > 0 && !JavaPsiFacade.getInstance(project).getNameHelper.isQualifiedName(inputString)) {
          return "This is not a valid Scala qualified name"
        }
        null
      }

      def checkInput(inputString: String): Boolean = {
        true
      }

      def canClose(inputString: String): Boolean = {
        !StringUtil.isEmptyOrSpaces(inputString) && getErrorText(inputString) == null
      }
    })
  }

  private def isScalaTemplate(template: FileTemplate): Boolean = {
    val fileType: FileType = FileTypeManagerEx.getInstanceEx.getFileTypeByExtension(template.getExtension)
    fileType == ScalaFileType.SCALA_FILE_TYPE
  }

  def getActionName(directory: PsiDirectory, newName: String, templateName: String): String = {
    ScalaBundle.message("newclass.menu.action.text")
  }

  def getNavigationElement(createdElement: ScTypeDefinition): PsiElement = createdElement.extendsBlock

  def doCreate(directory: PsiDirectory, newName: String, templateName: String): ScTypeDefinition = {
    val file: PsiFile = createClassFromTemplate(directory, newName, templateName)
    file match {
      case scalaFile: ScalaFile =>
        val typeDefinitions = scalaFile.typeDefinitions
        if (typeDefinitions.length == 1) return typeDefinitions(0)
      case _ =>
    }
    null
  }

  override def isAvailable(dataContext: DataContext): Boolean = {
    super.isAvailable(dataContext) && isUnderSourceRoots(dataContext)
  }

  private def isUnderSourceRoots(dataContext: DataContext): Boolean = {
    val module: Module = dataContext.getData(LangDataKeys.MODULE.getName).asInstanceOf[Module]
    if (!ScalaFacet.isPresentIn(module)) {
      return false
    }
    val view = dataContext.getData(LangDataKeys.IDE_VIEW.getName).asInstanceOf[IdeView]
    val project = dataContext.getData(CommonDataKeys.PROJECT.getName).asInstanceOf[Project]
    if (view != null && project != null) {
      val projectFileIndex = ProjectRootManager.getInstance(project).getFileIndex
      val dirs = view.getDirectories
      for (dir <- dirs) {
        val aPackage = JavaDirectoryService.getInstance.getPackage(dir)
        if (projectFileIndex.isInSourceContent(dir.getVirtualFile) && aPackage != null) {
          return true
        }
      }
    }
    false
  }

  private def createClassFromTemplate(directory: PsiDirectory, className: String, templateName: String,
                                      parameters: String*): PsiFile = {
    NewScalaTypeDefinitionAction.createFromTemplate(directory, className, className + SCALA_EXTENSION, templateName, parameters: _*)
  }

  private val SCALA_EXTENSION = ".scala"

  def checkPackageExists(directory: PsiDirectory) = {
    JavaDirectoryService.getInstance.getPackage(directory) != null
  }
}

object NewScalaTypeDefinitionAction {
  @NonNls private[actions] val NAME_TEMPLATE_PROPERTY: String = "NAME"
  @NonNls private[actions] val LOW_CASE_NAME_TEMPLATE_PROPERTY: String = "lowCaseName"

  def createFromTemplate(directory: PsiDirectory, name: String, fileName: String, templateName: String,
                         parameters: String*): PsiFile = {
    val template: FileTemplate = FileTemplateManager.getInstance.getInternalTemplate(templateName)
    val project = directory.getProject
    val properties: Properties = new Properties(FileTemplateManager.getInstance.getDefaultProperties(project))
    JavaTemplateUtil.setPackageNameAttribute(properties, directory)
    properties.setProperty(NAME_TEMPLATE_PROPERTY, name)
    properties.setProperty(LOW_CASE_NAME_TEMPLATE_PROPERTY, name.substring(0, 1).toLowerCase + name.substring(1))
    var i: Int = 0
    while (i < parameters.length) {
      properties.setProperty(parameters(i), parameters(i + 1))
      i += 2
    }
    var text: String = null
    try {
      text = template.getText(properties)
    } catch {
      case e: Exception =>
        throw new RuntimeException("Unable to load template for " +
          FileTemplateManager.getInstance.internalTemplateToSubject(templateName), e)
    }
    val factory: PsiFileFactory = PsiFileFactory.getInstance(project)
    val file: PsiFile = factory.createFileFromText(fileName, ScalaFileType.SCALA_FILE_TYPE, text)
    CodeStyleManager.getInstance(project).reformat(file)
    directory.add(file).asInstanceOf[PsiFile]
  }
}
consulo/consulo-scala
src/org/jetbrains/plugins/scala/actions/NewScalaTypeDefinitionAction.scala
Scala
apache-2.0
6,505
package test

import com.acerete.input.Input
import com.acerete.service.Customer
import com.acerete.service.ColorType
import com.acerete.service.Color
import com.acerete.output.Output
import com.acerete.input.InputReader
import com.acerete.output.StdOutputWriter
import com.acerete.output.OutputWriter
import com.acerete.input.FileInputReader
import com.acerete.shop.PaintShop
import com.acerete.shop.Shop

class SetupTest {

  val NO_FILE: String = "src/test/files/no.txt"
  val NO_INT: String = "src/test/files/no_int.txt"
  val NEGATIVE_INT: String = "src/test/files/negative_int.txt"
  val NO_N_TEST_CASES_FILE: String = "src/test/files/no_n_test_cases.txt"
  val NO_N_COLORS_FILE: String = "src/test/files/no_n_colors.txt"
  val NO_N_CUSTOMERS_FILE: String = "src/test/files/no_n_customers.txt"
  val LIMITS_EXCEEDED_1_FILE: String = "src/test/files/limits_exceeded_1.txt"
  val LIMITS_EXCEEDED_2_FILE: String = "src/test/files/limits_exceeded_2.txt"
  val LIMITS_EXCEEDED_3_FILE: String = "src/test/files/limits_exceeded_3.txt"
  val NO_N_LIKES_FILE: String = "src/test/files/no_n_likes.txt"
  val WRONG_CUSTOMER_FORMAT_FILE: String = "src/test/files/wrong_customer_format.txt"
  val WRONG_COLOR_TYPE_ID_FILE: String = "src/test/files/wrong_color_type_id.txt"
  val WRONG_COLOR_FILE: String = "src/test/files/wrong_color.txt"
  val ALREADY_LIKED_COLOR_FILE: String = "src/test/files/already_liked_color.txt"
  val ALREADY_LIKED_MATTE_FILE: String = "src/test/files/already_liked_matte.txt"
  val VALID_FILE: String = "src/test/files/valid.txt"
  val OTHER_VALID_FILE: String = "src/test/files/other_valid.txt"

  val VALID_OUTPUT_RESULT: String = "Case #1: 1 0 0 0 0 \r\nCase #2: IMPOSSIBLE\r\n"
  val OTHER_VALID_OUTPUT_RESULT: String = "Case #1: 0 0 0 \r\n"

  val INPUT_READER: InputReader = FileInputReader
  val OUTPUT_WRITER: OutputWriter = StdOutputWriter
  val SHOP: Shop = PaintShop

  // Matches values on 'valid.txt'
  var input1 = new Input(0, 5)
  var customerA1: Customer = new Customer()
  customerA1.addLike(new Color(1, ColorType.MATTE))
  var customerA2: Customer = new Customer()
  customerA2.addLike(new Color(1, ColorType.GLOSSY))
  customerA2.addLike(new Color(2, ColorType.GLOSSY))
  var customerA3: Customer = new Customer()
  customerA3.addLike(new Color(5, ColorType.GLOSSY))
  input1.addCustomer(customerA1)
  input1.addCustomer(customerA2)
  input1.addCustomer(customerA3)

  var input2: Input = new Input(1, 1)
  var customerB1: Customer = new Customer()
  customerB1.addLike(new Color(1, ColorType.GLOSSY))
  var customerB2: Customer = new Customer()
  customerB2.addLike(new Color(1, ColorType.MATTE))
  input2.addCustomer(customerB1)
  input2.addCustomer(customerB2)

  val VALID_INPUT: Set[Input] = Set(input1, input2)

  // Matches values on 'VALID_OUTPUT_RESULT'
  var output1: Output = new Output(0)
  output1.addColor(ColorType.MATTE)
  output1.addColor(ColorType.GLOSSY)
  output1.addColor(ColorType.GLOSSY)
  output1.addColor(ColorType.GLOSSY)
  output1.addColor(ColorType.GLOSSY)
  var output2: Output = new Output(1)

  val VALID_OUTPUT: List[Output] = List(output1, output2)
}
adriwankenobi/paint-shop-scala
src/test/SetupTest.scala
Scala
gpl-2.0
3,138
import scala.collection._

trait GenSeqView0[+A, +Coll]

trait GenSeqViewLike[+A,
                     +Coll,
                     +This <: GenSeqView0[A, Coll] with GenSeqViewLike[A, Coll, Nothing]]
    extends GenSeq[A] {
  self =>

  trait Transformed[+B] {
    def length: Int = 0
    def apply(idx: Int): B = error("")
  }

  trait Reversed extends Transformed[A] {
    def iterator: Iterator[A] = createReversedIterator

    private def createReversedIterator: Iterator[A] = {
      self.foreach(_ => ())
      null
    }
  }
}
folone/dotty
tests/pending/pos/t4365/b_1.scala
Scala
bsd-3-clause
529
package org.vlinderlang.vlinderc.parse

import scala.util.parsing.combinator.{Parsers, RegexParsers}

sealed abstract class Token
private[parse] case object EOF extends Token
case class Identifier(name: String) extends Token
case object False extends Token
case object Import extends Token
case object Struct extends Token
case object Sub extends Token
case object True extends Token
case object Typealias extends Token
case object Union extends Token
case class StringLiteral(value: String) extends Token
case object Colon extends Token
case object Comma extends Token
case object Eq extends Token
case object EqGT extends Token
case object LeftBrace extends Token
case object LeftParen extends Token
private[parse] case object Newline extends Token
case object Period extends Token
case object RightBrace extends Token
case object RightParen extends Token
case object Semicolon extends Token

private[parse] object Lexer extends Parsers with RegexParsers {
  override type Elem = Char

  override def skipWhitespace: Boolean = false

  def space: Parser[Unit] = """ *""".r ^^^ (())

  def identifier: Parser[Token] = """[a-zA-Z_][a-zA-Z0-9_]*""".r ^^ Identifier

  def keyword: Parser[Token] = Vector(
    "false" ^^^ False,
    "import" ^^^ Import,
    "struct" ^^^ Struct,
    "sub" ^^^ Sub,
    "true" ^^^ True,
    "typealias" ^^^ Typealias,
    "union" ^^^ Union
  ).reduce(_ ||| _)

  def literal: Parser[Token] = Vector(
    "\".*?\"".r ^^ { s => StringLiteral(s.substring(1, s.length - 1)) }
  ).reduce(_ ||| _)

  def punctuation: Parser[Token] = Vector(
    ":" ^^^ Colon,
    "," ^^^ Comma,
    "=" ^^^ Eq,
    "=>" ^^^ EqGT,
    "{" ^^^ LeftBrace,
    "(" ^^^ LeftParen,
    "\r\n" ^^^ Newline,
    "\n" ^^^ Newline,
    "." ^^^ Period,
    "}" ^^^ RightBrace,
    ")" ^^^ RightParen,
    ";" ^^^ Semicolon
  ).reduce(_ ||| _)

  def token: Parser[Token] = (identifier ||| keyword ||| literal ||| punctuation) <~ space
}
vlinder-lang/vlinderc
src/main/scala/org/vlinderlang/vlinderc/parse/lex.scala
Scala
bsd-3-clause
1,948
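A small sketch exercising the token parser from inside the same package (`Lexer` is `private[parse]`); the `parse` method comes from `RegexParsers`:

package org.vlinderlang.vlinderc.parse

object LexerDemo extends App {
  println(Lexer.parse(Lexer.token, "=>").get)      // EqGT: "|||" prefers the longer match over Eq
  println(Lexer.parse(Lexer.token, "\"hi\"").get)  // StringLiteral(hi)
}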
package uk.gov.dvla.vehicles.presentation.common.controllers

import org.mockito.Mockito.when
import play.api.test.FakeRequest
import play.api.test.Helpers.{contentAsString, defaultAwaitTimeout}
import play.mvc.Http.Status.{OK, INTERNAL_SERVER_ERROR}
import uk.gov.dvla.vehicles.presentation.common.UnitSpec
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.healthstats.{NotHealthyStats, HealthStats}

class HealthCheckSpec extends UnitSpec {

  "requests to /healthcheck" should {
    "GET request should return 200 'VMPR Application Healthy!' if the health stats are good" in {
      val healthStats = mock[HealthStats]
      when(healthStats.healthy).thenReturn(None)
      val result = new HealthCheck(healthStats).respond(FakeRequest("GET", "/healthcheck"))
      whenReady(result)(_.header.status should equal(OK))
      contentAsString(result) should equal("VMPR Application Healthy!")
    }

    "GET request should return 500 if the health stats are not good" in {
      val healthStats = mock[HealthStats]
      when(healthStats.healthy).thenReturn(Some(NotHealthyStats("ms1", "stats info")))
      val result = new HealthCheck(healthStats).respond(FakeRequest("GET", "/healthcheck"))
      whenReady(result) { result =>
        result.header.status should equal(INTERNAL_SERVER_ERROR)
      }
      contentAsString(result) should equal("stats info")
    }
  }
}
dvla/vehicles-presentation-common
test/uk/gov/dvla/vehicles/presentation/common/controllers/HealthCheckSpec.scala
Scala
mit
1,389
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.util

import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
import java.util.{Calendar, Locale, TimeZone}

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.unsafe.types.UTF8String

class DateTimeUtilsSuite extends SparkFunSuite {

  val TimeZonePST = TimeZone.getTimeZone("PST")

  private[this] def getInUTCDays(timestamp: Long): Int = {
    val tz = TimeZone.getDefault
    ((timestamp + tz.getOffset(timestamp)) / MILLIS_PER_DAY).toInt
  }

  test("nanoseconds truncation") {
    def checkStringToTimestamp(originalTime: String, expectedParsedTime: String) {
      val parsedTimestampOp = DateTimeUtils.stringToTimestamp(UTF8String.fromString(originalTime))
      assert(parsedTimestampOp.isDefined, "timestamp with nanoseconds was not parsed correctly")
      assert(DateTimeUtils.timestampToString(parsedTimestampOp.get) === expectedParsedTime)
    }

    checkStringToTimestamp("2015-01-02 00:00:00.123456789", "2015-01-02 00:00:00.123456")
    checkStringToTimestamp("2015-01-02 00:00:00.100000009", "2015-01-02 00:00:00.1")
    checkStringToTimestamp("2015-01-02 00:00:00.000050000", "2015-01-02 00:00:00.00005")
    checkStringToTimestamp("2015-01-02 00:00:00.12005", "2015-01-02 00:00:00.12005")
    checkStringToTimestamp("2015-01-02 00:00:00.100", "2015-01-02 00:00:00.1")
    checkStringToTimestamp("2015-01-02 00:00:00.000456789", "2015-01-02 00:00:00.000456")
    checkStringToTimestamp("1950-01-02 00:00:00.000456789", "1950-01-02 00:00:00.000456")
  }

  test("timestamp and us") {
    val now = new Timestamp(System.currentTimeMillis())
    now.setNanos(1000)
    val ns = fromJavaTimestamp(now)
    assert(ns % 1000000L === 1)
    assert(toJavaTimestamp(ns) === now)

    List(-111111111111L, -1L, 0, 1L, 111111111111L).foreach { t =>
      val ts = toJavaTimestamp(t)
      assert(fromJavaTimestamp(ts) === t)
      assert(toJavaTimestamp(fromJavaTimestamp(ts)) === ts)
    }
  }

  test("us and julian day") {
    val (d, ns) = toJulianDay(0)
    assert(d === JULIAN_DAY_OF_EPOCH)
    assert(ns === 0)
    assert(fromJulianDay(d, ns) == 0L)

    Seq(Timestamp.valueOf("2015-06-11 10:10:10.100"),
      Timestamp.valueOf("2015-06-11 20:10:10.100"),
      Timestamp.valueOf("1900-06-11 20:10:10.100")).foreach { t =>
      val (d, ns) = toJulianDay(fromJavaTimestamp(t))
      assert(ns > 0)
      val t1 = toJavaTimestamp(fromJulianDay(d, ns))
      assert(t.equals(t1))
    }
  }

  test("SPARK-6785: java date conversion before and after epoch") {
    def checkFromToJavaDate(d1: Date): Unit = {
      val d2 = toJavaDate(fromJavaDate(d1))
      assert(d2.toString === d1.toString)
    }

    val df1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
    val df2 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z", Locale.US)

    checkFromToJavaDate(new Date(100))

    checkFromToJavaDate(Date.valueOf("1970-01-01"))

    checkFromToJavaDate(new Date(df1.parse("1970-01-01 00:00:00").getTime))
    checkFromToJavaDate(new Date(df2.parse("1970-01-01 00:00:00 UTC").getTime))

    checkFromToJavaDate(new Date(df1.parse("1970-01-01 00:00:01").getTime))
    checkFromToJavaDate(new Date(df2.parse("1970-01-01 00:00:01 UTC").getTime))

    checkFromToJavaDate(new Date(df1.parse("1969-12-31 23:59:59").getTime))
    checkFromToJavaDate(new Date(df2.parse("1969-12-31 23:59:59 UTC").getTime))

    checkFromToJavaDate(Date.valueOf("1969-01-01"))

    checkFromToJavaDate(new Date(df1.parse("1969-01-01 00:00:00").getTime))
    checkFromToJavaDate(new Date(df2.parse("1969-01-01 00:00:00 UTC").getTime))

    checkFromToJavaDate(new Date(df1.parse("1969-01-01 00:00:01").getTime))
    checkFromToJavaDate(new Date(df2.parse("1969-01-01 00:00:01 UTC").getTime))

    checkFromToJavaDate(new Date(df1.parse("1989-11-09 11:59:59").getTime))
    checkFromToJavaDate(new Date(df2.parse("1989-11-09 19:59:59 UTC").getTime))

    checkFromToJavaDate(new Date(df1.parse("1776-07-04 10:30:00").getTime))
    checkFromToJavaDate(new Date(df2.parse("1776-07-04 18:30:00 UTC").getTime))
  }

  test("string to date") {
    var c = Calendar.getInstance()
    c.set(2015, 0, 28, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(stringToDate(UTF8String.fromString("2015-01-28")).get === millisToDays(c.getTimeInMillis))
    c.set(2015, 0, 1, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(stringToDate(UTF8String.fromString("2015")).get === millisToDays(c.getTimeInMillis))
    c.set(1, 0, 1, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(stringToDate(UTF8String.fromString("0001")).get === millisToDays(c.getTimeInMillis))
    c = Calendar.getInstance()
    c.set(2015, 2, 1, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(stringToDate(UTF8String.fromString("2015-03")).get === millisToDays(c.getTimeInMillis))
    c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(stringToDate(UTF8String.fromString("2015-03-18")).get === millisToDays(c.getTimeInMillis))
    assert(stringToDate(UTF8String.fromString("2015-03-18 ")).get === millisToDays(c.getTimeInMillis))
    assert(stringToDate(UTF8String.fromString("2015-03-18 123142")).get === millisToDays(c.getTimeInMillis))
    assert(stringToDate(UTF8String.fromString("2015-03-18T123123")).get === millisToDays(c.getTimeInMillis))
    assert(stringToDate(UTF8String.fromString("2015-03-18T")).get === millisToDays(c.getTimeInMillis))

    assert(stringToDate(UTF8String.fromString("2015-03-18X")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015/03/18")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015.03.18")).isEmpty)
    assert(stringToDate(UTF8String.fromString("20150318")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015-031-8")).isEmpty)
    assert(stringToDate(UTF8String.fromString("02015-03-18")).isEmpty)
    assert(stringToDate(UTF8String.fromString("015-03-18")).isEmpty)
    assert(stringToDate(UTF8String.fromString("015")).isEmpty)
    assert(stringToDate(UTF8String.fromString("02015")).isEmpty)
  }

  test("string to time") {
    // Tests with UTC.
    val c = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    c.set(Calendar.MILLISECOND, 0)

    c.set(1900, 0, 1, 0, 0, 0)
    assert(stringToTime("1900-01-01T00:00:00GMT-00:00") === c.getTime())

    c.set(2000, 11, 30, 10, 0, 0)
    assert(stringToTime("2000-12-30T10:00:00Z") === c.getTime())

    // Tests with set time zone.
    c.setTimeZone(TimeZone.getTimeZone("GMT-04:00"))
    c.set(Calendar.MILLISECOND, 0)

    c.set(1900, 0, 1, 0, 0, 0)
    assert(stringToTime("1900-01-01T00:00:00-04:00") === c.getTime())

    c.set(1900, 0, 1, 0, 0, 0)
    assert(stringToTime("1900-01-01T00:00:00GMT-04:00") === c.getTime())

    // Tests with local time zone.
    c.setTimeZone(TimeZone.getDefault())
    c.set(Calendar.MILLISECOND, 0)

    c.set(2000, 11, 30, 0, 0, 0)
    assert(stringToTime("2000-12-30") === new Date(c.getTimeInMillis()))

    c.set(2000, 11, 30, 10, 0, 0)
    assert(stringToTime("2000-12-30 10:00:00") === new Timestamp(c.getTimeInMillis()))
  }

  test("string to timestamp") {
    for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
      def checkStringToTimestamp(str: String, expected: Option[Long]): Unit = {
        assert(stringToTimestamp(UTF8String.fromString(str), tz) === expected)
      }

      var c = Calendar.getInstance(tz)
      c.set(1969, 11, 31, 16, 0, 0)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("1969-12-31 16:00:00", Option(c.getTimeInMillis * 1000))
      c.set(1, 0, 1, 0, 0, 0)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("0001", Option(c.getTimeInMillis * 1000))
      c = Calendar.getInstance(tz)
      c.set(2015, 2, 1, 0, 0, 0)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03", Option(c.getTimeInMillis * 1000))
      c = Calendar.getInstance(tz)
      c.set(2015, 2, 18, 0, 0, 0)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18 ", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18T", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(tz)
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18 12:03:17", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18T12:03:17", Option(c.getTimeInMillis * 1000))

      // If the string value includes timezone string, it represents the timestamp string
      // in the timezone regardless of the tz parameter.
      c = Calendar.getInstance(TimeZone.getTimeZone("GMT-13:53"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18T12:03:17-13:53", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18T12:03:17Z", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18 12:03:17Z", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT-01:00"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18T12:03:17-1:0", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18T12:03:17-01:00", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18T12:03:17+07:30", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:03"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("2015-03-18T12:03:17+07:03", Option(c.getTimeInMillis * 1000))

      // tests for the string including milliseconds.
      c = Calendar.getInstance(tz)
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("2015-03-18 12:03:17.123", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18T12:03:17.123", Option(c.getTimeInMillis * 1000))

      // If the string value includes timezone string, it represents the timestamp string
      // in the timezone regardless of the tz parameter.
      c = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 456)
      checkStringToTimestamp("2015-03-18T12:03:17.456Z", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18 12:03:17.456Z", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT-01:00"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("2015-03-18T12:03:17.123-1:0", Option(c.getTimeInMillis * 1000))
      checkStringToTimestamp("2015-03-18T12:03:17.123-01:00", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("2015-03-18T12:03:17.123+07:30", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("2015-03-18T12:03:17.123+07:30", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp(
        "2015-03-18T12:03:17.123121+7:30", Option(c.getTimeInMillis * 1000 + 121))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp(
        "2015-03-18T12:03:17.12312+7:30", Option(c.getTimeInMillis * 1000 + 120))

      c = Calendar.getInstance(tz)
      c.set(Calendar.HOUR_OF_DAY, 18)
      c.set(Calendar.MINUTE, 12)
      c.set(Calendar.SECOND, 15)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp("18:12:15", Option(c.getTimeInMillis * 1000))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(Calendar.HOUR_OF_DAY, 18)
      c.set(Calendar.MINUTE, 12)
      c.set(Calendar.SECOND, 15)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("T18:12:15.12312+7:30", Option(c.getTimeInMillis * 1000 + 120))

      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+07:30"))
      c.set(Calendar.HOUR_OF_DAY, 18)
      c.set(Calendar.MINUTE, 12)
      c.set(Calendar.SECOND, 15)
      c.set(Calendar.MILLISECOND, 123)
      checkStringToTimestamp("18:12:15.12312+7:30", Option(c.getTimeInMillis * 1000 + 120))

      c = Calendar.getInstance(tz)
      c.set(2011, 4, 6, 7, 8, 9)
      c.set(Calendar.MILLISECOND, 100)
      checkStringToTimestamp("2011-05-06 07:08:09.1000", Option(c.getTimeInMillis * 1000))

      checkStringToTimestamp("238", None)
      checkStringToTimestamp("00238", None)
      checkStringToTimestamp("2015-03-18 123142", None)
      checkStringToTimestamp("2015-03-18T123123", None)
      checkStringToTimestamp("2015-03-18X", None)
      checkStringToTimestamp("2015/03/18", None)
      checkStringToTimestamp("2015.03.18", None)
      checkStringToTimestamp("20150318", None)
      checkStringToTimestamp("2015-031-8", None)
      checkStringToTimestamp("02015-01-18", None)
      checkStringToTimestamp("015-01-18", None)
      checkStringToTimestamp("2015-03-18T12:03.17-20:0", None)
      checkStringToTimestamp("2015-03-18T12:03.17-0:70", None)
      checkStringToTimestamp("2015-03-18T12:03.17-1:0:0", None)

      // Truncating the fractional seconds
      c = Calendar.getInstance(TimeZone.getTimeZone("GMT+00:00"))
      c.set(2015, 2, 18, 12, 3, 17)
      c.set(Calendar.MILLISECOND, 0)
      checkStringToTimestamp(
        "2015-03-18T12:03:17.123456789+0:00", Option(c.getTimeInMillis * 1000 + 123456))
    }
  }

  test("SPARK-15379: special invalid date string") {
    // Test stringToDate
    assert(stringToDate(UTF8String.fromString("2015-02-29 00:00:00")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015-04-31 00:00:00")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015-02-29")).isEmpty)
    assert(stringToDate(UTF8String.fromString("2015-04-31")).isEmpty)

    // Test stringToTimestamp
    assert(stringToTimestamp(UTF8String.fromString("2015-02-29 00:00:00")).isEmpty)
    assert(stringToTimestamp(UTF8String.fromString("2015-04-31 00:00:00")).isEmpty)
    assert(stringToTimestamp(UTF8String.fromString("2015-02-29")).isEmpty)
    assert(stringToTimestamp(UTF8String.fromString("2015-04-31")).isEmpty)
  }

  test("hours") {
    val c = Calendar.getInstance(TimeZonePST)
    c.set(2015, 2, 18, 13, 2, 11)
    assert(getHours(c.getTimeInMillis * 1000, TimeZonePST) === 13)
    assert(getHours(c.getTimeInMillis * 1000, TimeZoneGMT) === 20)
    c.set(2015, 12, 8, 2, 7, 9)
    assert(getHours(c.getTimeInMillis * 1000, TimeZonePST) === 2)
    assert(getHours(c.getTimeInMillis * 1000, TimeZoneGMT) === 10)
  }

  test("minutes") {
    val c = Calendar.getInstance(TimeZonePST)
    c.set(2015, 2, 18, 13, 2, 11)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZonePST) === 2)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZoneGMT) === 2)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZone.getTimeZone("Australia/North")) === 32)
    c.set(2015, 2, 8, 2, 7, 9)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZonePST) === 7)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZoneGMT) === 7)
    assert(getMinutes(c.getTimeInMillis * 1000, TimeZone.getTimeZone("Australia/North")) === 37)
  }

  test("seconds") {
    val c = Calendar.getInstance(TimeZonePST)
    c.set(2015, 2, 18, 13, 2, 11)
    assert(getSeconds(c.getTimeInMillis * 1000, TimeZonePST) === 11)
    assert(getSeconds(c.getTimeInMillis * 1000, TimeZoneGMT) === 11)
    c.set(2015, 2, 8, 2, 7, 9)
    assert(getSeconds(c.getTimeInMillis * 1000, TimeZonePST) === 9)
    assert(getSeconds(c.getTimeInMillis * 1000, TimeZoneGMT) === 9)
  }

  test("hours / minutes / seconds") {
    Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
      Timestamp.valueOf("2015-06-11 20:13:40.789"),
      Timestamp.valueOf("1900-06-11 12:14:50.789"),
      Timestamp.valueOf("1700-02-28 12:14:50.123456")).foreach { t =>
      val us = fromJavaTimestamp(t)
      assert(toJavaTimestamp(us) === t)
    }
  }

  test("get day in year") {
    val c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    assert(getDayInYear(getInUTCDays(c.getTimeInMillis)) === 77)
    c.set(2012, 2, 18, 0, 0, 0)
    assert(getDayInYear(getInUTCDays(c.getTimeInMillis)) === 78)
  }

  test("get year") {
    val c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    assert(getYear(getInUTCDays(c.getTimeInMillis)) === 2015)
    c.set(2012, 2, 18, 0, 0, 0)
    assert(getYear(getInUTCDays(c.getTimeInMillis)) === 2012)
  }

  test("get quarter") {
    val c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    assert(getQuarter(getInUTCDays(c.getTimeInMillis)) === 1)
    c.set(2012, 11, 18, 0, 0, 0)
    assert(getQuarter(getInUTCDays(c.getTimeInMillis)) === 4)
  }

  test("get month") {
    val c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    assert(getMonth(getInUTCDays(c.getTimeInMillis)) === 3)
    c.set(2012, 11, 18, 0, 0, 0)
    assert(getMonth(getInUTCDays(c.getTimeInMillis)) === 12)
  }

  test("get day of month") {
    val c = Calendar.getInstance()
    c.set(2015, 2, 18, 0, 0, 0)
    assert(getDayOfMonth(getInUTCDays(c.getTimeInMillis)) === 18)
    c.set(2012, 11, 24, 0, 0, 0)
    assert(getDayOfMonth(getInUTCDays(c.getTimeInMillis)) === 24)
  }

  test("date add months") {
    val c1 = Calendar.getInstance()
    c1.set(1997, 1, 28, 10, 30, 0)
    val days1 = millisToDays(c1.getTimeInMillis)
    val c2 = Calendar.getInstance()
    c2.set(2000, 1, 29)
    assert(dateAddMonths(days1, 36) === millisToDays(c2.getTimeInMillis))
    c2.set(1996, 0, 31)
    assert(dateAddMonths(days1, -13) === millisToDays(c2.getTimeInMillis))
  }

  test("timestamp add months") {
    val c1 = Calendar.getInstance()
    c1.set(1997, 1, 28, 10, 30, 0)
    c1.set(Calendar.MILLISECOND, 0)
    val ts1 = c1.getTimeInMillis * 1000L
    val c2 = Calendar.getInstance()
    c2.set(2000, 1, 29, 10, 30, 0)
    c2.set(Calendar.MILLISECOND, 123)
    val ts2 = c2.getTimeInMillis * 1000L
    assert(timestampAddInterval(ts1, 36, 123000) === ts2)

    val c3 = Calendar.getInstance(TimeZonePST)
    c3.set(1997, 1, 27, 16, 0, 0)
    c3.set(Calendar.MILLISECOND, 0)
    val ts3 = c3.getTimeInMillis * 1000L
    val c4 = Calendar.getInstance(TimeZonePST)
    c4.set(2000, 1, 27, 16, 0, 0)
    c4.set(Calendar.MILLISECOND, 123)
    val ts4 = c4.getTimeInMillis * 1000L
    val c5 = Calendar.getInstance(TimeZoneGMT)
    c5.set(2000, 1, 29, 0, 0, 0)
    c5.set(Calendar.MILLISECOND, 123)
    val ts5 = c5.getTimeInMillis * 1000L
    assert(timestampAddInterval(ts3, 36, 123000, TimeZonePST) === ts4)
    assert(timestampAddInterval(ts3, 36, 123000, TimeZoneGMT) === ts5)
  }

  test("monthsBetween") {
    val c1 = Calendar.getInstance()
    c1.set(1997, 1, 28, 10, 30, 0)
    val c2 = Calendar.getInstance()
    c2.set(1996, 9, 30, 0, 0, 0)
    assert(monthsBetween(c1.getTimeInMillis * 1000L, c2.getTimeInMillis * 1000L) === 3.94959677)
    c2.set(2000, 1, 28, 0, 0, 0)
    assert(monthsBetween(c1.getTimeInMillis * 1000L, c2.getTimeInMillis * 1000L) === -36)
    c2.set(2000, 1, 29, 0, 0, 0)
    assert(monthsBetween(c1.getTimeInMillis * 1000L, c2.getTimeInMillis * 1000L) === -36)
    c2.set(1996, 2, 31, 0, 0, 0)
    assert(monthsBetween(c1.getTimeInMillis * 1000L, c2.getTimeInMillis * 1000L) === 11)

    val c3 = Calendar.getInstance(TimeZonePST)
    c3.set(2000, 1, 28, 16, 0, 0)
    val c4 = Calendar.getInstance(TimeZonePST)
    c4.set(1997, 1, 28, 16, 0, 0)
    assert(
      monthsBetween(c3.getTimeInMillis * 1000L, c4.getTimeInMillis * 1000L, TimeZonePST)
      === 36.0)
    assert(
      monthsBetween(c3.getTimeInMillis * 1000L, c4.getTimeInMillis * 1000L, TimeZoneGMT)
      === 35.90322581)
  }

  test("from UTC timestamp") {
    def test(utc: String, tz: String, expected: String): Unit = {
      assert(toJavaTimestamp(fromUTCTime(fromJavaTimestamp(Timestamp.valueOf(utc)), tz)).toString
        === expected)
    }
    for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
      DateTimeTestUtils.withDefaultTimeZone(tz) {
        test("2011-12-25 09:00:00.123456", "UTC", "2011-12-25 09:00:00.123456")
        test("2011-12-25 09:00:00.123456", "JST", "2011-12-25 18:00:00.123456")
        test("2011-12-25 09:00:00.123456", "PST", "2011-12-25 01:00:00.123456")
        test("2011-12-25 09:00:00.123456", "Asia/Shanghai", "2011-12-25 17:00:00.123456")
      }
    }

    DateTimeTestUtils.withDefaultTimeZone(TimeZone.getTimeZone("PST")) {
      // Daylight Saving Time
      test("2016-03-13 09:59:59.0", "PST", "2016-03-13 01:59:59.0")
      test("2016-03-13 10:00:00.0", "PST", "2016-03-13 03:00:00.0")
      test("2016-11-06 08:59:59.0", "PST", "2016-11-06 01:59:59.0")
      test("2016-11-06 09:00:00.0", "PST", "2016-11-06 01:00:00.0")
      test("2016-11-06 10:00:00.0", "PST", "2016-11-06 02:00:00.0")
    }
  }

  test("to UTC timestamp") {
    def test(utc: String, tz: String, expected: String): Unit = {
      assert(toJavaTimestamp(toUTCTime(fromJavaTimestamp(Timestamp.valueOf(utc)), tz)).toString
        === expected)
    }
    for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
      DateTimeTestUtils.withDefaultTimeZone(tz) {
        test("2011-12-25 09:00:00.123456", "UTC", "2011-12-25 09:00:00.123456")
        test("2011-12-25 18:00:00.123456", "JST", "2011-12-25 09:00:00.123456")
        test("2011-12-25 01:00:00.123456", "PST", "2011-12-25 09:00:00.123456")
        test("2011-12-25 17:00:00.123456", "Asia/Shanghai", "2011-12-25 09:00:00.123456")
      }
    }

    DateTimeTestUtils.withDefaultTimeZone(TimeZone.getTimeZone("PST")) {
      // Daylight Saving Time
      test("2016-03-13 01:59:59", "PST", "2016-03-13 09:59:59.0")
      // 2016-03-13 02:00:00 PST does not exists
      test("2016-03-13 02:00:00", "PST", "2016-03-13 10:00:00.0")
      test("2016-03-13 03:00:00", "PST", "2016-03-13 10:00:00.0")
      test("2016-11-06 00:59:59", "PST", "2016-11-06 07:59:59.0")
      // 2016-11-06 01:00:00 PST could be 2016-11-06 08:00:00 UTC or 2016-11-06 09:00:00 UTC
      test("2016-11-06 01:00:00", "PST", "2016-11-06 09:00:00.0")
      test("2016-11-06 01:59:59", "PST", "2016-11-06 09:59:59.0")
      test("2016-11-06 02:00:00", "PST", "2016-11-06 10:00:00.0")
    }
  }

  test("truncTimestamp") {
    def testTrunc(
        level: Int,
        expected: String,
        inputTS: SQLTimestamp,
        timezone: TimeZone = DateTimeUtils.defaultTimeZone()): Unit = {
      val truncated = DateTimeUtils.truncTimestamp(inputTS, level, timezone)
      val expectedTS = DateTimeUtils.stringToTimestamp(UTF8String.fromString(expected))
      assert(truncated === expectedTS.get)
    }

    val defaultInputTS =
      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"))
    val defaultInputTS1 =
      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-31T20:32:05.359"))
    val defaultInputTS2 =
      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-04-01T02:32:05.359"))
    val defaultInputTS3 =
      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-30T02:32:05.359"))
    val defaultInputTS4 =
      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-29T02:32:05.359"))

    testTrunc(DateTimeUtils.TRUNC_TO_YEAR, "2015-01-01T00:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_MONTH, "2015-03-01T00:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_DAY, "2015-03-05T00:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_HOUR, "2015-03-05T09:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_MINUTE, "2015-03-05T09:32:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_SECOND, "2015-03-05T09:32:05", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-02T00:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", defaultInputTS1.get)
    testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", defaultInputTS2.get)
    testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", defaultInputTS3.get)
    testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-23T00:00:00", defaultInputTS4.get)
    testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-01-01T00:00:00", defaultInputTS.get)
    testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-01-01T00:00:00", defaultInputTS1.get)
    testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-04-01T00:00:00", defaultInputTS2.get)

    for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
      DateTimeTestUtils.withDefaultTimeZone(tz) {
        val inputTS =
          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"))
        val inputTS1 =
          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-31T20:32:05.359"))
        val inputTS2 =
          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-04-01T02:32:05.359"))
        val inputTS3 =
          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-30T02:32:05.359"))
        val inputTS4 =
          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-29T02:32:05.359"))

        testTrunc(DateTimeUtils.TRUNC_TO_YEAR, "2015-01-01T00:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_MONTH, "2015-03-01T00:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_DAY, "2015-03-05T00:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_HOUR, "2015-03-05T09:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_MINUTE, "2015-03-05T09:32:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_SECOND, "2015-03-05T09:32:05", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-02T00:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", inputTS1.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", inputTS2.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-30T00:00:00", inputTS3.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_WEEK, "2015-03-23T00:00:00", inputTS4.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-01-01T00:00:00", inputTS.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-01-01T00:00:00", inputTS1.get, tz)
        testTrunc(DateTimeUtils.TRUNC_TO_QUARTER, "2015-04-01T00:00:00", inputTS2.get, tz)
      }
    }
  }

  test("daysToMillis and millisToDays") {
    val c = Calendar.getInstance(TimeZonePST)

    c.set(2015, 11, 31, 16, 0, 0)
    assert(millisToDays(c.getTimeInMillis, TimeZonePST) === 16800)
    assert(millisToDays(c.getTimeInMillis, TimeZoneGMT) === 16801)

    c.set(2015, 11, 31, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(daysToMillis(16800, TimeZonePST) === c.getTimeInMillis)

    c.setTimeZone(TimeZoneGMT)
    c.set(2015, 11, 31, 0, 0, 0)
    c.set(Calendar.MILLISECOND, 0)
    assert(daysToMillis(16800, TimeZoneGMT) === c.getTimeInMillis)

    // Some days are skipped entirely in some timezones; skip them here.
    val skipped_days = Map[String, Int](
      "Kwajalein" -> 8632,
      "Pacific/Apia" -> 15338,
      "Pacific/Enderbury" -> 9131,
      "Pacific/Fakaofo" -> 15338,
      "Pacific/Kiritimati" -> 9131,
      "Pacific/Kwajalein" -> 8632,
      "MIT" -> 15338)
    for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
      val skipped = skipped_days.getOrElse(tz.getID, Int.MinValue)
      (-20000 to 20000).foreach { d =>
        if (d != skipped) {
          assert(millisToDays(daysToMillis(d, tz), tz) === d,
            s"Round trip of ${d} did not work in tz ${tz}")
        }
      }
    }
  }
}
brad-kaiser/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
Scala
apache-2.0
29,228
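The suite above leans on two easy-to-miss java.util.Calendar conventions: months are zero-based, and the MILLISECOND field keeps the wall-clock value from getInstance() unless it is cleared explicitly. A minimal, self-contained sketch of that pattern (plain JDK, no Spark test fixtures; the object name is invented for illustration):

import java.util.{Calendar, TimeZone}

object CalendarConventions {
  def main(args: Array[String]): Unit = {
    val c = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    c.set(2015, 2, 18, 12, 3, 17) // month 2 is March: Calendar months are zero-based
    c.set(Calendar.MILLISECOND, 0) // otherwise the millis of "now" leak into the expected value
    // The suite's microsecond timestamps are simply epoch millis * 1000.
    val micros = c.getTimeInMillis * 1000L
    println(s"2015-03-18 12:03:17 UTC in microseconds since the epoch: $micros")
  }
}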
package extruder.metrics

import extruder.core._
import extruder.metrics.data.MetricType

trait MetricSettings extends Settings {
  val mapEncoderSettings: Settings = new Settings {
    override val includeClassNameInPath: Boolean = false
    override def pathToString(path: List[String]): String = path.mkString(".")
  }

  override def pathToString(path: List[String]): String =
    path.map(snakeCaseTransformation).mkString(".")

  def defaultMetricType: MetricType = MetricType.Gauge
}
janstenpickle/extruder
metrics/core/src/main/scala/extruder/metrics/MetricSettings.scala
Scala
mit
487
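MetricSettings turns a config path such as List("RequestCount", "HttpServer") into a dotted, snake-cased metric key. A self-contained sketch of that behaviour, with a hypothetical snakeCase helper standing in for extruder's snakeCaseTransformation (whose exact rules may differ):

object MetricPathSketch {
  // Hypothetical stand-in for extruder.core's snakeCaseTransformation.
  private def snakeCase(s: String): String =
    s.replaceAll("([a-z0-9])([A-Z])", "$1_$2").toLowerCase

  def pathToString(path: List[String]): String = path.map(snakeCase).mkString(".")

  def main(args: Array[String]): Unit =
    println(pathToString(List("RequestCount", "HttpServer"))) // request_count.http_server
}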
package fpinscala.errorhandling

import scala.{Option => _, Either => _, _} // hide std library `Option` and `Either`, since we are writing our own in this chapter

sealed trait Option[+A] {
  def map[B](f: A => B): Option[B] = {
    this match {
      case None => None
      case Some(a) => Some(f(a))
    }
  }

  def getOrElse[B >: A](default: => B): B = {
    this match {
      case None => default
      case Some(a) => a
    }
  }

  def flatMap[B](f: A => Option[B]): Option[B] = {
    map(f) getOrElse None
  }

  def orElse[B >: A](ob: => Option[B]): Option[B] = {
    this match {
      case None => ob
      case _ => this
    }
  }

  def filter(f: A => Boolean): Option[A] = {
    this match {
      case Some(a) if f(a) => this
      case _ => None
    }
  }
}

case class Some[+A](get: A) extends Option[A]
case object None extends Option[Nothing]

object Option {
  def failingFn(i: Int): Int = {
    val y: Int = throw new Exception("fail!") // `val y: Int = ...` declares `y` as having type `Int`, and sets it equal to the right hand side of the `=`.
    try {
      val x = 42 + 5
      x + y
    }
    catch { case e: Exception => 43 } // A `catch` block is just a pattern matching block like the ones we've seen. `case e: Exception` is a pattern that matches any `Exception`, and it binds this value to the identifier `e`. The match returns the value 43.
  }

  def failingFn2(i: Int): Int = {
    try {
      val x = 42 + 5
      x + ((throw new Exception("fail!")): Int) // A thrown Exception can be given any type; here we're annotating it with the type `Int`
    }
    catch { case e: Exception => 43 }
  }

  def mean(xs: Seq[Double]): Option[Double] =
    if (xs.isEmpty) None
    else Some(xs.sum / xs.length)

  def variance(xs: Seq[Double]): Option[Double] = {
    mean(xs) flatMap { m =>
      mean(xs map (x => math.pow(m - x, 2)))
    }
  }

  def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] = {
    a flatMap { aa =>
      b map { bb =>
        f(aa, bb)
      }
    }
  }

  def sequence[A](a: List[Option[A]]): Option[List[A]] = {
    a match {
      case Nil => Some(Nil)
      case h :: t => h flatMap (h1 => sequence(t) map (h1 :: _))
    }
  }

  def traverse[A, B](a: List[A])(f: A => Option[B]): Option[List[B]] = {
    a match {
      case Nil => Some(Nil)
      case h :: t => map2(f(h), traverse(t)(f))(_ :: _)
    }
  }
}
fpinscala-muc/fpinscala-LithiumTD
exercises/src/main/scala/fpinscala/errorhandling/Option.scala
Scala
mit
2,396
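Assuming the fpinscala.errorhandling package above is on the classpath, the combinators compose as the chapter intends: sequence short-circuits on the first None, and variance chains two means through flatMap. A usage sketch (the demo object name is invented):

import fpinscala.errorhandling._
import fpinscala.errorhandling.Option._

object OptionDemo {
  def main(args: Array[String]): Unit = {
    println(sequence(List(Some(1), Some(2), Some(3)))) // Some(List(1, 2, 3))
    println(sequence(List(Some(1), None, Some(3))))    // None
    // mean = 2.5, squared deviations sum to 5.0, so variance = 1.25
    println(variance(Seq(1.0, 2.0, 3.0, 4.0)))         // Some(1.25)
  }
}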
package assets.mustache.overseas

import uk.gov.gds.ier.transaction.overseas.lastUkAddress.LastUkAddressSelectMustache
import uk.gov.gds.ier.test._
import uk.gov.gds.ier.service.{AddressService, WithAddressService}

class LastUkAddressSelectTemplateTest
  extends TemplateTestSuite
  with WithMockOverseasControllers
  with LastUkAddressSelectMustache
  with WithAddressService {

  val addressService = mock[AddressService]

  it should "properly render" in {
    running(FakeApplication()) {
      val data = new SelectModel(
        question = Question(),
        lookupUrl = "http://lookup",
        manualUrl = "http://manual",
        postcode = Field(
          id = "postcodeId",
          name = "postcodeName",
          classes = "postcodeClasses",
          value = "postcodeValue"
        ),
        address = Field(
          id = "addressId",
          name = "addressName",
          classes = "addressClasses",
          value = "addressValue",
          optionList = List(
            SelectOption(
              value = "optionValue",
              text = "optionText",
              selected = """ foo="foo" """
            )
          )
        ),
        possibleJsonList = Field(
          id = "possibleJsonId",
          name = "possibleJsonName",
          value = "{\"addresses\":[]}"
        ),
        possiblePostcode = Field(
          id = "possiblePostcodeId",
          name = "possiblePostcodeName",
          value = "possiblePostcodeValue"
        ),
        hasAddresses = true,
        hasAuthority = true
      )

      val html = Mustache.render("overseas/lastUkAddressSelect", data)
      val doc = Jsoup.parse(html.toString)

      val fieldset = doc.select("fieldset").first()

      val postcodeSpan = doc.select("span[class=postcode]").first()
      postcodeSpan.html() should be("postcodeValue")

      val postcodeInput = fieldset.select("input[type=hidden]").first()
      postcodeInput.attr("id") should be("postcodeId")
      postcodeInput.attr("name") should be("postcodeName")
      postcodeInput.attr("value") should be("postcodeValue")

      val lookupLink = doc.select("a[class=change-postcode-button]").first()
      lookupLink.attr("href") should be("http://lookup")

      val manualLink = doc.select("a[href=http://manual]").first()
      manualLink.attr("href") should be("http://manual")

      val addressLabel = fieldset.select("label[for=addressId]").first()
      addressLabel.attr("for") should be("addressId")

      val addressDiv = fieldset.select("div").first()
      addressDiv.attr("class") should include("addressClasses")

      val addressSelect = fieldset.select("select").first()
      addressSelect.attr("id") should be("addressId")
      addressSelect.attr("name") should be("addressName")
      addressSelect.attr("class") should include("addressClasses")

      val option = addressSelect.children().select("option").first()
      option.attr("value") should be("optionValue")
      option.attr("foo") should be("foo")
      option.html() should be("optionText")

      val hiddenJsonListInput = doc.select("input[type=hidden]").get(1)
      val hiddenPostcodeInput = doc.select("input[type=hidden]").get(2)

      hiddenJsonListInput.attr("id") should be("possibleJsonId")
      hiddenJsonListInput.attr("name") should be("possibleJsonName")
      hiddenJsonListInput.attr("value") should be("{\"addresses\":[]}")

      hiddenPostcodeInput.attr("id") should be("possiblePostcodeId")
      hiddenPostcodeInput.attr("name") should be("possiblePostcodeName")
      hiddenPostcodeInput.attr("value") should be("possiblePostcodeValue")
    }
  }

  it should "display an error message if no addresses provided" in {
    running(FakeApplication()) {
      val data = new SelectModel(
        question = Question(),
        lookupUrl = "",
        manualUrl = "",
        postcode = Field(id = "", name = "", classes = "", value = ""),
        address = Field(
          id = "",
          name = "",
          classes = "",
          value = "",
          optionList = List.empty
        ),
        possibleJsonList = Field(id = "", name = "", value = ""),
        possiblePostcode = Field(id = "", name = "", value = ""),
        hasAddresses = false,
        hasAuthority = false
      )

      val html = Mustache.render("overseas/lastUkAddressSelect", data)
      val doc = Jsoup.parse(html.toString)

      val wrapper = doc.select("div").first()
      wrapper.html() should include(
        "Sorry - we couldn't find any addresses for that postcode"
      )

      doc.select("select").size should be(0)
      doc.select("a[class=button]").size should be(0)
    }
  }

  it should "display manual link if has authority" in {
    running(FakeApplication()) {
      val data = new SelectModel(
        question = Question(),
        lookupUrl = "",
        manualUrl = "/lookup/url",
        postcode = Field(id = "", name = "", classes = "", value = ""),
        address = Field(
          id = "",
          name = "",
          classes = "",
          value = "",
          optionList = List.empty
        ),
        possibleJsonList = Field(id = "", name = "", value = ""),
        possiblePostcode = Field(id = "", name = "", value = ""),
        hasAddresses = false,
        hasAuthority = true
      )

      val html = Mustache.render("overseas/lastUkAddressSelect", data)
      val doc = Jsoup.parse(html.toString)

      val wrapper = doc.select("div").first()
      wrapper.html() should include(
        "Sorry - we couldn't find any addresses for that postcode"
      )

      doc.select("select").size should be(0)

      val button = doc.select("a[class=button]").first()
      button.attr("href") should be("/lookup/url")
    }
  }
}
michaeldfallen/ier-frontend
test/assets/mustache/overseas/LastUkAddressSelectTemplateTest.scala
Scala
mit
5,725
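The assertions above all follow the same Jsoup pattern: render, parse, select, compare. A stripped-down sketch of that pattern against a static snippet (Jsoup only, no mustache rendering or Play test fixtures; identifiers invented):

import org.jsoup.Jsoup

object JsoupSelectSketch {
  def main(args: Array[String]): Unit = {
    val html = """<fieldset><a class="change-postcode-button" href="http://lookup">Change</a></fieldset>"""
    val doc = Jsoup.parse(html)
    // CSS-style attribute selectors, exactly as used in the test above
    val link = doc.select("a[class=change-postcode-button]").first()
    assert(link.attr("href") == "http://lookup")
    println(link.html()) // Change
  }
}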
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.ui.scope

import java.util.Objects

import scala.collection.mutable
import scala.collection.mutable.{ListBuffer, StringBuilder}

import org.apache.commons.lang3.StringEscapeUtils

import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.StageInfo
import org.apache.spark.storage.StorageLevel

/**
 * A representation of a generic cluster graph used for storing information on RDD operations.
 *
 * Each graph is defined with a set of edges and a root cluster, which may contain children
 * nodes and children clusters. Additionally, a graph may also have edges that enter or exit
 * the graph from nodes that belong to adjacent graphs.
 */
private[spark] case class RDDOperationGraph(
    edges: Seq[RDDOperationEdge],
    outgoingEdges: Seq[RDDOperationEdge],
    incomingEdges: Seq[RDDOperationEdge],
    rootCluster: RDDOperationCluster)

/** A node in an RDDOperationGraph. This represents an RDD. */
private[spark] case class RDDOperationNode(id: Int, name: String, cached: Boolean, callsite: String)

/**
 * A directed edge connecting two nodes in an RDDOperationGraph.
 * This represents an RDD dependency.
 */
private[spark] case class RDDOperationEdge(fromId: Int, toId: Int)

/**
 * A cluster that groups nodes together in an RDDOperationGraph.
 *
 * This represents any grouping of RDDs, including operation scopes (e.g. textFile, flatMap),
 * stages, jobs, or any higher level construct. A cluster may be nested inside of other clusters.
 */
private[spark] class RDDOperationCluster(val id: String, private var _name: String) {
  private val _childNodes = new ListBuffer[RDDOperationNode]
  private val _childClusters = new ListBuffer[RDDOperationCluster]

  def name: String = _name
  def setName(n: String): Unit = { _name = n }

  def childNodes: Seq[RDDOperationNode] = _childNodes.iterator.toSeq
  def childClusters: Seq[RDDOperationCluster] = _childClusters.iterator.toSeq
  def attachChildNode(childNode: RDDOperationNode): Unit = { _childNodes += childNode }
  def attachChildCluster(childCluster: RDDOperationCluster): Unit = {
    _childClusters += childCluster
  }

  /** Return all the nodes which are cached. */
  def getCachedNodes: Seq[RDDOperationNode] = {
    _childNodes.filter(_.cached) ++ _childClusters.flatMap(_.getCachedNodes)
  }

  def canEqual(other: Any): Boolean = other.isInstanceOf[RDDOperationCluster]

  override def equals(other: Any): Boolean = other match {
    case that: RDDOperationCluster =>
      (that canEqual this) &&
        _childClusters == that._childClusters &&
        id == that.id &&
        _name == that._name
    case _ => false
  }

  override def hashCode(): Int = {
    val state = Seq(_childClusters, id, _name)
    state.map(Objects.hashCode).foldLeft(0)((a, b) => 31 * a + b)
  }
}

private[spark] object RDDOperationGraph extends Logging {

  val STAGE_CLUSTER_PREFIX = "stage_"

  /**
   * Construct a RDDOperationGraph for a given stage.
   *
   * The root cluster represents the stage, and all children clusters represent RDD operations.
   * Each node represents an RDD, and each edge represents a dependency between two RDDs pointing
   * from the parent to the child.
   *
   * This does not currently merge common operation scopes across stages. This may be worth
   * supporting in the future if we decide to group certain stages within the same job under
   * a common scope (e.g. part of a SQL query).
   */
  def makeOperationGraph(stage: StageInfo, retainedNodes: Int): RDDOperationGraph = {
    val edges = new ListBuffer[RDDOperationEdge]
    val nodes = new mutable.HashMap[Int, RDDOperationNode]
    val clusters = new mutable.HashMap[String, RDDOperationCluster] // indexed by cluster ID

    // Root cluster is the stage cluster
    // Use a special prefix here to differentiate this cluster from other operation clusters
    val stageClusterId = STAGE_CLUSTER_PREFIX + stage.stageId
    val stageClusterName = s"Stage ${stage.stageId}" +
      { if (stage.attemptNumber == 0) "" else s" (attempt ${stage.attemptNumber})" }
    val rootCluster = new RDDOperationCluster(stageClusterId, stageClusterName)

    var rootNodeCount = 0
    val addRDDIds = new mutable.HashSet[Int]()
    val dropRDDIds = new mutable.HashSet[Int]()

    // Find nodes, edges, and operation scopes that belong to this stage
    stage.rddInfos.sortBy(_.id).foreach { rdd =>
      val parentIds = rdd.parentIds
      val isAllowed = if (parentIds.isEmpty) {
        rootNodeCount += 1
        rootNodeCount <= retainedNodes
      } else {
        parentIds.exists(id => addRDDIds.contains(id) || !dropRDDIds.contains(id))
      }

      if (isAllowed) {
        addRDDIds += rdd.id
        edges ++= parentIds.filter(id => !dropRDDIds.contains(id)).map(RDDOperationEdge(_, rdd.id))
      } else {
        dropRDDIds += rdd.id
      }

      // TODO: differentiate between the intention to cache an RDD and whether it's actually cached
      val node = nodes.getOrElseUpdate(rdd.id, RDDOperationNode(
        rdd.id, rdd.name, rdd.storageLevel != StorageLevel.NONE, rdd.callSite))
      if (rdd.scope.isEmpty) {
        // This RDD has no encompassing scope, so we put it directly in the root cluster
        // This should happen only if an RDD is instantiated outside of a public RDD API
        if (isAllowed) {
          rootCluster.attachChildNode(node)
        }
      } else {
        // Otherwise, this RDD belongs to an inner cluster,
        // which may be nested inside of other clusters
        val rddScopes = rdd.scope.map { scope => scope.getAllScopes }.getOrElse(Seq.empty)
        val rddClusters = rddScopes.map { scope =>
          val clusterId = scope.id
          val clusterName = scope.name.replaceAll("\\n", "\\\\n")
          clusters.getOrElseUpdate(clusterId, new RDDOperationCluster(clusterId, clusterName))
        }
        // Build the cluster hierarchy for this RDD
        rddClusters.sliding(2).foreach { pc =>
          if (pc.size == 2) {
            val parentCluster = pc(0)
            val childCluster = pc(1)
            parentCluster.attachChildCluster(childCluster)
          }
        }
        // Attach the outermost cluster to the root cluster, and the RDD to the innermost cluster
        rddClusters.headOption.foreach { cluster =>
          if (!rootCluster.childClusters.contains(cluster)) {
            rootCluster.attachChildCluster(cluster)
          }
        }
        if (isAllowed) {
          rddClusters.lastOption.foreach { cluster => cluster.attachChildNode(node) }
        }
      }
    }

    // Classify each edge as internal, outgoing or incoming
    // This information is needed to reason about how stages relate to each other
    val internalEdges = new ListBuffer[RDDOperationEdge]
    val outgoingEdges = new ListBuffer[RDDOperationEdge]
    val incomingEdges = new ListBuffer[RDDOperationEdge]
    edges.foreach { case e: RDDOperationEdge =>
      val fromThisGraph = nodes.contains(e.fromId)
      val toThisGraph = nodes.contains(e.toId)
      (fromThisGraph, toThisGraph) match {
        case (true, true) => internalEdges += e
        case (true, false) => outgoingEdges += e
        case (false, true) => incomingEdges += e
        // should never happen
        case _ => logWarning(s"Found an orphan edge in stage ${stage.stageId}: $e")
      }
    }

    RDDOperationGraph(internalEdges, outgoingEdges, incomingEdges, rootCluster)
  }

  /**
   * Generate the content of a dot file that describes the specified graph.
   *
   * Note that this only uses a minimal subset of features available to the DOT specification.
   * Part of the styling must be done here because the rendering library must take certain
   * attributes into account when arranging the graph elements. More style is added in the
   * visualization later through post-processing in JavaScript.
   *
   * For the complete DOT specification, see http://www.graphviz.org/Documentation/dotguide.pdf.
   */
  def makeDotFile(graph: RDDOperationGraph): String = {
    val dotFile = new StringBuilder
    dotFile.append("digraph G {\n")
    makeDotSubgraph(dotFile, graph.rootCluster, indent = "  ")
    graph.edges.foreach { edge => dotFile.append(s"""  ${edge.fromId}->${edge.toId};\n""") }
    dotFile.append("}")
    val result = dotFile.toString()
    logDebug(result)
    result
  }

  /** Return the dot representation of a node in an RDDOperationGraph. */
  private def makeDotNode(node: RDDOperationNode): String = {
    val isCached = if (node.cached) {
      " [Cached]"
    } else {
      ""
    }
    val label = s"${node.name} [${node.id}]$isCached\n${node.callsite}"
    s"""${node.id} [label="${StringEscapeUtils.escapeJava(label)}"]"""
  }

  /** Update the dot representation of the RDDOperationGraph in cluster to subgraph. */
  private def makeDotSubgraph(
      subgraph: StringBuilder,
      cluster: RDDOperationCluster,
      indent: String): Unit = {
    subgraph.append(indent).append(s"subgraph cluster${cluster.id} {\n")
      .append(indent).append(s"""  label="${StringEscapeUtils.escapeJava(cluster.name)}";\n""")
    cluster.childNodes.foreach { node =>
      subgraph.append(indent).append(s"  ${makeDotNode(node)};\n")
    }
    cluster.childClusters.foreach { cscope =>
      makeDotSubgraph(subgraph, cscope, indent + "  ")
    }
    subgraph.append(indent).append("}\n")
  }
}
bravo-zhang/spark
core/src/main/scala/org/apache/spark/ui/scope/RDDOperationGraph.scala
Scala
apache-2.0
10,192
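The recursive structure of makeDotSubgraph is easier to see in isolation. A minimal standalone sketch with a simplified cluster type (not Spark's; all identifiers invented, no escaping) that produces the same shape of DOT output:

object DotSubgraphSketch {
  final case class Cluster(id: String, label: String, nodes: Seq[(Int, String)], children: Seq[Cluster])

  // Mirrors makeDotSubgraph: emit this cluster, its nodes, then recurse into children.
  def subgraph(sb: StringBuilder, c: Cluster, indent: String): Unit = {
    sb.append(indent).append(s"subgraph cluster${c.id} {\n")
      .append(indent).append(s"""  label="${c.label}";\n""")
    c.nodes.foreach { case (id, label) => sb.append(indent).append(s"""  $id [label="$label"];\n""") }
    c.children.foreach(subgraph(sb, _, indent + "  "))
    sb.append(indent).append("}\n")
  }

  def main(args: Array[String]): Unit = {
    val stage = Cluster("stage_0", "Stage 0", Seq.empty,
      Seq(Cluster("0", "textFile", Seq(0 -> "hadoopRDD [0]", 1 -> "mapPartitions [1]"), Nil)))
    val sb = new StringBuilder("digraph G {\n")
    subgraph(sb, stage, "  ")
    sb.append("  0->1;\n}")
    println(sb) // a nested DOT digraph, one subgraph per cluster
  }
}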
/*
 * Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
 */
package com.lightbend.lagom.internal.javadsl.broker.kafka

import java.net.URI
import java.util.concurrent.CompletionStage
import java.util.concurrent.atomic.AtomicInteger

import akka.Done
import akka.actor.{ ActorSystem, SupervisorStrategy }
import akka.kafka.{ ConsumerSettings, Subscriptions }
import akka.kafka.scaladsl.Consumer
import akka.pattern.BackoffSupervisor
import akka.stream.Materializer
import akka.stream.javadsl.{ Flow, Source }
import com.lightbend.lagom.internal.broker.kafka.{ ConsumerConfig, KafkaConfig, KafkaSubscriberActor, NoKafkaBrokersException }
import com.lightbend.lagom.javadsl.api.Descriptor.TopicCall
import com.lightbend.lagom.javadsl.api.{ ServiceInfo, ServiceLocator }
import com.lightbend.lagom.javadsl.api.broker.Subscriber
import org.apache.kafka.common.serialization.StringDeserializer
import org.slf4j.LoggerFactory

import scala.concurrent.{ ExecutionContext, Future, Promise }
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._

/**
 * A Consumer for consuming messages from Kafka using the akka-stream-kafka API.
 */
private[lagom] class JavadslKafkaSubscriber[Message](
  kafkaConfig:    KafkaConfig,
  topicCall:      TopicCall[Message],
  groupId:        Subscriber.GroupId,
  info:           ServiceInfo,
  system:         ActorSystem,
  serviceLocator: ServiceLocator
)(implicit mat: Materializer, ec: ExecutionContext) extends Subscriber[Message] {

  private val log = LoggerFactory.getLogger(classOf[JavadslKafkaSubscriber[_]])

  import JavadslKafkaSubscriber._

  private lazy val consumerId = KafkaClientIdSequenceNumber.getAndIncrement

  private def consumerConfig = ConsumerConfig(system.settings.config)

  @throws(classOf[IllegalArgumentException])
  override def withGroupId(groupIdName: String): Subscriber[Message] = {
    val newGroupId = {
      if (groupIdName == null) {
        // An empty group id is not allowed by Kafka (see https://issues.apache.org/jira/browse/KAFKA-2648
        // and https://github.com/akka/reactive-kafka/issues/155)
        val defaultGroupId = GroupId.default(info)
        log.debug {
          "Passed a null groupId, but Kafka requires clients to set one (see KAFKA-2648). " +
            s"Defaulting $this consumer groupId to $defaultGroupId."
        }
        defaultGroupId
      } else GroupId(groupIdName)
    }

    if (newGroupId == groupId) this
    else new JavadslKafkaSubscriber(kafkaConfig, topicCall, newGroupId, info, system, serviceLocator)
  }

  private def consumerSettings = {
    val keyDeserializer = new StringDeserializer
    val valueDeserializer = {
      val messageSerializer = topicCall.messageSerializer()
      val protocol = messageSerializer.serializerForRequest().protocol()
      val deserializer = messageSerializer.deserializer(protocol)
      new JavadslKafkaDeserializer(deserializer)
    }

    ConsumerSettings(system, keyDeserializer, valueDeserializer)
      .withBootstrapServers(kafkaConfig.brokers)
      .withGroupId(groupId.groupId())
      // Consumer must have a unique clientId otherwise a javax.management.InstanceAlreadyExistsException is thrown
      .withClientId(s"${info.serviceName()}-$consumerId")
  }

  private def subscription = Subscriptions.topics(topicCall.topicId().value)

  override def atMostOnceSource: Source[Message, _] = {
    kafkaConfig.serviceName match {
      case Some(name) =>
        log.debug("Creating at most once source using service locator to look up Kafka services at {}", name)
        akka.stream.scaladsl.Source.single(())
          .mapAsync(1)(_ => serviceLocator.locate(name).toScala)
          .flatMapConcat {
            case someUri if someUri.isPresent =>
              val uri = someUri.get()
              log.debug("Connecting to Kafka service named {} at {}", name: Any, uri)
              Consumer.atMostOnceSource(
                consumerSettings.withBootstrapServers(s"${uri.getHost}:${uri.getPort}"),
                subscription
              ).map(_.value)
            case _ =>
              throw new NoKafkaBrokersException(name)
          }.asJava

      case None =>
        log.debug("Creating at most once source with configured brokers: {}", kafkaConfig.brokers)
        Consumer.atMostOnceSource(consumerSettings, subscription)
          .map(_.value).asJava
    }
  }

  private def locateService(name: String): Future[Option[URI]] =
    serviceLocator.locate(name).toScala.map(_.asScala)

  override def atLeastOnce(flow: Flow[Message, Done, _]): CompletionStage[Done] = {
    val streamCompleted = Promise[Done]
    val consumerProps = KafkaSubscriberActor.props(kafkaConfig, consumerConfig, locateService,
      topicCall.topicId().value(), flow.asScala, consumerSettings, subscription, streamCompleted)

    val backoffConsumerProps = BackoffSupervisor.propsWithSupervisorStrategy(
      consumerProps, s"KafkaConsumerActor$consumerId-${topicCall.topicId().value}",
      consumerConfig.minBackoff, consumerConfig.maxBackoff, consumerConfig.randomBackoffFactor,
      SupervisorStrategy.stoppingStrategy
    )

    system.actorOf(backoffConsumerProps, s"KafkaBackoffConsumer$consumerId-${topicCall.topicId().value}")

    streamCompleted.future.toJava
  }

}

private[lagom] object JavadslKafkaSubscriber {
  private val KafkaClientIdSequenceNumber = new AtomicInteger(1)

  case class GroupId(groupId: String) extends Subscriber.GroupId {
    if (GroupId.isInvalidGroupId(groupId))
      throw new IllegalArgumentException(s"Failed to create group because [groupId=$groupId] contains invalid character(s). Check the Kafka spec for creating a valid group id.")
  }

  case object GroupId {
    private val InvalidGroupIdChars =
      Set('/', '\\', ',', '\u0000', ':', '"', '\'', ';', '*', '?', ' ', '\t', '\r', '\n', '=')
    // based on https://github.com/apache/kafka/blob/623ab1e7c6497c000bc9c9978637f20542a3191c/core/src/test/scala/unit/kafka/common/ConfigTest.scala#L60
    private def isInvalidGroupId(groupId: String): Boolean = groupId.exists(InvalidGroupIdChars.apply)

    def default(info: ServiceInfo): GroupId = GroupId(info.serviceName())
  }

}
edouardKaiser/lagom
service/javadsl/kafka/client/src/main/scala/com/lightbend/lagom/internal/javadsl/broker/kafka/JavadslKafkaSubscriber.scala
Scala
apache-2.0
6,244
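The GroupId validation at the bottom of the file is a plain character blacklist. A self-contained sketch of the same check, with the character set copied from the file above (the object name is invented):

object GroupIdCheck {
  private val InvalidGroupIdChars =
    Set('/', '\\', ',', '\u0000', ':', '"', '\'', ';', '*', '?', ' ', '\t', '\r', '\n', '=')

  // A Set[Char] is a Char => Boolean, so exists can take it directly.
  def isInvalidGroupId(groupId: String): Boolean = groupId.exists(InvalidGroupIdChars)

  def main(args: Array[String]): Unit = {
    println(isInvalidGroupId("payments-service")) // false
    println(isInvalidGroupId("payments service")) // true: contains a space
  }
}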
package fgc.alias

import fgc.model.Transform
import fgc.model.VideoData
import fgc.levenshtein.EditDistance.editDist

import scala.collection.mutable

class AliasTracker(typoFixer: String => String) {
  private case class AliasStat(key: String) {
    var displayName: String = "(n/a)"
    var matches: Map[String, Int] = Map()
    var count: Int = 0

    def registerMatch(rawName: String): Unit = {
      val matchCount = matches.getOrElse(rawName, 0) + 1
      matches += (rawName -> matchCount)
      count += 1
    }

    def determineDisplayName(): Unit = {
      // the most frequently seen raw spelling becomes the display name
      displayName = matches.keys.reduceLeft { (x, y) =>
        if (matches(x) > matches(y)) x else y
      }
    }
  }

  private val registry: mutable.Map[String, AliasStat] = mutable.Map()
  private var lookup: Map[String, AliasStat] = Map()

  private def levenshtein(stats: List[AliasStat]): Map[String, String] = {
    // need to recursively collapse levenshtein matches
    // cut execution time in half by caching distances
    println("starting levenshtein")
    stats.map { as =>
      val editDistances = stats.filter { os =>
        os.key != as.key
      }.map { os =>
        (os, editDist(as.key, os.key))
      }
      val closestEdit = editDistances.reduceLeft { (x, y) =>
        if (x._2 < y._2) x else y
      }
      var trueAlias = as
      if (closestEdit._2 <= 1) {
        println(s"found close match! $as & $closestEdit")
        if (closestEdit._1.count > as.count) {
          println(s"found replacement! $closestEdit")
          trueAlias = closestEdit._1
        }
      }
      (as.key, trueAlias.displayName)
    }.toMap
  }

  def register(rawName: String): Unit = {
    val key = Transform.toKey(rawName)
    val stat = registry.getOrElseUpdate(key, new AliasStat(key))
    stat.registerMatch(rawName)
  }

  def normalize(): Unit = {
    registry.values.foreach(_.determineDisplayName)
    // levenshtein(registry.values.toList)
    lookup = registry.map { case (key, alias) =>
      var targetAlias = alias
      val fixedKey = typoFixer(alias.displayName)
      if (!registry.contains(fixedKey)) {
        println(fixedKey)
      }
      if (fixedKey != key) {
        targetAlias = registry(fixedKey)
      }
      (key, targetAlias)
    }.toMap
  }

  def get(rawName: String): String = {
    lookup(Transform.toKey(rawName)).displayName
  }
}
mpaulweeks/fgc-video
scala/alias.scala
Scala
mit
2,702
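The tracker imports editDist from fgc.levenshtein, which is not part of this file. One common dynamic-programming formulation of Levenshtein distance, shown here as a self-contained stand-in (not necessarily the repo's implementation):

object EditDistanceSketch {
  def editDist(a: String, b: String): Int = {
    // dp(i)(j) = edit distance between the first i chars of a and first j chars of b
    val dp = Array.tabulate(a.length + 1, b.length + 1) { (i, j) =>
      if (i == 0) j else if (j == 0) i else 0
    }
    for (i <- 1 to a.length; j <- 1 to b.length) {
      val cost = if (a(i - 1) == b(j - 1)) 0 else 1
      dp(i)(j) = math.min(math.min(dp(i - 1)(j) + 1, dp(i)(j - 1) + 1), dp(i - 1)(j - 1) + cost)
    }
    dp(a.length)(b.length)
  }

  def main(args: Array[String]): Unit = {
    println(editDist("daigo", "daigo "))  // 1: one insertion
    println(editDist("tokido", "tokida")) // 1: one substitution, within the tracker's <= 1 collapse threshold
  }
}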
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.consumer

import scala.collection._
import org.I0Itec.zkclient.ZkClient
import kafka.utils.{Json, ZKGroupDirs, ZkUtils, Logging, CoreUtils}
import kafka.common.KafkaException

private[kafka] trait TopicCount {
  def getConsumerThreadIdsPerTopic: Map[String, Set[ConsumerThreadId]]
  def getTopicCountMap: Map[String, Int]
  def pattern: String
}

case class ConsumerThreadId(consumer: String, threadId: Int) extends Ordered[ConsumerThreadId] {
  override def toString = "%s-%d".format(consumer, threadId)

  def compare(that: ConsumerThreadId) = toString.compare(that.toString)
}

private[kafka] object TopicCount extends Logging {
  val whiteListPattern = "white_list"
  val blackListPattern = "black_list"
  val staticPattern = "static"

  def makeThreadId(consumerIdString: String, threadId: Int) = consumerIdString + "-" + threadId

  def makeConsumerThreadIdsPerTopic(consumerIdString: String,
                                    topicCountMap: Map[String, Int]) = {
    val consumerThreadIdsPerTopicMap = new mutable.HashMap[String, Set[ConsumerThreadId]]()
    for ((topic, nConsumers) <- topicCountMap) {
      val consumerSet = new mutable.HashSet[ConsumerThreadId]
      assert(nConsumers >= 1)
      for (i <- 0 until nConsumers)
        consumerSet += ConsumerThreadId(consumerIdString, i)
      consumerThreadIdsPerTopicMap.put(topic, consumerSet)
    }
    consumerThreadIdsPerTopicMap
  }

  def constructTopicCount(group: String, consumerId: String, zkUtils: ZkUtils,
                          excludeInternalTopics: Boolean): TopicCount = {
    val dirs = new ZKGroupDirs(group)
    val topicCountString = zkUtils.readData(dirs.consumerRegistryDir + "/" + consumerId)._1
    var subscriptionPattern: String = null
    var topMap: Map[String, Int] = null
    try {
      Json.parseFull(topicCountString) match {
        case Some(m) =>
          val consumerRegistrationMap = m.asInstanceOf[Map[String, Any]]
          consumerRegistrationMap.get("pattern") match {
            case Some(pattern) => subscriptionPattern = pattern.asInstanceOf[String]
            case None => throw new KafkaException("error constructing TopicCount : " + topicCountString)
          }
          consumerRegistrationMap.get("subscription") match {
            case Some(sub) => topMap = sub.asInstanceOf[Map[String, Int]]
            case None => throw new KafkaException("error constructing TopicCount : " + topicCountString)
          }
        case None => throw new KafkaException("error constructing TopicCount : " + topicCountString)
      }
    } catch {
      case e: Throwable =>
        error("error parsing consumer json string " + topicCountString, e)
        throw e
    }

    val hasWhiteList = whiteListPattern.equals(subscriptionPattern)
    val hasBlackList = blackListPattern.equals(subscriptionPattern)

    if (topMap.isEmpty || !(hasWhiteList || hasBlackList)) {
      new StaticTopicCount(consumerId, topMap)
    } else {
      val regex = topMap.head._1
      val numStreams = topMap.head._2
      val filter =
        if (hasWhiteList) new Whitelist(regex)
        else new Blacklist(regex)
      new WildcardTopicCount(zkUtils, consumerId, filter, numStreams, excludeInternalTopics)
    }
  }

  def constructTopicCount(consumerIdString: String, topicCount: Map[String, Int]) =
    new StaticTopicCount(consumerIdString, topicCount)

  def constructTopicCount(consumerIdString: String, filter: TopicFilter, numStreams: Int,
                          zkUtils: ZkUtils, excludeInternalTopics: Boolean) =
    new WildcardTopicCount(zkUtils, consumerIdString, filter, numStreams, excludeInternalTopics)

}

private[kafka] class StaticTopicCount(val consumerIdString: String,
                                      val topicCountMap: Map[String, Int]) extends TopicCount {

  def getConsumerThreadIdsPerTopic =
    TopicCount.makeConsumerThreadIdsPerTopic(consumerIdString, topicCountMap)

  override def equals(obj: Any): Boolean = {
    obj match {
      case null => false
      case n: StaticTopicCount => consumerIdString == n.consumerIdString && topicCountMap == n.topicCountMap
      case _ => false
    }
  }

  def getTopicCountMap = topicCountMap

  def pattern = TopicCount.staticPattern
}

private[kafka] class WildcardTopicCount(zkUtils: ZkUtils,
                                        consumerIdString: String,
                                        topicFilter: TopicFilter,
                                        numStreams: Int,
                                        excludeInternalTopics: Boolean) extends TopicCount {
  def getConsumerThreadIdsPerTopic = {
    val wildcardTopics = zkUtils.getChildrenParentMayNotExist(ZkUtils.BrokerTopicsPath)
      .filter(topic => topicFilter.isTopicAllowed(topic, excludeInternalTopics))
    TopicCount.makeConsumerThreadIdsPerTopic(consumerIdString,
      Map(wildcardTopics.map((_, numStreams)): _*))
  }

  def getTopicCountMap = Map(CoreUtils.JSONEscapeString(topicFilter.regex) -> numStreams)

  def pattern: String = {
    topicFilter match {
      case wl: Whitelist => TopicCount.whiteListPattern
      case bl: Blacklist => TopicCount.blackListPattern
    }
  }
}
flange/drift-dev
kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/consumer/TopicCount.scala
Scala
apache-2.0
5,980
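makeConsumerThreadIdsPerTopic expands a topic-to-stream-count map into per-topic sets of consumer thread ids. A minimal sketch of that expansion outside Kafka's internals (identifiers invented; thread ids rendered as plain strings rather than ConsumerThreadId values):

object ThreadIdExpansionSketch {
  def expand(consumerIdString: String, topicCountMap: Map[String, Int]): Map[String, Set[String]] =
    topicCountMap.map { case (topic, n) =>
      require(n >= 1) // mirrors the assert(nConsumers >= 1) above
      topic -> (0 until n).map(i => s"$consumerIdString-$i").toSet
    }

  def main(args: Array[String]): Unit =
    // e.g. Map(orders -> Set(c1-0, c1-1), audit -> Set(c1-0))
    println(expand("c1", Map("orders" -> 2, "audit" -> 1)))
}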
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ml.dmlc.mxnet.module import java.io.IOException import ml.dmlc.mxnet.optimizer.SGD import ml.dmlc.mxnet._ import org.slf4j.LoggerFactory import scala.collection.mutable.ArrayBuffer /** * The base class of a modules. A module represents a computation component. The design * purpose of a module is that it abstract a computation "machine", that one can run forward, * backward, update parameters, etc. We aim to make the APIs easy to use, especially in the * case when we need to use imperative API to work with multiple modules (e.g. stochastic * depth network). * * A module has several states: * * - Initial state. Memory is not allocated yet, not ready for computation yet. * - Binded. Shapes for inputs, outputs, and parameters are all known, memory allocated, * ready for computation. * - Parameter initialized. For modules with parameters, doing computation before initializing * the parameters might result in undefined outputs. * - Optimizer installed. An optimizer can be installed to a module. After this, the parameters * of the module can be updated according to the optimizer after gradients are computed * (forward-backward). * * In order for a module to interactive with others, a module should be able to report the * following information in its raw stage (before binded) * * - `data_names`: list of string indicating the names of required data. * - `output_names`: list of string indicating the names of required outputs. * * And also the following richer information after binded: * * - state information * - `binded`: `bool`, indicating whether the memory buffers needed for computation * has been allocated. * - `forTraining`: whether the module is binded for training (if binded). * - `paramsInitialized`: `bool`, indicating whether the parameters of this modules * has been initialized. * - `optimizerInitialized`: `bool`, indicating whether an optimizer is defined * and initialized. * - `inputsNeedGrad`: `bool`, indicating whether gradients with respect to the * input data is needed. Might be useful when implementing composition of modules. * * - input/output information * - `dataShapes`: a list of `(name, shape)`. In theory, since the memory is allocated, * we could directly provide the data arrays. But in the case of data parallelization, * the data arrays might not be of the same shape as viewed from the external world. * - `labelShapes`: a list of `(name, shape)`. This might be `[]` if the module does * not need labels (e.g. it does not contains a loss function at the top), or a module * is not binded for training. * - `outputShapes`: a list of `(name, shape)` for outputs of the module. * * - parameters (for modules with parameters) * - `getParams()`: return a tuple `(argParams, auxParams)`. 
Each of those * is a dictionary of name to `NDArray` mapping. Those `NDArray` always lives on * CPU. The actual parameters used for computing might live on other devices (GPUs), * this function will retrieve (a copy of) the latest parameters. Therefore, modifying * - `setParams(argParams, auxParams)`: assign parameters to the devices * doing the computation. * - `initParams(...)`: a more flexible interface to assign or initialize the parameters. * * - setup * - `bind()`: prepare environment for computation. * - `initOptimizer()`: install optimizer for parameter updating. * * - computation * - `forward(dataBatch)`: forward operation. * - `backward(outGrads=None)`: backward operation. * - `update()`: update parameters according to installed optimizer. * - `getOutputs()`: get outputs of the previous forward operation. * - `getInputGrads()`: get the gradients with respect to the inputs computed * in the previous backward operation. * - `updateMetric(metric, labels)`: update performance metric for the previous forward * computed results. * * - other properties (mostly for backward compatibility) * - `symbol`: the underlying symbolic graph for this module (if any) * This property is not necessarily constant. For example, for `BucketingModule`, * this property is simply the *current* symbol being used. For other modules, * this value might not be well defined. * * When those intermediate-level API are implemented properly, the following * high-level API will be automatically available for a module: * * - `fit`: train the module parameters on a data set * - `predict`: run prediction on a data set and collect outputs * - `score`: run prediction on a data set and evaluate performance */ abstract class BaseModule { private val logger = LoggerFactory.getLogger(classOf[BaseModule]) private[module] var binded: Boolean = false private[module] var forTraining: Boolean = false private[module] var inputsNeedGrad: Boolean = false private[module] var paramsInitialized: Boolean = false private[module] var optimizerInitialized: Boolean = false private[module] var symbol: Symbol = null private[module] var execGroup: DataParallelExecutorGroup = null private[module] var argParams: Map[String, NDArray] = null private[module] var auxParams: Map[String, NDArray] = null // High Level API // A convenient function that calls both `forward` and `backward`. def forwardBackward(dataBatch: DataBatch): Unit = { forward(dataBatch, isTrain = Option(true)) backward() } /** * Run prediction on `eval_data` and evaluate the performance according to `eval_metric`. * @param evalData : DataIter * @param evalMetric : EvalMetric * @param numBatch Number of batches to run. Default is `Integer.MAX_VALUE`, * indicating run until the `DataIter` finishes. * @param batchEndCallback Could also be a list of functions. * @param reset Default `True`, * indicating whether we should reset `eval_data` before starting evaluating. * @param epoch Default 0. For compatibility, this will be passed to callbacks (if any). * During training, this will correspond to the training epoch number. 
*/ def score(evalData: DataIter, evalMetric: EvalMetric, numBatch: Int = Integer.MAX_VALUE, batchEndCallback: Option[BatchEndCallback] = None, scoreEndCallback: Option[BatchEndCallback] = None, reset: Boolean = true, epoch: Int = 0): EvalMetric = { require(evalData != null && evalMetric != null) require(binded && paramsInitialized) if (reset) { evalData.reset() } evalMetric.reset() var nBatch = 0 while (evalData.hasNext && nBatch < numBatch) { val evalBatch = evalData.next() forward(evalBatch, isTrain = Option(false)) updateMetric(evalMetric, evalBatch.label) batchEndCallback.foreach(callback => { callback.invoke(epoch, nBatch, evalMetric) }) nBatch += 1 } scoreEndCallback.foreach(callback => { callback.invoke(epoch, nBatch, evalMetric) }) evalMetric } /** * Run prediction and collect the outputs. * @param evalData the `DataIter` to run prediction on. * @param numBatch Default is -1, indicating running all the batches in the data iterator. * @param reset Default is `true`, indicating whether we should reset the data iter before * starting prediction. * @return The return value will be a nested list like * `[[out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, ...]]` * This mode is useful because in some cases (e.g. bucketing), * the module does not necessarily produce the same number of outputs. */ def predictEveryBatch(evalData: DataIter, numBatch: Int = -1, reset: Boolean = true) : IndexedSeq[IndexedSeq[NDArray]] = { require(binded && paramsInitialized) if (reset) { evalData.reset() } val outputList = ArrayBuffer.empty[IndexedSeq[NDArray]] var nBatch = 0 while (evalData.hasNext && nBatch != numBatch) { val evalBatch = evalData.next() outputList.append(predict(evalBatch)) nBatch += 1 } outputList } def predict(batch: DataBatch): IndexedSeq[NDArray] = { require(binded && paramsInitialized) forward(batch, isTrain = Option(false)) val pad = batch.pad getOutputsMerged().map(out => out.slice(0, out.shape(0)-pad).copy() ) } /** * Run prediction and collect the outputs. * @param evalData the `DataIter` to run prediction on. * @param numBatch Default is -1, indicating running all the batches in the data iterator. * @param reset Default is `true`, indicating whether we should reset the data iter before * starting prediction. * @return The return value will be a list `[out1, out2, out3]`, * where each element is the concatenation of the outputs for all the mini-batches. */ def predict(evalData: DataIter, numBatch: Int = -1, reset: Boolean = true) : IndexedSeq[NDArray] = { val outputBatches = predictEveryBatch(evalData, numBatch, reset) val numOutputs = outputBatches.head.size outputBatches.foreach(out => require(out.size == numOutputs, "Cannot merge batches, as the number of outputs is not the same in mini-batches. " + "Maybe bucketing is used?") ) outputBatches.map(out => NDArray.concatenate(out)) } // Symbol information // A list of names for data required by this module. def dataNames: IndexedSeq[String] // A list of names for the outputs of this module. def outputNames: IndexedSeq[String] // Input/Output information // A list of (name, shape) pairs specifying the data inputs to this module. def dataShapes: IndexedSeq[DataDesc] /** * A list of (name, shape) pairs specifying the label inputs to this module. * If this module does not accept labels -- either it is a module without a loss * function, or it is not bound for training -- then this should return an empty * list `[]`. */ def labelShapes: IndexedSeq[DataDesc] // A list of (name, shape) pairs specifying the outputs of this module.
def outputShapes: IndexedSeq[(String, Shape)] // Parameters of a module /** * Get parameters; these are potentially copies of the actual parameters used * to do computation on the device. * @return `(argParams, auxParams)`, a pair of dictionaries of name to value (`NDArray`) mapping. */ def getParams: (Map[String, NDArray], Map[String, NDArray]) /** * Initialize the parameters and auxiliary states. * @param initializer called to initialize parameters if needed. * @param argParams if not `null`, should be a dictionary of existing argParams; * initialization will be copied from it. * @param auxParams if not `null`, should be a dictionary of existing auxParams; * initialization will be copied from it. * @param allowMissing if `true`, params could contain missing values, and the initializer * will be called to fill those missing params. * @param forceInit if `true`, force re-initialization even if already initialized. */ def initParams(initializer: Initializer = new Uniform(0.01f), argParams: Map[String, NDArray] = null, auxParams: Map[String, NDArray] = null, allowMissing: Boolean = false, forceInit: Boolean = false): Unit /** * Assign parameter and aux state values. * @param argParams dictionary of name to value (`NDArray`) mapping. * @param auxParams dictionary of name to value (`NDArray`) mapping. * @param allowMissing if `true`, params could contain missing values, and the initializer * will be called to fill those missing params. * @param forceInit if `true`, force re-initialization even if already initialized. */ def setParams(argParams: Map[String, NDArray], auxParams: Map[String, NDArray], allowMissing: Boolean = false, forceInit: Boolean = true): Unit = { initParams(initializer = null, argParams = argParams, auxParams = auxParams, allowMissing = allowMissing, forceInit = forceInit) } /** * Save model parameters to file. * @param fname Path to output param file. */ def saveParams(fname: String): Unit = { val (argParams, auxParams) = getParams val saveDict = ( argParams.map { case (k, v) => (s"arg:$k", v.asInContext(Context.cpu())) } ++ auxParams.map { case (k, v) => (s"aux:$k", v.asInContext(Context.cpu())) } ) NDArray.save(fname, saveDict) } /** * Load model parameters from file. * @param fname Path to input param file. * @throws IOException if param file is invalid */ @throws(classOf[IOException]) def loadParams(fname: String): Unit = { val saveDict = NDArray.load(fname) val argParams = scala.collection.mutable.HashMap.empty[String, NDArray] val auxParams = scala.collection.mutable.HashMap.empty[String, NDArray] (saveDict._1 zip saveDict._2) foreach { case (key, value) => key.split(":", 2) match { case Array(argType, name) if argType == "arg" => argParams.put(name, value) case Array(argType, name) if argType == "aux" => auxParams.put(name, value) case _ => throw new IOException("Invalid param file " + fname) } } setParams(argParams.toMap, auxParams.toMap) } /** * Train the module parameters. * @param trainData the training `DataIter`. * @param evalData if set, will be used as the validation set to evaluate * the performance after each epoch. * @param numEpoch Number of epochs to run training. * @param fitParams Extra parameters for training.
*/ def fit(trainData: DataIter, evalData: Option[DataIter] = None, numEpoch: Int = 1, fitParams: FitParams = new FitParams): Unit = { require(fitParams != null) require(numEpoch > 0, "please specify number of epochs") import ml.dmlc.mxnet.DataDesc._ bind(dataShapes = trainData.provideData, labelShapes = Option(trainData.provideLabel), forTraining = true, forceRebind = fitParams.forceRebind) fitParams.monitor.foreach(installMonitor) initParams(fitParams.initializer, argParams, auxParams, fitParams.allowMissing, fitParams.forceInit) initOptimizer(fitParams.kvstore, fitParams.optimizer) val valMetric = fitParams.validationMetric.getOrElse(fitParams.evalMetric) // training loop for (epoch <- fitParams.beginEpoch until numEpoch) { val tic = System.currentTimeMillis fitParams.evalMetric.reset() var nBatch = 0 while (trainData.hasNext) { val dataBatch = trainData.next() fitParams.monitor.foreach(_.tic()) forwardBackward(dataBatch) update() updateMetric(fitParams.evalMetric, dataBatch.label) fitParams.monitor.foreach(_.tocPrint()) fitParams.batchEndCallback.foreach(callback => callback.invoke(epoch, nBatch, fitParams.evalMetric) ) nBatch += 1 } // one epoch of training is finished val (name, value) = fitParams.evalMetric.get logger.info(s"Epoch[$epoch] Train-$name=$value") val toc = System.currentTimeMillis logger.info(s"Epoch[$epoch] Time cost=${toc - tic}") // sync aux params across devices val (argParamsSync, auxParamsSync) = getParams setParams(argParamsSync, auxParamsSync) fitParams.epochEndCallback.foreach(callback => callback.invoke(epoch, symbol, argParamsSync, auxParamsSync) ) // evaluation on validation set evalData.foreach(data => { val res = score(data, valMetric, scoreEndCallback = fitParams.evalEndCallback, batchEndCallback = fitParams.evalBatchEndCallback, epoch = epoch) val (name, value) = res.get logger.info(s"Epoch[$epoch] Validation-$name=$value") }) // end of 1 epoch, reset the data-iter for another epoch trainData.reset() } } // Install monitor on all executors def installMonitor(monitor: Monitor): Unit // Computations /** * Forward computation. * @param dataBatch a batch of data. Could be anything with a similar API implemented. * @param isTrain Default is `None`, which means `isTrain` takes the value of `this.forTraining`. */ def forward(dataBatch: DataBatch, isTrain: Option[Boolean] = None): Unit /** * Backward computation. * @param outGrads Gradient on the outputs to be propagated back. * This parameter is only needed when bind is called * on outputs that are not a loss function. */ def backward(outGrads: Array[NDArray] = null): Unit /** * Get outputs of the previous forward computation. * @return In the case when data-parallelism is used, * the outputs will be merged from multiple devices, * as if they came from a single executor. * The results will look like `[out1, out2]` */ def getOutputsMerged(): IndexedSeq[NDArray] /** * Get outputs of the previous forward computation. * @return In the case when data-parallelism is used, * the outputs will be collected from multiple devices. * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`; * those `NDArray`s might live on different devices. */ def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] /** * Get the gradients to the inputs, computed in the previous backward computation. * @return In the case when data-parallelism is used, * the grads will be merged from multiple devices, * as if they came from a single executor.
* The results will look like `[grad1, grad2]` */ def getInputGradsMerged(): IndexedSeq[NDArray] /** * Get the gradients to the inputs, computed in the previous backward computation. * @return In the case when data-parallelism is used, * the grads will be collected from multiple devices. * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`; * those `NDArray`s might live on different devices. */ def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] // Update parameters according to the installed optimizer and the gradients computed // in the previous forward-backward batch. def update(): Unit /** * Evaluate and accumulate evaluation metric on outputs of the last forward computation. * @param evalMetric the metric to update. * @param labels Typically `DataBatch.label`. */ def updateMetric(evalMetric: EvalMetric, labels: IndexedSeq[NDArray]): Unit // module setup /** * Bind the symbols to construct executors. * This is necessary before one can perform computation with the module. * @param dataShapes Typically is `DataIter.provideData`. * @param labelShapes Typically is `DataIter.provideLabel`. * @param forTraining Default is `true`. Whether the executors should be bound for training. * @param inputsNeedGrad Default is `false`. * Whether the gradients to the input data need to be computed. * Typically this is not needed. * But this might be needed when implementing composition of modules. * @param forceRebind Default is `false`. This function does nothing * if the executors are already bound. But when this is `true`, * the executors will be forced to rebind. * @param sharedModule Default is `None`. This is used in bucketing. When not `None`, * the shared module essentially corresponds to a different bucket * -- a module with a different symbol but with the same sets of parameters * (e.g. unrolled RNNs with different lengths). * @param gradReq Requirement for gradient accumulation (globally). * Can be 'write', 'add', or 'null' (defaults to 'write'). */ def bind(dataShapes: IndexedSeq[DataDesc], labelShapes: Option[IndexedSeq[DataDesc]] = None, forTraining: Boolean = true, inputsNeedGrad: Boolean = false, forceRebind: Boolean = false, sharedModule: Option[BaseModule] = None, gradReq: String = "write"): Unit // Install and initialize optimizers. def initOptimizer(kvstore: String = "local", optimizer: Optimizer = new SGD(), resetOptimizer: Boolean = true, forceInit: Boolean = false): Unit } class FitParams { private[module] var evalMetric: EvalMetric = new Accuracy() private[module] var epochEndCallback: Option[EpochEndCallback] = None private[module] var batchEndCallback: Option[BatchEndCallback] = None private[module] var kvstore: String = "local" private[module] var optimizer: Optimizer = new SGD() private[module] var evalEndCallback: Option[BatchEndCallback] = None private[module] var evalBatchEndCallback: Option[BatchEndCallback] = None private[module] var initializer: Initializer = new Uniform(0.01f) private[module] var argParams: Map[String, NDArray] = null private[module] var auxParams: Map[String, NDArray] = null private[module] var allowMissing: Boolean = false private[module] var forceRebind: Boolean = false private[module] var forceInit: Boolean = false private[module] var beginEpoch: Int = 0 private[module] var validationMetric: Option[EvalMetric] = None private[module] var monitor: Option[Monitor] = None // The performance measure displayed during training.
def setEvalMetric(evalMetric: EvalMetric): FitParams = { require(evalMetric != null) this.evalMetric = evalMetric this } // Each callback will be called with the current // `epoch`, `symbol`, `argParams` and `auxParams`. def setEpochEndCallback(epochEndCallback: EpochEndCallback): FitParams = { this.epochEndCallback = Option(epochEndCallback) this } // Each callback will be called with a `BatchEndParam`. def setBatchEndCallback(batchEndCallback: BatchEndCallback): FitParams = { this.batchEndCallback = Option(batchEndCallback) this } def setKVStore(kvStore: String): FitParams = { require(kvStore != null) this.kvstore = kvStore this } def setOptimizer(optimizer: Optimizer): FitParams = { require(optimizer != null) this.optimizer = optimizer this } // These will be called at the end of each full evaluation, // with the metrics over the entire evaluation set. def setEvalEndCallback(evalEndCallback: BatchEndCallback): FitParams = { this.evalEndCallback = Option(evalEndCallback) this } // These will be called at the end of each minibatch during evaluation. def setEvalBatchEndCallback(evalBatchEndCallback: BatchEndCallback): FitParams = { this.evalBatchEndCallback = Option(evalBatchEndCallback) this } // Will be called to initialize the module parameters if not already initialized. def setInitializer(initializer: Initializer): FitParams = { require(initializer != null) this.initializer = initializer this } // Default `null`; if not `null`, should be existing parameters from a trained // model or loaded from a checkpoint (previously saved model). In this case, // the value here will be used to initialize the module parameters, // unless they are already initialized by the user // via a call to `initParams` or `fit`. // `argParams` has higher priority than `initializer`. def setArgParams(argParams: Map[String, NDArray]): FitParams = { this.argParams = argParams this } // Default `null`. Similar to `argParams`, except for auxiliary states. def setAuxParams(auxParams: Map[String, NDArray]): FitParams = { this.auxParams = auxParams this } // Default `false`. Indicates whether we allow missing parameters // when `argParams` and `auxParams` are not `null`. // If this is `true`, then the missing parameters will be // initialized via the `initializer`. def setAllowMissing(allowMissing: Boolean): FitParams = { this.allowMissing = allowMissing this } // Default `false`. Whether to force rebinding the executors if already bound. def setForceRebind(forceRebind: Boolean): FitParams = { this.forceRebind = forceRebind this } // Default `false`. Indicates whether we should force initialization even if the // parameters are already initialized. def setForceInit(forceInit: Boolean): FitParams = { this.forceInit = forceInit this } // Default `0`. Indicates the starting epoch. Usually, if we are resuming from a // checkpoint saved at a previous training phase at epoch N, // then we should specify this value as N+1. def setBeginEpoch(beginEpoch: Int): FitParams = { require(beginEpoch >= 0) this.beginEpoch = beginEpoch this } def setValidationMetric(metric: EvalMetric): FitParams = { this.validationMetric = Option(metric) this } def setMonitor(monitor: Monitor): FitParams = { this.monitor = Option(monitor) this } }
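// ---------------------------------------------------------------------------
// Editor's note: a minimal usage sketch (not part of the original file) showing
// how the high-level API documented above is typically driven. The concrete
// module `mod` and the iterators `trainIter`/`valIter` are hypothetical
// placeholders; `SGD` and `Accuracy` are the defaults already used in this file.
object BaseModuleUsageSketch {
  def run(mod: BaseModule, trainIter: DataIter, valIter: DataIter): Unit = {
    val fitParams = new FitParams()
      .setOptimizer(new SGD())       // installed via initOptimizer() inside fit()
      .setEvalMetric(new Accuracy()) // the metric displayed during training
    // fit() handles bind -> initParams -> initOptimizer -> the epoch loop
    mod.fit(trainIter, evalData = Some(valIter), numEpoch = 2, fitParams = fitParams)
    // score() runs forward passes only and accumulates the metric
    val metric = mod.score(valIter, new Accuracy())
    val (name, value) = metric.get
    println(s"Validation $name = $value")
  }
}
// ---------------------------------------------------------------------------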
likelyzhao/mxnet
scala-package/core/src/main/scala/ml/dmlc/mxnet/module/BaseModule.scala
Scala
apache-2.0
26,054
/** * This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]]. */ // DO NOT EDIT MANUALLY package sbt final class CommandSource private ( val channelName: String) extends Serializable { override def equals(o: Any): Boolean = o match { case x: CommandSource => (this.channelName == x.channelName) case _ => false } override def hashCode: Int = { 37 * (37 * (17 + "CommandSource".##) + channelName.##) } override def toString: String = { "CommandSource(" + channelName + ")" } protected[this] def copy(channelName: String = channelName): CommandSource = { new CommandSource(channelName) } def withChannelName(channelName: String): CommandSource = { copy(channelName = channelName) } } object CommandSource { def apply(channelName: String): CommandSource = new CommandSource(channelName) }
Duhemm/sbt
main-command/src/main/contraband-scala/sbt/CommandSource.scala
Scala
bsd-3-clause
879
package diameter.Coder import diameter.ValueContainers.OctetString /** * Created by edzmbuh on 19/12/2016. */ trait AvpValueOctetString extends AvpValue{ def value:OctetString def dataRaw = value.dataRaw override def toString() = value.toString() }
dbuhryk/DiameterCoder
Diameter/src/main/scala/diameter/Coder/AvpValueOctetString.scala
Scala
mit
261
/*********************************************************************** * Copyright (c) 2013-2016 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. *************************************************************************/ package org.locationtech.geomesa.kafka09 import org.joda.time.{Duration, Instant} import org.junit.runner.RunWith import org.locationtech.geomesa.kafka.{ReplayConfig, GeoMessage} import org.specs2.mock.Mockito import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class ReplayConfigTest extends Specification with Mockito { "ReplayConfig" should { "throw an exception if start is after end" >> { val start = new Instant(19) val end = new Instant(15) val readBehind = Duration.millis(2) new ReplayConfig(start, end, readBehind) must throwA[IllegalArgumentException] } "allow start to equal end" >> { val start = new Instant(15) val end = new Instant(15) val readBehind = Duration.millis(2) val result = new ReplayConfig(start, end, readBehind) result.start mustEqual start result.end mustEqual end result.readBehind mustEqual readBehind } "calculate real start correctly" >> { val start = new Instant(10) val end = new Instant(20) val readBehind = Duration.millis(2) val result = new ReplayConfig(start, end, readBehind) result.realStartTime mustEqual new Instant(8) } "determine if before real start correctly" >> { val start = new Instant(10) val end = new Instant(20) val readBehind = Duration.millis(2) val result = new ReplayConfig(start, end, readBehind) result.isBeforeRealStart(mockMessage(7)) must beTrue result.isBeforeRealStart(mockMessage(8)) must beFalse result.isBeforeRealStart(mockMessage(9)) must beFalse result.isBeforeRealStart(mockMessage(10)) must beFalse } "determine if not after end correctly" >> { val start = new Instant(10) val end = new Instant(20) val readBehind = Duration.millis(2) val result = new ReplayConfig(start, end, readBehind) result.isNotAfterEnd(mockMessage(19)) must beTrue result.isNotAfterEnd(mockMessage(20)) must beTrue result.isNotAfterEnd(mockMessage(21)) must beFalse } "determine if in window correctly" >> { val start = new Instant(10) val end = new Instant(20) val readBehind = Duration.millis(2) val result = new ReplayConfig(start, end, readBehind) // before read behind result.isInWindow(7) must beFalse // read behind result.isInWindow(8) must beFalse result.isInWindow(9) must beFalse // window (10 to 20).forall {i => result.isInWindow(i) must beTrue } // after result.isInWindow(21) must beFalse result.isInWindow(22) must beFalse } "encode should encode correctly" >> { val start = new Instant(1234) val end = new Instant(4321) val readBehind = Duration.millis(1024) val rc = new ReplayConfig(start, end, readBehind) ReplayConfig.encode(rc) mustEqual "4d2-10e1-400" } "decode should decode correctly" >> { val start = new Instant(1234) val end = new Instant(4321) val readBehind = Duration.millis(1024) val rc = new ReplayConfig(start, end, readBehind) val encoded = ReplayConfig.encode(rc) ReplayConfig.decode(encoded) must beSome(rc) } } def mockMessage(i: Long): GeoMessage = { val msg = mock[GeoMessage] msg.timestamp returns new Instant(i) msg } }
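// Editor's note: a sketch (not part of the original tests) of the encoding the
// encode/decode examples above exercise: each millisecond value rendered as
// lower-case hex (1234 -> "4d2", 4321 -> "10e1", 1024 -> "400"). This mirrors
// the expected strings only; the library's actual implementation may differ.
object ReplayConfigEncodingSketch {
  def encode(startMillis: Long, endMillis: Long, readBehindMillis: Long): String =
    s"${startMillis.toHexString}-${endMillis.toHexString}-${readBehindMillis.toHexString}"
}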
tkunicki/geomesa
geomesa-kafka/geomesa-kafka-datastore/geomesa-kafka-09-datastore/src/test/scala/org/locationtech/geomesa/kafka09/ReplayConfigTest.scala
Scala
apache-2.0
3,918
package latis.ops import scala.Option.option2Iterable import scala.reflect.runtime.currentMirror import latis.dm.Dataset import latis.dm.Function import latis.dm.Sample import latis.dm.Scalar import latis.dm.Tuple import latis.dm.Variable import latis.util.LatisProperties import latis.util.iterator.MappingIterator import latis.dm.Naught /** * Experimental alternative to Operation. * - maps over iterator instead of using MappingIterator * - applies to both domain and codomain of Functions * - uses Naught for domain and codomain types to see if they are really needed * TODO: CoDomainOperation, e.g. math, like Spark mapValues * can benefit from knowing that domain won't change */ abstract class Operation2 { /* * UnaryOperation, BinaryOperation * DomainOperation, CoDomainOperation * enforce by making some methods final? * Filter: can only drop samples, trait? * * or focus on Operations as functions of V -> V with a map method on dataset * but how to override parts without subclass polymorphism * use partial functions? */ /** * Apply this Operation to the given Dataset. */ def apply(dataset: Dataset): Dataset = dataset match { case Dataset(variable) => { val md = dataset.getMetadata //TODO: delegate to subclass to munge metadata //TODO: add provenance metadata, getProvMsg, append to "history" applyToVariable(variable) match { case Some(v) => Dataset(v, md) case None => Dataset.empty //TODO: preserve type metadata? } } case _ => dataset //dataset is empty } /** * Apply Operation to a Variable. */ def applyToVariable(variable: Variable): Option[Variable] = variable match { case scalar: Scalar => applyToScalar(scalar) case sample: Sample => applyToSample(sample) case tuple: Tuple => applyToTuple(tuple) case function: Function => applyToFunction(function) } /** * Default no-op operation for Scalars. */ def applyToScalar(scalar: Scalar): Option[Variable] = Some(scalar) /** * Default operation for Samples. Apply the operation to the domain and codomain. */ def applyToSample(sample: Sample): Option[Sample] = sample match { case Sample(domain, codomain) => { for (d <- applyToVariable(domain); c <- applyToVariable(codomain)) yield Sample(d,c) } } /** * Default operation for Tuples. Apply the operation to each element. * If all elements are invalid, then the Tuple is invalid. */ def applyToTuple(tuple: Tuple): Option[Variable] = { //TODO: should we allow an empty Tuple? probably not // should we reduce a Tuple of one to the thing? consider that it is a Product type val vars = tuple.getVariables.flatMap(applyToVariable(_)) if (vars.length == 0) None else Some(Tuple(vars, tuple.getMetadata)) } /** * Default operation for a Function. Map the operation over each sample. */ def applyToFunction(function: Function): Option[Variable] = { //TODO: preserve memoization /* * TODO: avoid having to read data to get Function type * construct Function with function that provides domain and range? * Do we really need the types? see how far we get with Naught */ val samples = function.iterator.flatMap(applyToSample(_)) Some(Function(Naught(), Naught(), samples, function.getMetadata)) } }
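// Editor's note: a hypothetical subclass (not in the original source) showing the
// intended extension style: override only the Variable kind you care about and let
// the inherited defaults recurse through Samples, Tuples, and Functions. The
// `hasName` test on Variable is an assumption of this sketch.
class ExcludeVariable(name: String) extends Operation2 {
  // Drop any Scalar with the given name; everything else passes through unchanged.
  override def applyToScalar(scalar: Scalar): Option[Variable] =
    if (scalar.hasName(name)) None else Some(scalar)
}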
dlindhol/LaTiS
src/main/scala/latis/ops/Operation2.scala
Scala
epl-1.0
3,390
/** * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]]. */ // DO NOT EDIT MANUALLY package sbt.internal.bsp.codec import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError } trait InitializeBuildParamsFormats { self: sbt.internal.bsp.codec.BuildClientCapabilitiesFormats with sbt.internal.util.codec.JValueFormats with sjsonnew.BasicJsonProtocol => implicit lazy val InitializeBuildParamsFormat: JsonFormat[sbt.internal.bsp.InitializeBuildParams] = new JsonFormat[sbt.internal.bsp.InitializeBuildParams] { override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.bsp.InitializeBuildParams = { __jsOpt match { case Some(__js) => unbuilder.beginObject(__js) val displayName = unbuilder.readField[String]("displayName") val version = unbuilder.readField[String]("version") val bspVersion = unbuilder.readField[String]("bspVersion") val rootUri = unbuilder.readField[java.net.URI]("rootUri") val capabilities = unbuilder.readField[sbt.internal.bsp.BuildClientCapabilities]("capabilities") val data = unbuilder.readField[Option[sjsonnew.shaded.scalajson.ast.unsafe.JValue]]("data") unbuilder.endObject() sbt.internal.bsp.InitializeBuildParams(displayName, version, bspVersion, rootUri, capabilities, data) case None => deserializationError("Expected JsObject but found None") } } override def write[J](obj: sbt.internal.bsp.InitializeBuildParams, builder: Builder[J]): Unit = { builder.beginObject() builder.addField("displayName", obj.displayName) builder.addField("version", obj.version) builder.addField("bspVersion", obj.bspVersion) builder.addField("rootUri", obj.rootUri) builder.addField("capabilities", obj.capabilities) builder.addField("data", obj.data) builder.endObject() } } }
sbt/sbt
protocol/src/main/contraband-scala/sbt/internal/bsp/codec/InitializeBuildParamsFormats.scala
Scala
apache-2.0
1,888
// class definitions trait LorentzVector extends Serializable { // abstract members; must be defined by subclasses def px: Double def py: Double def pz: Double def E: Double // methods common to all LorentzVectors def pt = Math.sqrt(px*px + py*py) def p = Math.sqrt(px*px + py*py + pz*pz) def mass = Math.sqrt(E*E - px*px - py*py - pz*pz) def eta = 0.5*Math.log((p + pz)/(p - pz)) def phi = Math.atan2(py, px) // addition operator is a method named "+" def +(two: LorentzVector) = { val one = this // create a subclass and an instance in one block new LorentzVector { def px = one.px + two.px def py = one.py + two.py def pz = one.pz + two.pz def E = one.E + two.E override def toString() = s"LorentzVector($px, $py, $pz, $E)" } } } // particle class definitions are now one-liners case class Jet(px: Double, py: Double, pz: Double, E: Double, btag: Double) extends LorentzVector case class Muon(px: Double, py: Double, pz: Double, E: Double, q: Int, iso: Double) extends LorentzVector case class Electron(px: Double, py: Double, pz: Double, E: Double, q: Int, iso: Double) extends LorentzVector case class Photon(px: Double, py: Double, pz: Double, E: Double, iso: Double) extends LorentzVector case class MET(px: Double, py: Double) { def pt = Math.sqrt(px*px + py*py) } case class Event(jets: Seq[Jet], muons: Seq[Muon], electrons: Seq[Electron], photons: Seq[Photon], met: MET, numPrimaryVertices: Long) // event data iterator case class EventIterator(location: String = "http://histogrammar.org/docs/data/triggerIsoMu24_50fb-1.json.gz") extends Iterator[Event] { import org.dianahep.histogrammar.json._ // use Java libraries to stream and decompress data on-the-fly @transient val scanner = new java.util.Scanner( new java.util.zip.GZIPInputStream( new java.net.URL(location).openStream)) // read one ahead so that hasNext can effectively "peek" private def getNext() = try { Json.parse(scanner.nextLine) collect { case event: JsonObject => eventFromJson(event) } } catch { case err: java.util.NoSuchElementException => None } private var theNext = getNext() // iterator interface def hasNext = !theNext.isEmpty def next() = { val out = theNext.get theNext = getNext() out } def jetFromJson(params: Map[String, JsonNumber]) = new Jet(params("px").toDouble, params("py").toDouble, params("pz").toDouble, params("E").toDouble, params("btag").toDouble) def muonFromJson(params: Map[String, JsonNumber]) = new Muon(params("px").toDouble, params("py").toDouble, params("pz").toDouble, params("E").toDouble, params("q").toInt, params("iso").toDouble) def electronFromJson(params: Map[String, JsonNumber]) = new Electron(params("px").toDouble, params("py").toDouble, params("pz").toDouble, params("E").toDouble, params("q").toInt, params("iso").toDouble) def photonFromJson(params: Map[String, JsonNumber]) = new Photon(params("px").toDouble, params("py").toDouble, params("pz").toDouble, params("E").toDouble, params("iso").toDouble) def metFromJson(params: Map[String, JsonNumber]): MET = new MET(params("px").toDouble, params("py").toDouble) def eventFromJson(params: JsonObject) = { val JsonArray(jets @ _*) = params("jets") val JsonArray(muons @ _*) = params("muons") val JsonArray(electrons @ _*) = params("electrons") val JsonArray(photons @ _*) = params("photons") val met = params("MET").asInstanceOf[JsonObject] val JsonInt(numPrimaryVertices) = params("numPrimaryVertices") new Event( jets collect {case j: JsonObject => jetFromJson(j.to[JsonNumber].toMap)}, muons collect {case j: JsonObject => muonFromJson(j.to[JsonNumber].toMap)}, electrons 
collect {case j: JsonObject => electronFromJson(j.to[JsonNumber].toMap)}, photons collect {case j: JsonObject => photonFromJson(j.to[JsonNumber].toMap)}, metFromJson(met.to[JsonNumber].toMap), numPrimaryVertices) } } val events = EventIterator()
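// Editor's note: a small usage sketch (not in the original) of the "+" method
// defined on LorentzVector above: summing two muons gives a new LorentzVector
// whose mass is the dimuon invariant mass. Consumes a few events from `events`.
val dimuonMasses =
  events.filter(_.muons.size >= 2).take(5).map { event =>
    val Seq(mu1, mu2, _*) = event.muons
    (mu1 + mu2).mass // invariant mass of the leading two muons
  }.toList
dimuonMasses foreach println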
histogrammar/histogrammar-docs
data/scala-cmsdata.scala
Scala
apache-2.0
4,338
package com.landoop.streamreactor.hive.it import org.scalatest.concurrent.Eventually import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Millis, Span} import org.scalatest.wordspec.AnyWordSpec import java.time.Duration import java.util.concurrent.TimeUnit import scala.io.Source class HiveParquetTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests { private implicit val patience: PatienceConfig = PatienceConfig(Span(30000, Millis), Span(2000, Millis)) "Hive" should { "write records" in { val count = 10000L val topic = createTopic() val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\\n") .replace("{{TOPIC}}", topic) .replace("{{TABLE}}", topic) .replace("{{NAME}}", topic) postTask(taskDef) val producer = stringStringProducer() writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count) producer.close(Duration.ofMillis(TimeUnit.SECONDS.toMillis(30))) // we now should have 1000 records in hive which we can test via jdbc eventually { withConn { conn => val stmt = conn.createStatement val rs = stmt.executeQuery(s"select count(*) from $topic") rs.next() rs.getLong(1) shouldBe count } } stopTask(topic) } } }
datamountaineer/stream-reactor
kafka-connect-hive/it/src/test/scala/com/landoop/streamreactor/hive/it/HiveParquetTest.scala
Scala
apache-2.0
1,449
package test.roundeights.vfunk.filter import org.specs2.mutable._ import scala.collection.immutable.HashSet import com.roundeights.vfunk.filter._ class EncodingTests extends Specification { "A Hex filter" should { val filter = new Hex "Leave a string with only hex characters unchanged" in { val data = "0123456789abcdefABCDEF" filter.filter(data) must_== data } "Strip hex characters from a string" in { val data = FilterHelper.build( 0 to 47, 58 to 64, 71 to 96, 103 to 255 ) filter.filter(data) must_== "" } } }
Nycto/vFunk
src/test/scala/vfunk/filter/EncodingTests.scala
Scala
mit
647
package com.pygmalios.reactiveinflux import com.pygmalios.reactiveinflux.Point.{FieldKey, Measurement, TagKey, TagValue} import com.pygmalios.reactiveinflux.impl.{EscapedString, EscapedStringWithEquals} /** * Common attributes of every point without a timestamp which will be assigned by Influx server. */ trait PointNoTime extends Serializable { def measurement: Measurement def tags: Map[TagKey, TagValue] def fields: Map[FieldKey, FieldValue] } /** * Point with time. */ trait Point extends PointNoTime { /** * Time with nanosecond precision. */ def time: PointTime } object Point { type Measurement = EscapedString type TagKey = EscapedStringWithEquals type TagValue = EscapedStringWithEquals type FieldKey = EscapedStringWithEquals def apply(measurement: Measurement, tags: Map[TagKey, TagValue], fields: Map[FieldKey, FieldValue]): PointNoTime = SimplePointNoTime(measurement, tags, fields) def apply(time: PointTime, measurement: Measurement, tags: Map[TagKey, TagValue], fields: Map[FieldKey, FieldValue]): Point = SimplePoint(time, measurement, tags, fields) } /** * Supported field value types. */ sealed trait FieldValue extends Serializable case class StringFieldValue(value: String) extends FieldValue case class BigDecimalFieldValue(value: BigDecimal) extends FieldValue case class LongFieldValue(value: Long) extends FieldValue case class BooleanFieldValue(value: Boolean) extends FieldValue private[reactiveinflux] case class SimplePointNoTime(measurement: Measurement, tags: Map[TagKey, TagValue], fields: Map[FieldKey, FieldValue]) extends PointNoTime private[reactiveinflux] case class SimplePoint(time: PointTime, measurement: Measurement, tags: Map[TagKey, TagValue], fields: Map[FieldKey, FieldValue]) extends Point
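// Editor's note: a construction sketch (not part of the original file). It assumes
// the library provides implicit String conversions to `EscapedString` /
// `EscapedStringWithEquals` (as its examples rely on); otherwise wrap the
// strings explicitly.
object PointSketch {
  def cpuPoint(load: BigDecimal): PointNoTime =
    Point(
      measurement = "cpu",                    // the Influx measurement name
      tags = Map("host" -> "server-1"),       // indexed tag set
      fields = Map(
        "load" -> BigDecimalFieldValue(load), // numeric field
        "up" -> BooleanFieldValue(true))      // boolean field
    )
}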
pygmalios/reactiveinflux
src/main/scala/com/pygmalios/reactiveinflux/Point.scala
Scala
apache-2.0
2,043
package eventstore package akka class ScavengeITest extends TestConnection { sequential "scavenge" should { "scavenge database and fail if in progress" in new TestConnectionScope { actor ! ScavengeDatabase expectMsgType[ScavengeDatabaseResponse] actor ! ScavengeDatabase expectEsException() must throwA(ScavengeInProgressException) } } }
EventStore/EventStore.JVM
client/src/test/scala/eventstore/akka/ScavengeITest.scala
Scala
bsd-3-clause
380
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.internal.cypher.acceptance import org.neo4j.cypher.ExecutionEngineFunSuite class NullAcceptanceTest extends ExecutionEngineFunSuite { val anyNull: AnyRef = null.asInstanceOf[AnyRef] test("null nodes should be silently ignored") { // Given empty database // When val result = execute("optional match (a:DoesNotExist) set a.prop = 42 return a") // Then doesn't throw result.toList } val expressions = Seq( "round(null)", "floor(null)", "ceil(null)", "abs(null)", "acos(null)", "asin(null)", "atan(null)", "cos(null)", "cot(null)", "exp(null)", "log(null)", "log10(null)", "sin(null)", "tan(null)", "haversin(null)", "sqrt(null)", "sign(null)", "radians(null)", "atan2(null, 0.3)", "atan2(0.3, null)", "null in [1,2,3]", "2 in null", "null in null", "ANY(x in NULL WHERE x = 42)" ) expressions.foreach { expression => test(expression) { executeScalar[Any]("RETURN " + expression) should equal(anyNull) } } }
HuangLS/neo4j
community/cypher/acceptance/src/test/scala/org/neo4j/internal/cypher/acceptance/NullAcceptanceTest.scala
Scala
apache-2.0
1,871
package com.typesafe.slick.docs import java.sql.Blob import org.reactivestreams.Publisher import scala.collection.mutable.ArrayBuffer import scala.concurrent.{Future, Await} import scala.concurrent.duration.Duration import scala.concurrent.ExecutionContext.Implicits.global import scala.language.higherKinds import slick.backend.DatabasePublisher import slick.driver.H2Driver.api._ object Connection extends App { class Coffees(tag: Tag) extends Table[(String, Blob)](tag, "COFFEES") { def name = column[String]("COF_NAME", O.PrimaryKey) def image = column[Blob]("IMAGE") def * = (name, image) } val coffees = TableQuery[Coffees] if (false){ val dataSource = null.asInstanceOf[javax.sql.DataSource] //#forDataSource val db = Database.forDataSource(dataSource: javax.sql.DataSource) //#forDataSource } if(false) { val jndiName = "" //#forName val db = Database.forName(jndiName: String) //#forName } ;{ //#forConfig val db = Database.forConfig("mydb") //#forConfig db.close } ;{ //#forURL val db = Database.forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1", driver="org.h2.Driver") //#forURL db.close } ;{ //#forURL2 val db = Database.forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1", driver="org.h2.Driver", executor = AsyncExecutor("test1", numThreads=10, queueSize=1000)) //#forURL2 db.close } val db = Database.forURL("jdbc:h2:mem:test2;INIT="+coffees.schema.createStatements.mkString("\\\\;"), driver="org.h2.Driver") try { val lines = new ArrayBuffer[Any]() def println(s: Any) = lines += s ;{ //#materialize val q = for (c <- coffees) yield c.name val a = q.result val f: Future[Seq[String]] = db.run(a) f.onSuccess { case s => println(s"Result: $s") } //#materialize Await.result(f, Duration.Inf) };{ //#stream val q = for (c <- coffees) yield c.name val a = q.result val p: DatabasePublisher[String] = db.stream(a) // .foreach is a convenience method on DatabasePublisher. // Use Akka Streams for more elaborate stream processing. //#stream val f = //#stream p.foreach { s => println(s"Element: $s") } //#stream Await.result(f, Duration.Inf) };{ //#streamblob val q = for (c <- coffees) yield c.image val a = q.result val p1: DatabasePublisher[Blob] = db.stream(a) val p2: DatabasePublisher[Array[Byte]] = p1.mapResult { b => b.getBytes(0, b.length().toInt) } //#streamblob };{ //#transaction val a = (for { ns <- coffees.filter(_.name.startsWith("ESPRESSO")).map(_.name).result _ <- DBIO.seq(ns.map(n => coffees.filter(_.name === n).delete): _*) } yield ()).transactionally val f: Future[Unit] = db.run(a) //#transaction Await.result(f, Duration.Inf) } lines.foreach(Predef.println _) } finally db.close //#simpleaction val getAutoCommit = SimpleDBIO[Boolean](_.connection.getAutoCommit) //#simpleaction }
adamkozuch/slick
slick/src/sphinx/code/Connection.scala
Scala
bsd-2-clause
3,096
package im.actor.server.dialog.group import akka.actor.Status import akka.pattern.pipe import com.google.protobuf.ByteString import im.actor.api.rpc.messaging.{ ApiMessage, UpdateMessageRead, UpdateMessageReadByMe, UpdateMessageReceived } import im.actor.server.dialog.{ AuthIdRandomId, GroupDialogCommands, ReadFailed, ReceiveFailed } import im.actor.server.group.GroupErrors.NotAMember import im.actor.server.group.GroupOffice import im.actor.server.history.HistoryUtils import im.actor.server.misc.UpdateCounters import im.actor.server.models import im.actor.server.sequence.SeqUpdatesManager._ import im.actor.server.sequence.{ SeqState, SeqStateDate } import im.actor.server.user.UserOffice import HistoryUtils._ import im.actor.util.cache.CacheHelpers._ import org.joda.time.DateTime import scala.concurrent.Future trait GroupDialogHandlers extends UpdateCounters { this: GroupDialog ⇒ import GroupDialogCommands._ import GroupDialogEvents._ protected def sendMessage( state: GroupDialogState, senderUserId: Int, senderAuthId: Long, randomId: Long, message: ApiMessage, isFat: Boolean ): Unit = { deferStashingReply(LastSenderIdChanged(senderUserId), state) { e ⇒ withMemberIds(groupId) { (memberIds, _, optBot) ⇒ if ((memberIds contains senderUserId) || optBot.contains(senderUserId)) { withCachedFuture[AuthIdRandomId, SeqStateDate](senderAuthId → randomId) { val date = new DateTime for { _ ← Future.sequence(memberIds.filterNot(_ == senderUserId) map { userId ⇒ for { _ ← UserOffice.deliverMessage(userId, groupPeer, senderUserId, randomId, date, message, isFat) counterUpdate ← db.run(getUpdateCountersChanged(userId)) _ ← UserOffice.broadcastUserUpdate(userId, counterUpdate, None, isFat = false, deliveryId = Some(s"counter_${randomId}")) } yield () }) SeqState(seq, state) ← if (optBot.contains(senderUserId)) { Future.successful(SeqState(0, ByteString.EMPTY)) } else { UserOffice.deliverOwnMessage(senderUserId, groupPeer, senderAuthId, randomId, date, message, isFat) } _ ← db.run(writeHistoryMessage(models.Peer.privat(senderUserId), models.Peer.group(groupPeer.id), date, randomId, message.header, message.toByteArray)) } yield SeqStateDate(seq, state, date.getMillis) } recover { case e ⇒ log.error(e, "Failed to send message") throw e } } else Future.successful(Status.Failure(NotAMember)) } } } protected def messageReceived(state: GroupDialogState, receiverUserId: Int, date: Long): Unit = { val replyTo = sender() (if (!state.lastReceiveDate.exists(_ >= date) && !state.lastSenderId.contains(receiverUserId)) { context become working(updatedState(LastReceiveDateChanged(date), state)) withMemberIds(groupId) { (memberIds, _, _) ⇒ val now = System.currentTimeMillis val update = UpdateMessageReceived(groupPeer, date, now) val authIdsF = Future.sequence(memberIds.filterNot(_ == receiverUserId) map UserOffice.getAuthIds) map (_.flatten.toSet) for { _ ← db.run(markMessagesReceived(models.Peer.privat(receiverUserId), models.Peer.group(groupId), new DateTime(date))) authIds ← authIdsF _ ← db.run(persistAndPushUpdates(authIds.toSet, update, None, isFat = false)) } yield MessageReceivedAck() } } else Future.successful(MessageReceivedAck())) pipeTo replyTo onFailure { case e ⇒ replyTo ! 
Status.Failure(ReceiveFailed) log.error(e, "Failed to mark messages received") } } protected def messageRead(state: GroupDialogState, readerUserId: Int, readerAuthId: Long, date: Long): Unit = { val replyTo = sender() val withMembers = withMemberIds[Unit](groupId) _ val readerUpdatesF: Future[Unit] = withMembers { (memberIds, _, _) ⇒ if (memberIds contains readerUserId) { for { _ ← db.run(markMessagesRead(models.Peer.privat(readerUserId), models.Peer.group(groupId), new DateTime(date))) _ ← UserOffice.broadcastUserUpdate(readerUserId, UpdateMessageReadByMe(groupPeer, date), None, isFat = false, deliveryId = None) counterUpdate ← db.run(getUpdateCountersChanged(readerUserId)) _ ← UserOffice.broadcastUserUpdate(readerUserId, counterUpdate, None, isFat = false, deliveryId = None) } yield () } else Future.successful(()) } val joinerF: Future[Unit] = withMembers { (_, invitedUserIds, _) ⇒ if (invitedUserIds contains readerUserId) { GroupOffice.joinAfterFirstRead(groupId, readerUserId, readerAuthId) } else Future.successful(()) } val readerAckF: Future[Unit] = if (!state.lastSenderId.contains(readerUserId) && !state.lastReadDate.exists(_ >= date)) { // State changes before we ensure that the message was read by a group member. // When a kicked user tries to read messages, we change state, but don't mark the message read. // When a group member tries to read messages after a kicked user tried to read them, but before new messages arrived, we don't let him read them. // **This behaviour is buggy and should be fixed after we implement subscription on group members** // It's not critical, as leave/kick is not a frequent event in a group. context become working(updatedState(LastReadDateChanged(date), state)) withMembers { (memberIds, _, _) ⇒ if (memberIds contains readerUserId) { val now = new DateTime().getMillis val restMembers = memberIds.filterNot(_ == readerUserId) val authIdsF = Future.sequence(restMembers map UserOffice.getAuthIds) map (_.flatten.toSet) for { authIds ← authIdsF _ ← db.run(markMessagesRead(models.Peer.privat(readerUserId), models.Peer.group(groupId), new DateTime(date))) _ ← persistAndPushUpdatesF(authIds, UpdateMessageRead(groupPeer, date, now), None, isFat = false) } yield () } else Future.successful(()) } } else Future.successful(()) (for { _ ← readerUpdatesF _ ← joinerF _ ← readerAckF } yield MessageReadAck()) pipeTo replyTo onFailure { case e ⇒ replyTo ! Status.Failure(ReadFailed) log.error(e, "Failed to mark messages read") } } protected def withMemberIds[T](groupId: Int)(f: (Set[Int], Set[Int], Option[Int]) ⇒ Future[T]): Future[T] = { GroupOffice.getMemberIds(groupId) flatMap { case (memberIds, invitedUserIds, optBot) ⇒ f(memberIds.toSet, invitedUserIds.toSet, optBot) } } }
liruqi/actor-platform
actor-server/actor-core/src/main/scala/im/actor/server/dialog/group/GroupDialogHandlers.scala
Scala
mit
6,923
package controllers import play.api.mvc._, Results._ import lila.api.Context import lila.app._ import lila.common.LilaCookie import lila.db.api.$find import lila.security.Permission import lila.user.tube.userTube import lila.user.{ User => UserModel, UserRepo } import views._ object Account extends LilaController { private def env = Env.user private def relationEnv = Env.relation private def forms = lila.user.DataForm def profile = Auth { implicit ctx => me => Ok(html.account.profile(me, forms profileOf me)).fuccess } def profileApply = AuthBody { implicit ctx => me => implicit val req: Request[_] = ctx.body FormFuResult(forms.profile) { err => fuccess(html.account.profile(me, err)) } { profile => UserRepo.setProfile(me.id, profile) inject Redirect(routes.User show me.username) } } def info = Auth { implicit ctx => me => negotiate( html = notFound, api = _ => Env.pref.api getPref me flatMap { prefs => lila.game.GameRepo urgentGames me map { povs => Env.current.bus.publish(lila.user.User.Active(me), 'userActive) Ok { import play.api.libs.json._ import lila.pref.JsonView._ Env.user.jsonView(me) ++ Json.obj( "prefs" -> prefs, "nowPlaying" -> JsArray(povs take 20 map Env.api.lobbyApi.nowPlaying)) } } } ) } def passwd = Auth { implicit ctx => me => Ok(html.account.passwd(me, forms.passwd)).fuccess } def passwdApply = AuthBody { implicit ctx => me => implicit val req = ctx.body FormFuResult(forms.passwd) { err => fuccess(html.account.passwd(me, err)) } { data => for { ok ← UserRepo.checkPasswordById(me.id, data.oldPasswd) _ ← ok ?? UserRepo.passwd(me.id, data.newPasswd1) } yield { val content = html.account.passwd(me, forms.passwd.fill(data), ok.some) ok.fold(Ok(content), BadRequest(content)) } } } private def emailForm(user: UserModel) = UserRepo email user.id map { email => Env.security.forms.changeEmail(user).fill( lila.security.DataForm.ChangeEmail(~email, "")) } def email = Auth { implicit ctx => me => emailForm(me) map { form => Ok(html.account.email(me, form)) } } def emailApply = AuthBody { implicit ctx => me => UserRepo hasEmail me.id flatMap { case true => notFound case false => implicit val req = ctx.body FormFuResult(Env.security.forms.changeEmail(me)) { err => fuccess(html.account.email(me, err)) } { data => val email = Env.security.emailAddress.validate(data.email) err s"Invalid email ${data.email}" for { ok ← UserRepo.checkPasswordById(me.id, data.passwd) _ ← ok ?? 
UserRepo.email(me.id, email) form <- emailForm(me) } yield { val content = html.account.email(me, form, ok.some) ok.fold(Ok(content), BadRequest(content)) } } } } def close = Auth { implicit ctx => me => Ok(html.account.close(me, Env.security.forms.closeAccount)).fuccess } def closeConfirm = AuthBody { implicit ctx => me => implicit val req = ctx.body FormFuResult(Env.security.forms.closeAccount) { err => fuccess(html.account.close(me, err)) } { password => UserRepo.checkPasswordById(me.id, password) flatMap { case false => BadRequest(html.account.close(me, Env.security.forms.closeAccount)).fuccess case true => (UserRepo disable me) >> relationEnv.api.unfollowAll(me.id) >> Env.team.api.quitAll(me.id) >> (Env.security disconnect me.id) inject { Redirect(routes.User show me.username) withCookies LilaCookie.newSession } } } } def kid = Auth { implicit ctx => me => Ok(html.account.kid(me)).fuccess } def kidConfirm = Auth { ctx => me => implicit val req = ctx.req (UserRepo toggleKid me) inject Redirect(routes.Account.kid) } private def currentSessionId(implicit ctx: Context) = ~Env.security.api.reqSessionId(ctx.req) def security = Auth { implicit ctx => me => Env.security.api.dedup(me.id, ctx.req) >> Env.security.api.locatedOpenSessions(me.id, 50) map { sessions => Ok(html.account.security(me, sessions, currentSessionId)) } } def signout(sessionId: String) = Auth { implicit ctx => me => if (sessionId == "all") lila.security.Store.closeUserExceptSessionId(me.id, currentSessionId) inject Redirect(routes.Account.security) else lila.security.Store.closeUserAndSessionId(me.id, sessionId) } }
JimmyMow/lila
app/controllers/Account.scala
Scala
mit
4,993
package org.apache.flink.contrib.tensorflow.types import java.nio.{ByteBuffer, ByteOrder} import com.twitter.bijection.Conversion.asMethod import com.twitter.bijection._ import org.apache.flink.contrib.tensorflow.util.TestData import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{Matchers, WordSpecLike} import org.tensorflow.example.Example import org.tensorflow.{DataType, Tensor} import scala.util.Success @RunWith(classOf[JUnitRunner]) class TensorInjectionsTest extends WordSpecLike with Matchers { import TensorInjections._ "TensorInjections" should { "support protobuf messages" which { "have syntax variants" in { val expected: Example = TestData.examples().head val actual1: Tensor = Injection.apply[Example,Tensor](expected) val actual2: Tensor = expected.as[Tensor] val actual3 = expected.as[Tensor @@ Rep[Example]] val inverse1: Example = Injection.apply[Tensor @@ Rep[Example], Example](actual3) } "convert to Tensor" in { val expected: Example = TestData.examples().head val converted: Tensor = expected.as[Tensor] //Injection.apply[Example,Tensor](expected) try { converted shouldBe a [Tensor] val actual = Injection.invert[Example, Tensor](converted) actual shouldBe a [Success[_]] actual.get shouldEqual expected } finally { converted.close() } } "convert to Tuple containing a Tensor" in { val expected: (String,Example) = ("example1", TestData.examples().head) val converted = expected.as[(String,Tensor @@ Rep[Example])] try { println(converted) val actual = converted.as[(String, Example)] println(actual) actual shouldEqual expected } finally { converted._2.close() } } "convert a List containing a Tensor" in { val expected: Seq[Example] = TestData.examples() val converted = expected.as[Seq[Tensor @@ Rep[Example]]] println(converted) val t: Tensor = converted.head println(t) } } "support STRING vectors" in { val examples: List[Example] = TestData.examples().toList // a hack to write a STRING tensor with numerous values val bb = ByteBuffer.allocate(10000).order(ByteOrder.nativeOrder()) bb.position(examples.size * 8) for(i <- examples.indices) { val data = examples(i).as[Tensor] val b = ByteBuffer.allocate(data.numBytes()) data.writeTo(b) data.close() b.flip().position(8) bb.putLong(i * 8, bb.position()) bb.put(b) } val tensor = Tensor.create(DataType.STRING, Array(examples.size.toLong), bb) println(tensor) } // "support Input/Output typeclasses" which { // "convert" in { // val regress: RegressionMethod = new RegressionMethod { // override def regress(input: Tensor @@ Rep[Example]): Tensor @@ Rep[Example] = { // input // } // } // // val input: Example = TestData.examples().head // val output: Example = regress.regress(input.as[Tensor @@ Rep[Example]]).as[Example] // // //// regress.regress() // } // } } }
cookieai/flink-tensorflow
flink-tensorflow/src/test/scala/org/apache/flink/contrib/tensorflow/types/TensorInjectionsTest.scala
Scala
apache-2.0
3,339
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.feature import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest} import org.apache.spark.sql.{DataFrame, Row} class StopWordsRemoverSuite extends MLTest with DefaultReadWriteTest { import testImplicits._ def testStopWordsRemover(t: StopWordsRemover, dataFrame: DataFrame): Unit = { testTransformer[(Array[String], Array[String])](dataFrame, t, "filtered", "expected") { case Row(tokens: Seq[_], wantedTokens: Seq[_]) => assert(tokens === wantedTokens) } } test("StopWordsRemover default") { val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") val dataSet = Seq( (Seq("test", "test"), Seq("test", "test")), (Seq("a", "b", "c", "d"), Seq("b", "c", "d")), (Seq("a", "the", "an"), Seq()), (Seq("A", "The", "AN"), Seq()), (Seq(null), Seq(null)), (Seq(), Seq()) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with particular stop words list") { val stopWords = Array("test", "a", "an", "the") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords) val dataSet = Seq( (Seq("test", "test"), Seq()), (Seq("a", "b", "c", "d"), Seq("b", "c", "d")), (Seq("a", "the", "an"), Seq()), (Seq("A", "The", "AN"), Seq()), (Seq(null), Seq(null)), (Seq(), Seq()) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with localed input (case insensitive)") { val stopWords = Array("milk", "cookie") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords) .setCaseSensitive(false) .setLocale("tr") // Turkish alphabet: has no Q, W, X but has dotted and dotless 'I's. val dataSet = Seq( // scalastyle:off (Seq("mİlk", "and", "nuts"), Seq("and", "nuts")), // scalastyle:on (Seq("cookIe", "and", "nuts"), Seq("cookIe", "and", "nuts")), (Seq(null), Seq(null)), (Seq(), Seq()) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with localed input (case sensitive)") { val stopWords = Array("milk", "cookie") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords) .setCaseSensitive(true) .setLocale("tr") // Turkish alphabet: has no Q, W, X but has dotted and dotless 'I's. 
val dataSet = Seq( // scalastyle:off (Seq("mİlk", "and", "nuts"), Seq("mİlk", "and", "nuts")), // scalastyle:on (Seq("cookIe", "and", "nuts"), Seq("cookIe", "and", "nuts")), (Seq(null), Seq(null)), (Seq(), Seq()) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with invalid locale") { intercept[IllegalArgumentException] { val stopWords = Array("test", "a", "an", "the") new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords) .setLocale("rt") // invalid locale } } test("StopWordsRemover case sensitive") { val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setCaseSensitive(true) val dataSet = Seq( (Seq("A"), Seq("A")), (Seq("The", "the"), Seq("The")) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("default stop words of supported languages are not empty") { StopWordsRemover.supportedLanguages.foreach { lang => assert(StopWordsRemover.loadDefaultStopWords(lang).nonEmpty, s"The default stop words of $lang cannot be empty.") } } test("StopWordsRemover with language selection") { val stopWords = StopWordsRemover.loadDefaultStopWords("turkish") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords) val dataSet = Seq( (Seq("acaba", "ama", "biri"), Seq()), (Seq("hep", "her", "scala"), Seq("scala")) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with ignored words") { val stopWords = StopWordsRemover.loadDefaultStopWords("english").toSet -- Set("a") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords.toArray) val dataSet = Seq( (Seq("python", "scala", "a"), Seq("python", "scala", "a")), (Seq("Python", "Scala", "swift"), Seq("Python", "Scala", "swift")) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("StopWordsRemover with additional words") { val stopWords = StopWordsRemover.loadDefaultStopWords("english").toSet ++ Set("python", "scala") val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol("filtered") .setStopWords(stopWords.toArray) val dataSet = Seq( (Seq("python", "scala", "a"), Seq()), (Seq("Python", "Scala", "swift"), Seq("swift")) ).toDF("raw", "expected") testStopWordsRemover(remover, dataSet) } test("read/write") { val t = new StopWordsRemover() .setInputCol("myInputCol") .setOutputCol("myOutputCol") .setStopWords(Array("the", "a")) .setCaseSensitive(true) testDefaultReadWrite(t) } test("StopWordsRemover output column already exists") { val outputCol = "expected" val remover = new StopWordsRemover() .setInputCol("raw") .setOutputCol(outputCol) val dataSet = Seq((Seq("The", "the", "swift"), Seq("swift"))).toDF("raw", outputCol) testTransformerByInterceptingException[(Array[String], Array[String])]( dataSet, remover, s"requirement failed: Column $outputCol already exists.", "expected") } }
aosagie/spark
mllib/src/test/scala/org/apache/spark/ml/feature/StopWordsRemoverSuite.scala
Scala
apache-2.0
6,822
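For context on the transformer exercised by the suite above, a minimal usage sketch (the app name, column names, and sample rows are illustrative, not from the suite):

import org.apache.spark.ml.feature.StopWordsRemover
import org.apache.spark.sql.SparkSession

object StopWordsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.master("local[*]").appName("stopwords-sketch").getOrCreate()
    import spark.implicits._

    val df = Seq(
      (0, Seq("I", "saw", "the", "red", "balloon")),
      (1, Seq("Mary", "had", "a", "little", "lamb"))
    ).toDF("id", "raw")

    // Default English stop words; "filtered" keeps e.g. Seq("saw", "red", "balloon").
    val remover = new StopWordsRemover()
      .setInputCol("raw")
      .setOutputCol("filtered")
      .setCaseSensitive(false)

    remover.transform(df).show(truncate = false)
    spark.stop()
  }
}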
/* * Copyright 2016 Actian Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.actian.spark_vector /** Implements buffering (and value serialization) for `Vector` columns */ package object colbuffer { /** Common constants (for data sizes) */ final val ByteSize = 1 final val ShortSize = 2 final val IntSize = 4 final val LongSize = 8 final val FloatSize = 4 final val DoubleSize = 8 final val LongLongSize = 16 final val BooleanSize = 1 final val DateSize = 4 }
ActianCorp/spark-vector
src/main/scala/com/actian/spark_vector/colbuffer/package.scala
Scala
apache-2.0
1,016
package lila.puzzle import play.api.data._ import play.api.data.Forms._ import lila.common.Form.{ numberIn, stringIn } object PuzzleForm { case class RoundData( win: Boolean, rated: Boolean, replayDays: Option[Int], streakId: Option[String], streakScore: Option[Int] ) { def result = Result(win) def streakPuzzleId = streakId flatMap Puzzle.toId def mode = chess.Mode(rated) } val round = Form( mapping( "win" -> boolean, "rated" -> boolean, "replayDays" -> optional(numberIn(PuzzleDashboard.dayChoices)), "streakId" -> optional(nonEmptyText), "streakScore" -> optional(number(min = 0, max = 250)) )(RoundData.apply)(RoundData.unapply) ) val vote = Form( single("vote" -> boolean) ) val themeVote = Form( single("vote" -> optional(boolean)) ) val difficulty = Form( single("difficulty" -> stringIn(PuzzleDifficulty.all.map(_.key).toSet)) ) object bc { val round = Form( mapping( "win" -> text )(w => RoundData(win = w == "1" || w == "true", rated = true, none, none, none))(r => none) ) val vote = Form( single("vote" -> numberIn(Set(0, 1))) ) import play.api.libs.json._ case class Solution(id: Long, win: Boolean) case class SolveData(solutions: List[Solution]) implicit val SolutionReads = Json.reads[Solution] implicit val SolveDataReads = Json.reads[SolveData] } }
luanlv/lila
modules/puzzle/src/main/PuzzleForm.scala
Scala
mit
1,501
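As a usage sketch for the `round` form above, binding from raw string parameters via Play's `Form#bind(Map[String, String])` (the wrapper object is hypothetical; lila's real controllers bind from the request):

object RoundFormSketch extends App {
  // Bind raw request parameters to RoundData; fold handles both outcomes.
  val bound = lila.puzzle.PuzzleForm.round.bind(Map(
    "win" -> "true",
    "rated" -> "false"
  ))

  bound.fold(
    err => println(s"form errors: ${err.errors}"),
    data => println(s"win=${data.win} mode=${data.mode}")
  )
}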
package at.forsyte.apalache.tla.bmcmt.rules.aux

import at.forsyte.apalache.tla.bmcmt.{SymbState, SymbStateRewriter}

class OracleFactory(rewriter: SymbStateRewriter) {

  /**
   * Create a new oracle that can have a value in the range [0, nvalues).
   * This oracle is created using the most efficient oracle implementation.
   *
   * @param state   a symbolic state
   * @param nvalues the number of values to hold
   * @return a new symbolic state and the oracle; the returned state.rex equals the input state.rex
   */
  def newDefaultOracle(state: SymbState, nvalues: Int): (SymbState, Oracle) = {
    UninterpretedConstOracle.create(rewriter, state, nvalues)
  }

  /**
   * Create a new oracle that can have a value in the range [0, nvalues).
   * This oracle is using uninterpreted constants to encode the oracle values.
   *
   * @param state   a symbolic state
   * @param nvalues the number of values to hold
   * @return a new symbolic state and the oracle; the returned state.rex equals the input state.rex
   */
  def newConstOracle(state: SymbState, nvalues: Int): (SymbState, UninterpretedConstOracle) = {
    UninterpretedConstOracle.create(rewriter, state, nvalues)
  }

  /**
   * Create a new oracle that can have a value in the range [0, nvalues).
   * This oracle is using a propositional encoding of oracle values,
   * e.g., 4 values are encoded as b0 /\ b1, ~b0 /\ b1, b0 /\ ~b1, ~b0 /\ ~b1.
   *
   * @param state   a symbolic state
   * @param nvalues the number of values to hold
   * @return a new symbolic state and the oracle; the returned state.rex equals the input state.rex
   */
  def newPropositionalOracle(state: SymbState, nvalues: Int): (SymbState, PropositionalOracle) = {
    PropositionalOracle.create(rewriter, state, nvalues)
  }

  /**
   * Create a new oracle that can have a value in the range [0, nvalues).
   * This oracle is using an integer encoding of oracle values.
   *
   * @param state   a symbolic state
   * @param nvalues the number of values to hold
   * @return a new symbolic state and the oracle; the returned state.rex equals the input state.rex
   */
  def newIntOracle(state: SymbState, nvalues: Int): (SymbState, IntOracle) = {
    IntOracle.create(rewriter, state, nvalues)
  }
}
konnov/apalache
tla-bmcmt/src/main/scala/at/forsyte/apalache/tla/bmcmt/rules/aux/OracleFactory.scala
Scala
apache-2.0
2,211
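The propositional encoding described in the scaladoc above can be illustrated without any apalache machinery; this standalone sketch just enumerates the bit-literal conjunctions for a small oracle (illustrative only, not the library's API):

object PropEncodingSketch extends App {
  // Number of boolean variables needed to distinguish nvalues oracle values.
  def bitsFor(nvalues: Int): Int =
    math.max(1, math.ceil(math.log(nvalues) / math.log(2)).toInt)

  // For each bit position, whether the literal is positive (b_i) or negated (~b_i).
  def literals(value: Int, nbits: Int): Seq[Boolean] =
    (0 until nbits).map(i => ((value >> i) & 1) == 1)

  val nbits = bitsFor(4)
  for (v <- 0 until 4) {
    val clause = literals(v, nbits).zipWithIndex
      .map { case (pos, i) => if (pos) s"b$i" else s"~b$i" }
      .mkString(" /\\ ")
    println(s"oracle == $v  <=>  $clause")
  }
}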
package tanukkii.akkahttp.aws import akka.http.scaladsl.model.HttpHeader.ParsingResult import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model._ import akka.stream.Materializer import akka.stream.scaladsl.{Sink, Source, Flow} import akka.util.ByteString import com.amazonaws.util.SdkHttpUtils import com.amazonaws.{DefaultRequest, AmazonWebServiceResponse, Request} import com.amazonaws.http.{HttpResponseHandler, HttpMethodName, HttpResponse => AWSHttpResponse} import com.amazonaws.transform.Marshaller import scala.collection.JavaConversions._ import java.net.URI import scala.concurrent.Future import scala.collection.JavaConverters._ trait AWSClientConversions { val defaultMediaType = "application/x-amz-json-1.0" implicit def convertToHttpMethod(method: HttpMethodName): HttpMethod = { method match { case HttpMethodName.GET => HttpMethods.GET case HttpMethodName.POST => HttpMethods.POST case HttpMethodName.PUT => HttpMethods.PUT case HttpMethodName.DELETE => HttpMethods.DELETE case HttpMethodName.HEAD => HttpMethods.HEAD case HttpMethodName.PATCH => HttpMethods.PATCH } } implicit def convertToHttpRequest[T](t: T)(implicit marshaller: Marshaller[Request[T], T], connectionFlow: ConnectionFlow[HttpRequest, HttpResponse]): HttpRequest = { val awsRequest = marshaller.marshall(t) awsRequest.setEndpoint(new URI(connectionFlow.endpoint)) connectionFlow.signer.sign(awsRequest, connectionFlow.credentialsProvider.getCredentials) val parameter = awsRequest.getParameters.iterator.flatMap(kv => kv._2.asScala.map(v => kv._1 -> v)).toSeq val entity = if (awsRequest.getContent != null) { val body = Stream.continually(awsRequest.getContent.read).takeWhile(-1 != _).map(_.toByte).toArray val contentType = MediaType.custom(Option(awsRequest.getHeaders.get("Content-Type")).getOrElse(defaultMediaType), binary = false) HttpEntity(ContentType(contentType, () => HttpCharsets.`UTF-8`), body) } else if (SdkHttpUtils.usePayloadForQueryParameters(awsRequest)) { FormData(parameter: _*).toEntity } else HttpEntity.Empty val uri = { val s = Option(awsRequest.getResourcePath).filter(_.length > 0).getOrElse("/") if (SdkHttpUtils.usePayloadForQueryParameters(awsRequest)) Uri(s) else Uri(s).withQuery(Query(parameter: _*)) } val headers = awsRequest.getHeaders.toList.withFilter { case ("Host", _) => false case ("User-Agent", _) => false case ("Content-Length", _) => false case ("Content-Type", _) => false case (_, _) => true }.map { kv => val (name, value) = kv HttpHeader.parse(name, value) }.collect { case ParsingResult.Ok(header, _) => header } HttpRequest(method = awsRequest.getHttpMethod, uri = uri, headers = headers, entity = entity) } implicit def convertFromHttpResponseToSource[T, S <: AWSService](response: HttpResponse)(implicit handler: HttpResponseHandler[AmazonWebServiceResponse[T]], serviceContext: AWSServiceContext[S]): Source[T, Any] = { response.entity.dataBytes.via(byteBufferInputStreamFlow).map { inputStream => val req = new DefaultRequest[T](serviceContext.service.name) val awsResp = new AWSHttpResponse(req, null) awsResp.setContent(inputStream) awsResp.setStatusCode(response.status.intValue) awsResp.setStatusText(response.status.defaultMessage) if (200 <= response.status.intValue && response.status.intValue < 300) { val handle: AmazonWebServiceResponse[T] = handler.handle(awsResp) handle.getResult } else { response.headers.foreach { h => awsResp.addHeader(h.name, h.value) } throw serviceContext.errorResponseHandler.handle(awsResp) } } } implicit def convertFromHttpResponse[T, S <: 
AWSService](response: HttpResponse)(implicit handler: HttpResponseHandler[AmazonWebServiceResponse[T]], serviceContext: AWSServiceContext[S], materializer: Materializer): Future[T] = { convertFromHttpResponseToSource(response).runWith(Sink.head)(materializer) } private val byteBufferInputStreamFlow = Flow[ByteString].fold(ByteString())(_ ++ _).map(_.iterator.asInputStream) } object AWSClientConversions extends AWSClientConversions
TanUkkii007/akka-http-aws
akka-http-aws-core/src/main/scala/tanukkii/akkahttp/aws/AWSClientConversions.scala
Scala
mit
4,302
package elevators.ui

import elevators._
import elevators.queue.RequestQueue
import elevators.util.first
import java.awt.Container
import java.awt.Dimension
import java.awt.Graphics
import java.awt.GridLayout
import java.awt.event.MouseEvent
import java.awt.event.MouseListener
import javax.swing.JComponent
import javax.swing.JPanel
import scala.collection.immutable.HashMap
import scala.util.Random

/**
 * @param initialRequests only used for initialization
 */
class MainPanel(initialRequests: RequestQueue[Int]) extends JPanel with View {

  var requests = initialRequests

  this.setPreferredSize(new Dimension(200, 500))

  val gridLayout = new GridLayout(0, 1)
  gridLayout.setVgap(1)
  this.setLayout(gridLayout)

  var requestRegions = HashMap[Int, RequestRegion]()

  for (i <- List.range(1, 50)) {
    val region = new RequestRegion()
    Interaction.onClick(region, () => this.enqueue(i))
    this.add(region)
    this.requestRegions += i -> region
  }

  def enqueue(request: Int): Unit = {
    this.drawRequest(request)
    this.requests = this.requests.enqueue(request)
  }

  def dequeue: Unit = {
    if (!this.requests.isEmpty) {
      val dequeued = this.requests.dequeue
      this.drawService(dequeued._1)
      // sleep?
      this.requests = dequeued._2
    }
  }

  override def drawRequest(request: Int): Unit = {
    this.repaintRegion(request, region => region.requested())
  }

  override def drawService(request: Int): Unit = {
    this.repaintRegion(request, region => region.serviced())
  }

  private def repaintRegion[T](request: Int, f: RequestRegion => T): Option[T] = {
    this.requestRegions.get(request).map(region => {
      val result = f(region)
      region.repaint()
      return Some(result)
    })
  }

  override def paintComponent(graphics: Graphics): Unit = {
    for (region <- this.requestRegions) {
      region._2.repaint()
    }
  }

  def randomRequest(): Unit = {
    val request = generateRequest(this.requests, new Random())
    this.enqueue(request)
  }
}

object Interaction {
  def onClick(component: JComponent, clicked: () => Unit) {
    component.addMouseListener(new DelegateMouseListener(clicked))
  }

  class DelegateMouseListener(clicked: () => Unit) extends MouseListener {
    override def mouseClicked(event: MouseEvent) = {
      clicked()
    }
    override def mouseEntered(event: MouseEvent): Unit = ()
    override def mouseExited(event: MouseEvent): Unit = ()
    override def mousePressed(event: MouseEvent): Unit = ()
    override def mouseReleased(event: MouseEvent): Unit = ()
  }
}
wohanley/elevators
src/main/scala/elevators/ui/MainPanel.scala
Scala
agpl-3.0
2,553
package controllers

import play.api._
import play.api.mvc._
import play.api.data._
import play.api.libs.json._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._

import models.EpisodeId
import models.Watching
import models.User

object Episodes extends Controller {

  implicit val userReads: Reads[User] =
    (JsPath \ "name").read[String](minLength[String](1)).map(User(_))

  def watch(episodeId: EpisodeId) = Action(BodyParsers.parse.json) { request =>
    val userResult = request.body.validate[User]
    userResult.fold(
      errors => BadRequest(Json.obj("status" -> "Failed", "message" -> JsError.toFlatJson(errors))),
      user => {
        Watching.watch(user, episodeId)
        val response = Json.obj("status" -> "Success")
        Ok(response)
      }
    )
  }

  def unwatch(episodeId: EpisodeId) = Action(BodyParsers.parse.json) { request =>
    val userResult = request.body.validate[User]
    userResult.fold(
      errors => BadRequest(Json.obj("status" -> "Failed", "message" -> JsError.toFlatJson(errors))),
      user => {
        Watching.unwatch(user, episodeId)
        val response = Json.obj("status" -> "Success")
        Ok(response)
      }
    )
  }
}
PaulKeeble/TVTracker
app/controllers/Episodes.scala
Scala
gpl-2.0
1,234
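The validate/fold pattern used in both actions above also works outside a controller; a self-contained play-json sketch (the User model is redeclared locally just for illustration):

import play.api.libs.json._
import play.api.libs.json.Reads._

object ValidateFoldSketch extends App {
  // Local stand-in for models.User, just for this sketch.
  case class User(name: String)
  implicit val userReads: Reads[User] =
    (JsPath \ "name").read[String](minLength[String](1)).map(User(_))

  // fold: the first function handles validation errors, the second the parsed value.
  Json.parse("""{"name": "alice"}""").validate[User].fold(
    errors => println(s"invalid payload: $errors"),
    user => println(s"watching as ${user.name}")
  )
}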
package com.epicport

import java.io.File

import xitrum.Config.application

object Configuration {

  val data = new File(application.getString("epicport.data"))
  val profile = new File(data, "profile")

  if (!data.exists) {
    throw new IllegalStateException(s"Folder ${data} does not exist")
  }

  if (!profile.exists) {
    throw new IllegalStateException(s"Folder ${profile} does not exist")
  }
}
caiiiycuk/epicport
web/src/main/scala/com/epicport/Configuration.scala
Scala
gpl-2.0
456
package com.twitter.finagle.param import com.twitter.finagle.service.StatsFilter import com.twitter.finagle.util.DefaultTimer import com.twitter.finagle.{Stack, stats, tracing, util} import com.twitter.util.{JavaTimer, NullMonitor} /** * A class eligible for configuring a label used to identify finagle * clients and servers. */ case class Label(label: String) { def mk(): (Label, Stack.Param[Label]) = (this, Label.param) } object Label { private[finagle] val Default: String = "" implicit val param: Stack.Param[Label] = Stack.Param(Label(Default)) } /** * A class eligible for configuring a client library name used to identify * which client library a client is using. */ case class ProtocolLibrary(name: String) { def mk(): (ProtocolLibrary, Stack.Param[ProtocolLibrary]) = (this, ProtocolLibrary.param) } object ProtocolLibrary { implicit val param: Stack.Param[ProtocolLibrary] = Stack.Param(ProtocolLibrary("not-specified")) } /** * A class eligible for configuring a [[com.twitter.util.Timer]] used * throughout finagle clients and servers. * * @param timer it is a requirement that it propagates [[com.twitter.util.Local Locals]] * from scheduling time to execution time. * * @see [[HighResTimer]] for a configuration that needs a more * fine-grained timer as this is typically implemented via a * "hashed wheel timer" which is optimized for approximated * I/O timeout scheduling. */ case class Timer(timer: com.twitter.util.Timer) { def mk(): (Timer, Stack.Param[Timer]) = (this, Timer.param) } object Timer { implicit val param: Stack.Param[Timer] = Stack.Param(Timer(DefaultTimer)) } /** * A class eligible for configuring a high resolution [[com.twitter.util.Timer]] * such that tasks are run tighter to their schedule. * * @param timer it is a requirement that it propagates [[com.twitter.util.Local Locals]] * from scheduling time to execution time. * * @see [[Timer]] for a configuration that is appropriate for * tasks that do not need fine-grained scheduling. * * @note it is expected that the resolution should be sub-10 milliseconds. */ case class HighResTimer(timer: com.twitter.util.Timer) { def mk(): (HighResTimer, Stack.Param[HighResTimer]) = (this, HighResTimer.param) } object HighResTimer { /** * The default Timer used for configuration. * * It is a shared resource and as such, `stop` is ignored. */ val Default: com.twitter.util.Timer = new JavaTimer(true, Some("HighResTimer")) { override def stop(): Unit = () } implicit val param: Stack.Param[HighResTimer] = Stack.Param(HighResTimer(Default)) } /** * A class eligible for configuring a [[java.util.logging.Logger]] * used throughout finagle clients and servers. */ case class Logger(log: java.util.logging.Logger) { def mk(): (Logger, Stack.Param[Logger]) = (this, Logger.param) } object Logger { implicit val param: Stack.Param[Logger] = Stack.Param(Logger(util.DefaultLogger)) } /** * A class eligible for configuring a * [[com.twitter.finagle.stats.StatsReceiver]] throughout finagle * clients and servers. */ case class Stats(statsReceiver: stats.StatsReceiver) { def mk(): (Stats, Stack.Param[Stats]) = (this, Stats.param) } object Stats { implicit val param: Stack.Param[Stats] = Stack.Param(Stats(stats.DefaultStatsReceiver)) } /** * A class eligible for configuring a [[com.twitter.util.Monitor]] * throughout finagle servers and clients. 
*/ case class Monitor(monitor: com.twitter.util.Monitor) { def mk(): (Monitor, Stack.Param[Monitor]) = (this, Monitor.param) } object Monitor { implicit val param: Stack.Param[Monitor] = Stack.Param(Monitor(NullMonitor)) } /** * A class eligible for configuring a [[com.twitter.finagle.service.ResponseClassifier]] * which is used to determine the result of a request/response. * * This allows developers to give Finagle the additional application specific * knowledge necessary in order to properly classify them. Without this, * Finagle can only safely make judgements about the transport level failures. * * As an example take an HTTP client that receives a response with a 500 status * code back from a server. To Finagle this is a successful request/response * based solely on the transport level. The application developer may want to * treat all 500 status codes as failures and can do so via a * [[com.twitter.finagle.service.ResponseClassifier]]. * * It is a [[PartialFunction]] and as such multiple classifiers can be composed * together via [[PartialFunction.orElse]]. * * @see `com.twitter.finagle.http.service.HttpResponseClassifier` for some * HTTP classification tools. * * @note If unspecified, the default classifier is * [[com.twitter.finagle.service.ResponseClassifier.Default]] * which is a total function fully covering the input domain. */ case class ResponseClassifier(responseClassifier: com.twitter.finagle.service.ResponseClassifier) { def mk(): (ResponseClassifier, Stack.Param[ResponseClassifier]) = (this, ResponseClassifier.param) } object ResponseClassifier { implicit val param: Stack.Param[ResponseClassifier] = Stack.Param(ResponseClassifier(com.twitter.finagle.service.ResponseClassifier.Default)) } /** * A class eligible for configuring a * [[com.twitter.finagle.util.ReporterFactory]] throughout finagle servers and * clients. */ case class Reporter(reporter: util.ReporterFactory) { def mk(): (Reporter, Stack.Param[Reporter]) = (this, Reporter.param) } object Reporter { implicit val param: Stack.Param[Reporter] = Stack.Param(Reporter(util.LoadedReporterFactory)) } /** * A class eligible for configuring a * [[com.twitter.finagle.tracing.Tracer]] throughout finagle servers * and clients. */ case class Tracer(tracer: tracing.Tracer) { def mk(): (Tracer, Stack.Param[Tracer]) = (this, Tracer.param) } object Tracer { implicit val param: Stack.Param[Tracer] = Stack.Param(Tracer(tracing.DefaultTracer)) } /** * A class eligible for configuring a * [[com.twitter.finagle.stats.ExceptionStatsHandler]] throughout finagle servers * and clients. * * NB: Since the default for failures is to be scoped under "failures", if you * set the default to be in another scope, it may be difficult for engineers * unfamiliar with your stats to understand your service's key metrics. */ case class ExceptionStatsHandler(categorizer: stats.ExceptionStatsHandler) object ExceptionStatsHandler { implicit val param: Stack.Param[ExceptionStatsHandler] = new Stack.Param[ExceptionStatsHandler] { // Note, this is lazy to avoid potential failures during // static initialization. lazy val default = ExceptionStatsHandler(StatsFilter.DefaultExceptions) } }
mkhq/finagle
finagle-core/src/main/scala/com/twitter/finagle/param/Params.scala
Scala
apache-2.0
6,749
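A hedged sketch of the composition the ResponseClassifier scaladoc above describes, treating HTTP 5xx responses as failures (assumes finagle-core and finagle-http on the classpath; `ResponseClassifier.named` and `ResponseClass.NonRetryableFailure` are used as publicly documented, but verify against your Finagle version):

import com.twitter.finagle.http.Response
import com.twitter.finagle.service.{ReqRep, ResponseClass, ResponseClassifier}
import com.twitter.util.Return

object ClassifierSketch {
  // Classify any HTTP 5xx response as a non-retryable failure.
  val serverErrorsAsFailures: ResponseClassifier =
    ResponseClassifier.named("ServerErrorsAsFailures") {
      case ReqRep(_, Return(rep: Response)) if rep.statusCode / 100 == 5 =>
        ResponseClass.NonRetryableFailure
    }

  // ResponseClassifier is a PartialFunction, so orElse composes; falling back
  // to the library default keeps the composition total over all inputs.
  val classifier: ResponseClassifier =
    serverErrorsAsFailures.orElse(ResponseClassifier.Default)
}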
/* ************************************************************************** * * * Copyright (C) 2011 Christian Krause * * * * Christian Krause <[email protected]> * * * **************************************************************************** * * * This file is part of 'org.anyzor'. * * * * This project is free software: you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation, either version 3 of the License, or * * any later version. * * * * This project is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this project. If not, see <http://www.gnu.org/licenses/>. * * * ****************************************************************************/ package org.anyzor package gather package feed class FeedGatherer(url: String) extends Actor { def receive = { case GatherFeed(url) if url == this.url => self.channel ! Feed(url) } }
wookietreiber/org.anyzor
gatherer-feed/src/main/scala/FeedGatherer.scala
Scala
gpl-3.0
2,073
package services import com.fasterxml.jackson.core.JsonParseException import model.CurrencyConverter import play.api.libs.functional.syntax._ import play.api.libs.json._ case class GogEntry(title: String, link : String, gogId: Long, price: Option[BigDecimal] = None, discounted: Option[BigDecimal] = None, genres : String, owned : Boolean) extends ShopEntry { override def id: Long = gogId } object GogEntry { private val regExp = "var gogData = (.+);".r private val coreGogRead = (JsPath \ "title").read[String] and (JsPath \ "url").read[String] and (JsPath \ "id").read[Long] and (JsPath \ "category").read[String] def gogWishListReads(converter: CurrencyConverter): Reads[GogEntry] = (coreGogRead and (JsPath \ "price" \ "symbol").read[String] and (JsPath \ "price" \ "baseAmount").read[String] and (JsPath \ "price" \ "finalAmount").read[String])((t, u, i, c, s, p, d) => GogEntry(t, "http://www.gog.com" + u, i, converter.convert(p + s), converter.convert(d + s), c, owned = false)) val gogReads: Reads[GogEntry] = coreGogRead((t, u, i, c) => GogEntry(t, "http://www.gog.com" + u, i, genres = c, owned = true)) implicit val gogWrites: Writes[GogEntry] = ( (JsPath \ "title").write[String] and (JsPath \ "link").write[String] and (JsPath \ "gogId").write[Long] and (JsPath \ "price" \ "normal").write[Option[BigDecimal]] and (JsPath \ "price" \ "discounted").write[Option[BigDecimal]] and (JsPath \ "genres").write[String])((e) => (e.title, e.link, e.gogId, e.price, if(e.price != e.discounted) e.discounted else None, e.genres)) private def parseWishList(wishList: String, converter : CurrencyConverter) = { val body = regExp.findAllMatchIn(wishList).map(m => m.group(1)).toSeq.headOption body.map(parseGogEntries(gogWishListReads(converter))).getOrElse(Seq()) } def parse(owned: Seq[String], wishList : String, converter: CurrencyConverter): Seq[GogEntry] = owned.flatMap(parseGogEntries(gogReads)) ++ parseWishList(wishList, converter) def getGogPageNumber(body: String): Int = { try{ (Json.parse(body) \ "totalPages").as[Int] } catch{ case _ : JsonParseException => 0 } } private def parseGogEntries(reads: Reads[GogEntry])(body: String) = { val parse = Json.parse(body) (parse \ "products").as[List[JsValue]].map(_.as(reads)) } }
kongus99/Aggregator
app/services/GogEntry.scala
Scala
gpl-3.0
2,366
package com.ml import com.util._ import com.util.Chart._ import com.graph._ import scala.util.Random import scala.io.Source abstract class DataSet extends FitnessEvalwShow { val outputCount = 1 val data: Seq[(Seq[Double],Double)] def apply(f: Seq[Double] => Seq[Double]): Double = { val ms = (for((data,target) <- data) yield { val ans = f(data)(0) val diff = target - ans diff*diff }).sum Math.sqrt(ms/(data.size.toDouble)) } def show(f: Seq[Double] => Seq[Double]): Graph = { val comparison = for((data,target) <- data) yield { val calculated = f(data)(0) (target, calculated) } val sorted = comparison.sortBy(_._1) Chart(("real", sorted.map{_._1}), ("calculated", sorted.map{_._2})) } } object DataSet{ def index(key: String, keys: Seq[String]) = { val idx = keys.indexOf(key) if(idx == -1) throw new Exception(s"Couldn't find key $key") idx } def fromFunc( vecLen: Int, numVec: Int, vrange: Double) (f: Seq[Double] => Double) = new DataSet{ val range = vrange val data: Seq[(Seq[Double],Double)] = { val rand = new Random() def r() = (rand.nextDouble - 0.5) * 2.0 * range val xs = (1 to numVec).map{ i => Array.fill[Double](vecLen)(r()).toSeq } xs.map( x => (x, f(x)) ) } val inputCount = data.map(x => x._1.length).min } def fromFile(filename: String) = { val source = Source.fromFile(filename).getLines().toList new DataSet{ val data = for(line <- source) yield { val nums = line.split(',').map(x => x.toDouble).toSeq (nums.init, nums.last) } val range = data.map(x => x._2).max val inputCount = data.map(x => x._1.length).min } } def fromCsv(data: CSV, output: String, input: Seq[String]) = { import scala.collection.mutable.ArrayBuffer val dataBuffer = new ArrayBuffer[(Seq[Double],Double)]() data.foreach{ row => val t = row(output).toDouble val valueBuffer = new ArrayBuffer[Double]() val v = input.foreach { valueBuffer += row(_).toDouble } dataBuffer +=( (valueBuffer.toSeq,t) ) } new DataSet{ val data = dataBuffer.toSeq val range = data.map(x => x._2).max val inputCount = data.map(x => x._1.length).min } } }
BrettAM/EvCompHW
src/main/scala/ml/DataSet.scala
Scala
apache-2.0
2,631
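DataSet.apply above scores a candidate function by root-mean-square error over (input, target) pairs; the same computation, stripped of the framework types:

object RmseSketch extends App {
  // RMSE of a candidate function f over a sample; 0.0 means a perfect fit.
  def rmse(data: Seq[(Seq[Double], Double)])(f: Seq[Double] => Seq[Double]): Double = {
    val sumSq = data.map { case (xs, target) =>
      val diff = target - f(xs).head
      diff * diff
    }.sum
    math.sqrt(sumSq / data.size)
  }

  val sample = Seq((Seq(1.0, 2.0), 3.0), (Seq(2.0, 5.0), 7.0))
  println(rmse(sample)(xs => Seq(xs.sum))) // prints 0.0: the true function fits exactly
}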
package edu.gemini.phase2.template.factory.impl.graces import edu.gemini.phase2.template.factory.impl._ import edu.gemini.spModel.core.MagnitudeBand import edu.gemini.spModel.gemini.graces.blueprint.SpGracesBlueprint import edu.gemini.pot.sp.{SPComponentType, ISPGroup} import edu.gemini.spModel.gemini.visitor.VisitorInstrument import edu.gemini.spModel.target.SPTarget import SpGracesBlueprint.ReadMode._ import SpGracesBlueprint.FiberMode._ case class Graces(blueprint: SpGracesBlueprint, exampleTarget: Option[SPTarget]) extends GroupInitializer[SpGracesBlueprint] with TemplateDsl2[VisitorInstrument] { val program = "GRACES PHASE I/II MAPPING BPS" def instCompType: SPComponentType = VisitorInstrument.SP_TYPE def seqConfigCompType: SPComponentType = sys.error("No sequence component type for visitor instrument GRACES") // This seems to be necessary, sorry. var db:Option[TemplateDb] = None override def initialize(db:TemplateDb):Maybe[ISPGroup] = try { this.db = Some(db) super.initialize(db) } finally { this.db = None } // R = Phase-I target R-band or V-band magnitude val rMag: Option[Double] = for { t <- exampleTarget m <- t.getMagnitude(MagnitudeBand.R) orElse t.getMagnitude(MagnitudeBand.V) } yield m.value // IF FIBER-MODE == 1 AND (READ-MODE == Normal OR READ-MODE == Fast): // IF R> 10 INCLUDE {1} // ELIF R<=10 INCLUDE {2} // ELSE INCLUDE {1,2} # Unknown brightness, so include both // // ELIF FIBER-MODE == 1 AND READ-MODE == Slow: // IF R> 10 INCLUDE {3} // ELIF R<=10 INCLUDE {4} // ELSE INCLUDE {3,4} // // ELIF FIBER-MODE == 2 AND (READ-MODE == Normal OR READ-MODE == Fast): // IF R> 10 INCLUDE {5} // ELIF R<=10 INCLUDE {6} // ELSE INCLUDE {5,6} // // ELIF FIBER-MODE == 2 AND READ-MODE == Slow: // IF R> 10 INCLUDE {7} // ELIF R<=10 INCLUDE {8} // ELSE INCLUDE {7,8} def select(gt10: Int, lte10: Int): List[Int] = rMag.map(m => if (m > 10) List(gt10) else List(lte10)).getOrElse(List(gt10, lte10)) val sci = (blueprint.getFiberMode, blueprint.getReadMode) match { case (ONE_FIBER, NORMAL | FAST) => select(1, 2) case (ONE_FIBER, SLOW) => select(3, 4) case (TWO_FIBER, NORMAL | FAST) => select(5, 6) case (TWO_FIBER, SLOW) => select(7, 8) } include(sci : _*) in TargetGroup // these should be in top level, but will appear in each template group for now addNote("How to prepare your program", "GRACES set-up") in TopLevel }
spakzad/ocs
bundle/edu.gemini.phase2.skeleton.servlet/src/main/scala/edu/gemini/phase2/template/factory/impl/graces/Graces.scala
Scala
bsd-3-clause
2,595
package scorex.app import akka.actor.ActorRef import scorex.consensus.ConsensusModule import scorex.network.message.BasicMessagesRepo import scorex.settings.Settings import scorex.transaction.{BlockStorage, History, TransactionModule} import scorex.wallet.Wallet /** * Pure interface to application */ trait Application { //modules implicit val consensusModule: ConsensusModule[_] implicit val transactionModule: TransactionModule[_] val basicMessagesSpecsRepo: BasicMessagesRepo val history: History val blockStorage: BlockStorage val networkController: ActorRef val coordinator: ActorRef val blockGenerator: ActorRef val blockchainSynchronizer: ActorRef val scoreObserver: ActorRef val settings: Settings val wallet: Wallet }
alexeykiselev/WavesScorex
scorex-basics/src/main/scala/scorex/app/Application.scala
Scala
cc0-1.0
771
package org.example1.usage import org.example1.declaration.X import org.example1.declaration.data.A trait Usage_MergeToExisting_Imports_1_1 { val a: A = ??? val x: X = ??? }
JetBrains/intellij-scala
scala/scala-impl/testdata/move/allInOne/before/org/example1/usage/Usage_MergeToExisting_Imports_1_1.scala
Scala
apache-2.0
181
package paperdoll.cats import paperdoll.core.effect.Effects import paperdoll.core.layer.Layer import CatsEffects.sendTUC import cats.Functor import cats.data.EitherT import paperdoll.std.Either_ import cats.instances.either._ object EitherTLayer { def sendEitherT[F[_]: Functor, A, B](et: EitherT[F, A, B]): Effects.Two[Layer.Aux[F], Either_[A], B] = sendTUC[F[Either[A, B]], Either[A, B]](et.value) }
m50d/paperdoll
cats/src/main/scala/paperdoll/cats/EitherTLayer.scala
Scala
apache-2.0
409
package im.tox.antox.activities

import android.app.Activity
import android.content.Intent
import android.graphics.Color
import android.os.{Build, Bundle}
import android.preference.PreferenceManager
import android.support.v7.app.AppCompatActivity
import android.view.{View, WindowManager}
import android.widget._
import im.tox.antox.data.UserDB
import im.tox.antox.tox.ToxService
import im.tox.antoxnightly.R

class LoginActivity extends AppCompatActivity with AdapterView.OnItemSelectedListener {

  private var profileSelected: String = _

  protected override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_login)
    getSupportActionBar.hide()
    if (Build.VERSION.SDK_INT != Build.VERSION_CODES.JELLY_BEAN && Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
      getWindow.setFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED, WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED)
    }
    val preferences = PreferenceManager.getDefaultSharedPreferences(this)
    val db = new UserDB(this)
    if (!db.doUsersExist()) {
      db.close()
      val createAccount = new Intent(getApplicationContext, classOf[CreateAccountActivity])
      startActivity(createAccount)
      finish()
    } else if (preferences.getBoolean("loggedin", false)) {
      db.close()
      val startTox = new Intent(getApplicationContext, classOf[ToxService])
      getApplicationContext.startService(startTox)
      val main = new Intent(getApplicationContext, classOf[MainActivity])
      startActivity(main)
      finish()
    } else {
      val profiles = db.getAllProfiles
      db.close()
      val profileSpinner = findViewById(R.id.login_account_name).asInstanceOf[Spinner]
      val adapter = new ArrayAdapter[String](this, android.R.layout.simple_spinner_dropdown_item, profiles)
      adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
      profileSpinner.setAdapter(adapter)
      profileSpinner.setSelection(0)
      profileSpinner.setOnItemSelectedListener(this)
    }
  }

  def onItemSelected(parent: AdapterView[_], view: View, pos: Int, id: Long) {
    profileSelected = parent.getItemAtPosition(pos).toString
    if (parent.getChildAt(0) != null) // getChildAt(pos) returns a view, or null if non-existent
      parent.getChildAt(0).asInstanceOf[TextView].setTextColor(Color.BLACK)
  }

  def onNothingSelected(parent: AdapterView[_]) {
  }

  def onClickLogin(view: View) {
    val account = profileSelected
    if (account == "") {
      val context = getApplicationContext
      val text = getString(R.string.login_must_fill_in)
      val duration = Toast.LENGTH_SHORT
      val toast = Toast.makeText(context, text, duration)
      toast.show()
    } else {
      val db = new UserDB(this)
      if (db.doesUserExist(account)) {
        val details = db.getUserDetails(account)
        db.close()
        val preferences = PreferenceManager.getDefaultSharedPreferences(this)
        val editor = preferences.edit()
        editor.putBoolean("loggedin", true)
        editor.putString("active_account", account)
        editor.putString("nickname", details.nickname)
        editor.putString("password", details.password)
        editor.putString("status", details.status)
        editor.putString("status_message", details.statusMessage)
        editor.putBoolean("logging_enabled", details.loggingEnabled)
        editor.putString("avatar", details.avatarName)
        editor.apply()
        val startTox = new Intent(getApplicationContext, classOf[ToxService])
        getApplicationContext.startService(startTox)
        val main = new Intent(getApplicationContext, classOf[MainActivity])
        startActivity(main)
        finish()
      } else {
        val context = getApplicationContext
        val text = getString(R.string.login_bad_login)
        val duration = Toast.LENGTH_SHORT
        val toast = Toast.makeText(context, text, duration)
        toast.show()
      }
    }
  }

  def onClickCreateAccount(view: View) {
    val createAccount = new Intent(getApplicationContext, classOf[CreateAccountActivity])
    startActivityForResult(createAccount, 1)
    overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out)
  }

  override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
    if (requestCode == 1) {
      if (resultCode == Activity.RESULT_OK) {
        finish()
      }
    }
  }
}
Ansa89/Antox
app/src/main/scala/im/tox/antox/activities/LoginActivity.scala
Scala
gpl-3.0
4,476
package im.mange.jetpac.input import im.mange.jetboot.widget.form.FormInput import im.mange.jetpac.{Event, Js} import net.liftweb.http.SHtml import net.liftweb.http.js.JsCmd import net.liftweb.util.Helpers._ //TODO: should this really be in jetboot as it depends on external lib? //TIP: this depends on - https://github.com/RobinHerbots/jquery.inputmask (>= 3.1.62) class MaskedBox(val field: Field, default: Option[String], readOnly: Boolean = false, mask: String) extends FormInput { var value = safeDefault private def safeDefault = default getOrElse "0" private def js = s"""$$(document).ready(function () {{ |$$('#$id').inputmask(); |}});""".stripMargin //TODO: make a method on the EventHandlers to do the handler bit override def render = <div>{attachHandlersToBase(baseElement)} <script type="text/javascript">{js}</script> </div> def baseElement = SHtml.text(value, onSubmit, "id" → id, "style" → styles.render, "class" → classes.render, "data-inputmask" → mask, if (readOnly) "disabled" → s"$readOnly" else "id" → id) private def onSubmit(value: String) { this.value = value } def onKeyUp (handler: String ⇒ JsCmd): this.type = addEvents(Event.onKeyUp -> handler) override def reset = Js.setElementValue(id, safeDefault) & (if (readOnly) Js.disableElement(id) else Js.enableElement(id)) //TODO: need to override init with the JS inputmask init stuff } class NumericBox(field: Field, alias: String = "numeric", default: Option[String], readOnly: Boolean = false, autoGroup: Boolean = true, groupSeparator: String = ",", allowPlus: Boolean = false, allowMinus: Boolean = false, mask: String = "'suffix': ''") extends MaskedBox(field, default, readOnly, mask = s""" |'alias': '$alias', |'autoGroup': $autoGroup, |'groupSeparator': ',', |'placeholder': '0', |'allowPlus': $allowPlus, |'allowMinus': $allowMinus, |'autoUnmask': true, |'clearMaskOnLostFocus': false, |'selectOnClick': true, |$mask""".stripMargin) object IntegerBox { def apply(field: Field, default: Option[Long] = None) = new IntegerBox(field, default) } class IntegerBox(field: Field, default: Option[Long], readOnly: Boolean = false, autoGroup: Boolean = true, groupSeparator: String = ",", min: Long = 0, max: Long = Long.MaxValue, allowPlus: Boolean = false, allowMinus: Boolean = false) extends NumericBox(field, "integer", default.map(_.toString), readOnly, autoGroup, groupSeparator, allowPlus, allowMinus, mask = s"'min': $min,\\n'max': $max") object DecimalBox { def apply(field: Field, default: Option[BigDecimal] = None) = new DecimalBox(field, default) } class DecimalBox(field: Field, default: Option[BigDecimal], readOnly: Boolean = false, autoGroup: Boolean = true, groupSeparator: String = ",", min: BigDecimal = BigDecimal(0), max: BigDecimal = BigDecimal(Double.MaxValue), digits: String = "*", digitsOptional: Boolean = true, allowPlus: Boolean = false, allowMinus: Boolean = false, mask: String = "'suffix': ''") extends NumericBox(field, "decimal", default.map(_.toString()), readOnly, autoGroup, groupSeparator, allowPlus, allowMinus, mask = s"""'digits': '$digits', |'digitsOptional': $digitsOptional, |'min': $min, |'max': $max, |$mask""".stripMargin) object PercentageBox { def apply(field: Field, default: Option[BigDecimal] = None) = new PercentageBox(field, default) } class PercentageBox(field: Field, default: Option[BigDecimal], readOnly: Boolean = false, autoGroup: Boolean = true, groupSeparator: String = ",", min: BigDecimal = BigDecimal(0), max: BigDecimal = BigDecimal(Double.MaxValue), digits: Int = 2, digitsOptional: Boolean = false, 
allowPlus: Boolean = false, allowMinus: Boolean = false) extends DecimalBox(field, default, readOnly, autoGroup, groupSeparator, min, max, digits.toString, digitsOptional, allowPlus, allowMinus, mask = "'suffix': ' %'")
alltonp/jetboot
src/main/scala/im/mange/jetpac/input/MaskedBoxes.scala
Scala
apache-2.0
5,203
package sri.mobile.examples.uiexplorer.components

import org.scalajs.dom
import sri.core._
import sri.mobile.ReactNative
import sri.mobile.all._
import sri.universal.apis.{Layout, LayoutEvent}
import sri.mobile.examples.uiexplorer.{UIExample, UIExplorerPage}
import sri.universal.{ReactEvent, SyntheticEvent}
import sri.universal.components._
import sri.universal.styles.UniversalStyleSheet

import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
import scala.scalajs.js.{JSON, undefined, UndefOr => U}

object LayoutEventsExample extends UIExample {

  case class State(containerStyle: js.UndefOr[js.Dictionary[Any]] = undefined,
                   extraText: String = "",
                   imageLayout: js.UndefOr[Layout] = undefined,
                   textLayout: js.UndefOr[Layout] = undefined,
                   viewLayout: js.UndefOr[Layout] = undefined,
                   viewStyle: js.UndefOr[js.Dictionary[Any]] = js.undefined)

  @ScalaJSDefined
  class Component extends ReactComponent[Unit, State] {
    initialState(State(viewStyle = styles.dynamicView(20)))

    def render() = {
      UIExplorerPage(
        View(style = styles.containerStyle)(
          Text()(
            "layout events are called on mount and whenever layout is recalculated. Note that the layout event will typically be received",
            Text(style = styles.italicText)("before"),
            "the layout has updated on screen, especially when using layout animations.",
            Text(style = styles.pressText, onPress = animateViewLayout _)(" Press here to change layout.")
          ),
          View(onLayout = onViewLayout _, style = styles.view(state.viewStyle.getOrElse(null)))(
            Image(onLayout = onImageLayout _, style = styles.image, source = ImageSource(uri = "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851561_767334496626293_1958532586_n.png"))(),
            Text()(s"ViewLayout : ${JSON.stringify(state.viewLayout.getOrElse(""))} \n\n"),
            Text(style = styles.text, onLayout = onTextLayout _)(s"A simple piece of text.${state.extraText}"),
            Text()(
              s"""
                 |
                 |Text w/h : ${if (state.textLayout.isDefined) s"${state.textLayout.get.width}/${state.textLayout.get.height}" else "?/?"}
                 |Image x/y : ${if (state.imageLayout.isDefined) s"${state.imageLayout.get.x}/${state.imageLayout.get.y}" else "?/?"}
               """.stripMargin
            )
          )
        )
      )
    }

    def onViewLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received view layout event \n", e.nativeEvent)
      setState(state.copy(viewLayout = e.nativeEvent.layout))
    }

    def onTextLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received text layout event \n", e.nativeEvent)
      setState(state.copy(textLayout = e.nativeEvent.layout))
    }

    def onImageLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received image layout event \n", e.nativeEvent)
      setState(state.copy(imageLayout = e.nativeEvent.layout))
    }

    def animateViewLayout(e: ReactEvent[SyntheticEvent]) = {
      ReactNative.LayoutAnimation.configureNext(ReactNative.LayoutAnimation.Presets.spring, () => {
        println(s"layout animation done")
        setState(state.copy(extraText = " And a bunch more text to wrap around a few lines", containerStyle = styles.containerStyle))
      })
      setState(state.copy(viewStyle = styles.dynamicView(if (state.viewStyle.get.getOrElse("margin", 0).asInstanceOf[Double] > 20) 20 else 60)))
    }
  }

  val component = () => makeElement[Component]

  object styles extends UniversalStyleSheet {
    def view(another: js.Dictionary[Any]) =
      styleE(another)(padding := 12, borderColor := "black", borderWidth := 0.5, backgroundColor := "transparent")
    val text = style(alignSelf.flexStart, borderColor := "rgba(0, 0, 255, 0.2)", borderWidth := 0.5)
    val image = style(width := 50, height := 50, marginBottom := 10, alignSelf.center)
    val pressText = style(fontWeight.bold)
    val italicText = style(fontStyle.italic)
    val containerStyle = style(width := 280)
    def dynamicView(value: Double) = style(margin := value)
  }

  override def title: String = "Layout Events"

  override def description: String = "Examples that show how Layout events can be used to measure view size and position"
}
chandu0101/sri
mobile-examples/src/main/scala/sri/mobile/examples/uiexplorer/components/LayoutEventsExample.scala
Scala
apache-2.0
4,371
package org.openurp.edu.base.ws import org.beangle.commons.inject.bind.AbstractBindModule import org.openurp.edu.base.ds.code.{ CourseCategoryWS, CourseTypeWS, ExamModeWS, ExamStatusWS, StdLabelTypeWS, StdLabelWS, StdStatusWS, StdTypeWS } class DefaultModule extends AbstractBindModule { protected override def binding() { bind(classOf[StdStatusWS]) bind(classOf[StdLabelWS], classOf[StdLabelTypeWS], classOf[StdTypeWS]) bind(classOf[DirectionWS], classOf[DirectionJournalWS]) bind(classOf[MajorWS], classOf[MajorJournalWS]) bind(classOf[ProjectWS], classOf[ProjectClassroomWS], classOf[ProjectCodeWS]) bind(classOf[StudentWS], classOf[StudentStateWS]) bind(classOf[AdminclassWS]) bind(classOf[CourseWS], classOf[CourseHourWS]) bind(classOf[CourseTypeWS], classOf[CourseCategoryWS], classOf[ExamModeWS], classOf[ExamStatusWS]) } }
openurp/edu-core
base/ws/src/main/scala/org/openurp/edu/base/ws/DefaultModule.scala
Scala
gpl-3.0
883
package net.kemuridama.kafcon.route import akka.http.scaladsl.model.StatusCodes import net.kemuridama.kafcon.model.APIResponse import net.kemuridama.kafcon.service.{UsesBrokerService, MixinBrokerService} import net.kemuridama.kafcon.protocol.BrokerJsonProtocol trait BrokersAPIRoute extends APIRoute with UsesBrokerService with BrokerJsonProtocol { val route = pathPrefix("clusters" / IntNumber / "brokers") { clusterId => pathEnd { get { onSuccess(brokerService.findAll(clusterId)) { response => complete(APIResponse(Some(response))) } } } ~ pathPrefix(IntNumber) { id => pathEnd { get { onSuccess(brokerService.find(clusterId, id)) { case Some(response) => complete(APIResponse(Some(response))) case _ => complete(StatusCodes.NotFound, errorMessage("Not found")) } } } } } } private[route] object BrokersAPIRoute extends BrokersAPIRoute with MixinBrokerService trait UsesBrokersAPIRoute { val brokersAPIRoute: BrokersAPIRoute } trait MixinBrokersAPIRoute { val brokersAPIRoute = BrokersAPIRoute }
kemuridama/kafcon
src/main/scala/net/kemuridama/kafcon/route/BrokersAPIRoute.scala
Scala
mit
1,151
class Top[A] { type AType = A } trait Node { outer => type T <: Node def prepend = new Node { type T = outer.type } } class Main[NextType <: Node](value: Node { type T = NextType }) extends Top[Node { type T = NextType }] { new Main[AType]( (value: AType).prepend ) } /* we've been back-and-forth on this one -- see PRs on scala/bug#8177 for the reasoning I think it should compile and that the following error is due to broken =:= on existentials found : Node{type T = _1.type} where val _1: Node{type T = NextType} required: Node{type T = Main.this.AType} (which expands to) Node{type T = Node{type T = NextType}} I claim (omitting the forSome for brevity, even though the premature skolemization is probably the issue) _1.type =:= Main.this.AType because (1) _1.type <:< Main.this.AType and (2) Main.this.AType <:< _1.type (1), because: _1.type <:< Node{type T = NextType} (because skolemization and _1's upper bound) (2), because: Node{type T = NextType} <:< _1.type forSome val _1: Node{type T = NextType} because: Node{type T = NextType} <:< T forSome {type T <: Node{type T = NextType} with Singleton} because Node{type T = NextType} <:< Node{type T = NextType} with Singleton hmmm.. might the with Singleton be throwing a wrench in our existential house? Behold the equivalent program which type checks without the fix for scala/bug#8177. (Expand type alias, convert type member to type param; note the covariance to encode subtyping on type members.) class Node[+T <: Node[_]] { def prepend = new Node[this.type] } class Main[NextType <: Node[_]](value: Node[NextType]) { new Main(value.prepend) } */
lrytz/scala
test/files/neg/t0764.scala
Scala
apache-2.0
1,671
// Project: surf // Module: // Description: // Copyright (c) 2015 Johannes Kastner <[email protected]> // Distributed under the MIT license (see included LICENSE file) package surf.rest.test //import surf.ServiceRefFactory //import surf.rest.{RESTRequest, RESTAction, RESTResource, RESTService} //import surf.test.TestBase //import utest._ // //object RESTServiceTest extends TestBase { // import ServiceRefFactory.Sync // val eut = RESTResource("eut", new TestService) // // override val tests = TestSuite { // RESTRequest.get(Seq("a","b"),Map()) // } // // class TestService extends RESTService { // def handle = { // case _ => () // } // } //}
jokade/surf
rest/shared/src/test/scala/surf/rest/test/RESTServiceTest.scala
Scala
mit
694
/** * The MIT License (MIT) * * Copyright (c) 2018 Israel Freitas([email protected]) * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ package ifreitas.scalaaiml.elements case class Sraix(expression: TemplateExpression*) extends TemplateExpression { def toXml = <sraix>{ expression.toXml }</sraix> }
ifreitas/AimlToXml
src/main/scala/ifreitas/scalaaiml/elements/Sraix.scala
Scala
mit
1,351
package com.monovore.example.coast import com.monovore.coast import coast.flow import com.monovore.coast.flow.{Flow, Topic} import com.monovore.coast.wire.Protocol /** * Based on the discussion in this thread: * * http://mail-archives.apache.org/mod_mbox/incubator-samza-dev/201411.mbox/%3CCAFhxiSQ4V3KTt2L4CcRVHrKDRi-oS26LGCGvhSemKVPH-SW_RA@mail.gmail.com%3E */ object CustomerTransactions extends ExampleMain { import Protocol.native._ type CustomerID = String type TransactionID = String case class Customer() case class Transaction() val Customers = Topic[CustomerID, Customer]("customers") val CustomerTransactions = Topic[TransactionID, CustomerID]("customer-transactions") val Transactions = Topic[TransactionID, Transaction]("transactions") val CustomerInfo = Topic[CustomerID, (Customer, Seq[Transaction])]("customer-info") override def graph: Flow[Unit] = for { transactionsByCustomer <- Flow.stream("transactions-by-customer") { (Flow.source(Transactions).latestOption join Flow.source(CustomerTransactions).latestOption) .updates .flatMap { case (latestTransaction, allCustomers) => val both = for { transaction <- latestTransaction.toSeq customer <- allCustomers } yield customer -> transaction both.toSeq } .groupByKey } _ <- Flow.sink(CustomerInfo) { val allCustomerTransactions = transactionsByCustomer.fold(Seq.empty[Transaction]) { _ :+ _ } val latestCustomerInfo = Flow.source(Customers).latestOption (latestCustomerInfo join allCustomerTransactions) .updates .flatMap { case (customerOption, transactions) => customerOption .map { _ -> transactions } .toSeq } } } yield () }
bkirwi/coast
core/src/main/scala/com/monovore/example/coast/CustomerTransactions.scala
Scala
apache-2.0
1,822
package vexriscv.plugin import vexriscv.{plugin, _} import vexriscv.ip._ import spinal.core._ import spinal.lib._ import scala.collection.mutable.ArrayBuffer //class IBusCachedPlugin(config : InstructionCacheConfig, memoryTranslatorPortConfig : Any = null) extends Plugin[VexRiscv] { // var iBus : InstructionCacheMemBus = null // override def build(pipeline: VexRiscv): Unit = ??? //} case class TightlyCoupledBus() extends Bundle with IMasterSlave { val enable = Bool() val address = UInt(32 bits) val data = Bits(32 bits) override def asMaster(): Unit = { out(enable, address) in(data) } } case class TightlyCoupledPortParameter(name : String, hit : UInt => Bool) case class TightlyCoupledPort(p : TightlyCoupledPortParameter, var bus : TightlyCoupledBus) class IBusCachedPlugin(resetVector : BigInt = 0x80000000l, relaxedPcCalculation : Boolean = false, prediction : BranchPrediction = NONE, historyRamSizeLog2 : Int = 10, compressedGen : Boolean = false, keepPcPlus4 : Boolean = false, val config : InstructionCacheConfig, memoryTranslatorPortConfig : Any = null, injectorStage : Boolean = false, withoutInjectorStage : Boolean = false, relaxPredictorAddress : Boolean = true, predictionBuffer : Boolean = true) extends IBusFetcherImpl( resetVector = resetVector, keepPcPlus4 = keepPcPlus4, decodePcGen = compressedGen, compressedGen = compressedGen, cmdToRspStageCount = (if(config.twoCycleCache) 2 else 1) + (if(relaxedPcCalculation) 1 else 0), allowPcRegReusedForSecondStage = true, injectorReadyCutGen = false, prediction = prediction, historyRamSizeLog2 = historyRamSizeLog2, injectorStage = (!config.twoCycleCache && !withoutInjectorStage) || injectorStage, relaxPredictorAddress = relaxPredictorAddress, fetchRedoGen = true, predictionBuffer = predictionBuffer) with VexRiscvRegressionArg{ import config._ assert(isPow2(cacheSize)) assert(!(memoryTranslatorPortConfig != null && config.cacheSize/config.wayCount > 4096), "When the I$ is used with MMU, each way can't be bigger than a page (4096 bytes)") assert(!(withoutInjectorStage && injectorStage)) override def getVexRiscvRegressionArgs(): Seq[String] = { var args = List[String]() args :+= "IBUS=CACHED" args :+= s"IBUS_DATA_WIDTH=$memDataWidth" args :+= s"COMPRESSED=${if(compressedGen) "yes" else "no"}" args } var iBus : InstructionCacheMemBus = null var mmuBus : MemoryTranslatorBus = null var privilegeService : PrivilegeService = null var decodeExceptionPort : Flow[ExceptionCause] = null val tightlyCoupledPorts = ArrayBuffer[TightlyCoupledPort]() def tightlyGen = tightlyCoupledPorts.nonEmpty def newTightlyCoupledPort(p : TightlyCoupledPortParameter) = { val port = TightlyCoupledPort(p, null) tightlyCoupledPorts += port this } object FLUSH_ALL extends Stageable(Bool) object IBUS_ACCESS_ERROR extends Stageable(Bool) object IBUS_MMU_MISS extends Stageable(Bool) object IBUS_ILLEGAL_ACCESS extends Stageable(Bool) override def setup(pipeline: VexRiscv): Unit = { import Riscv._ import pipeline.config._ super.setup(pipeline) val decoderService = pipeline.service(classOf[DecoderService]) decoderService.addDefault(FLUSH_ALL, False) decoderService.add(FENCE_I, List( FLUSH_ALL -> True )) if(catchSomething) { val exceptionService = pipeline.service(classOf[ExceptionService]) decodeExceptionPort = exceptionService.newExceptionPort(pipeline.decode,1) } if(pipeline.serviceExist(classOf[MemoryTranslator])) mmuBus = pipeline.service(classOf[MemoryTranslator]).newTranslationPort(MemoryTranslatorPort.PRIORITY_INSTRUCTION, memoryTranslatorPortConfig) privilegeService = 
pipeline.serviceElse(classOf[PrivilegeService], PrivilegeServiceDefault()) if(pipeline.serviceExist(classOf[ReportService])){ val report = pipeline.service(classOf[ReportService]) report.add("iBus" -> { val e = new BusReport() val c = new CacheReport() e.kind = "cached" e.flushInstructions.add(0x100F) //FENCE.I e.flushInstructions.add(0x13) e.flushInstructions.add(0x13) e.flushInstructions.add(0x13) e.info = c c.size = cacheSize c.bytePerLine = bytePerLine e }) } } override def build(pipeline: VexRiscv): Unit = { import pipeline._ import pipeline.config._ pipeline plug new FetchArea(pipeline) { val cache = new InstructionCache(IBusCachedPlugin.this.config.copy(bypassGen = tightlyGen), if(mmuBus != null) mmuBus.p else MemoryTranslatorBusParameter(0,0)) iBus = master(new InstructionCacheMemBus(IBusCachedPlugin.this.config)).setName("iBus") iBus <> cache.io.mem iBus.cmd.address.allowOverride := cache.io.mem.cmd.address //Memory bandwidth counter val rspCounter = RegInit(UInt(32 bits)) init(0) when(iBus.rsp.valid){ rspCounter := rspCounter + 1 } val stageOffset = if(relaxedPcCalculation) 1 else 0 def stages = iBusRsp.stages.drop(stageOffset) tightlyCoupledPorts.foreach(p => p.bus = master(TightlyCoupledBus()).setName(p.p.name)) val s0 = new Area { //address decoding val tightlyCoupledHits = Vec(tightlyCoupledPorts.map(_.p.hit(stages(0).input.payload))) val tightlyCoupledHit = tightlyCoupledHits.orR for((port, hit) <- (tightlyCoupledPorts, tightlyCoupledHits).zipped){ port.bus.enable := stages(0).input.fire && hit port.bus.address := stages(0).input.payload(31 downto 2) @@ U"00" } //Connect prefetch cache side cache.io.cpu.prefetch.isValid := stages(0).input.valid && !tightlyCoupledHit cache.io.cpu.prefetch.pc := stages(0).input.payload stages(0).halt setWhen (cache.io.cpu.prefetch.haltIt) if(mmuBus != null && mmuBus.p.latency == 1) { stages(0).halt setWhen(mmuBus.busy) mmuBus.cmd(0).isValid := cache.io.cpu.prefetch.isValid mmuBus.cmd(0).isStuck := !stages(0).input.ready mmuBus.cmd(0).virtualAddress := cache.io.cpu.prefetch.pc mmuBus.cmd(0).bypassTranslation := False } } val s1 = new Area { val tightlyCoupledHits = RegNextWhen(s0.tightlyCoupledHits, stages(1).input.ready) val tightlyCoupledHit = RegNextWhen(s0.tightlyCoupledHit, stages(1).input.ready) if(tightlyGen) cache.io.cpu.fetch.dataBypassValid := tightlyCoupledHit if(tightlyGen) cache.io.cpu.fetch.dataBypass := MuxOH(tightlyCoupledHits, tightlyCoupledPorts.map(e => CombInit(e.bus.data))) //Connect fetch cache side cache.io.cpu.fetch.isValid := stages(1).input.valid && !tightlyCoupledHit cache.io.cpu.fetch.isStuck := !stages(1).input.ready cache.io.cpu.fetch.pc := stages(1).input.payload if(mmuBus != null) { mmuBus.cmd.last.isValid := cache.io.cpu.fetch.isValid mmuBus.cmd.last.isStuck := !stages(1).input.ready mmuBus.cmd.last.virtualAddress := cache.io.cpu.fetch.pc mmuBus.cmd.last.bypassTranslation := False mmuBus.end := stages(1).input.ready || externalFlush if (mmuBus.p.latency == 0) stages(1).halt setWhen (mmuBus.busy) } if (!twoCycleCache) { cache.io.cpu.fetch.isUser := privilegeService.isUser() } } val s2 = twoCycleCache generate new Area { val tightlyCoupledHit = RegNextWhen(s1.tightlyCoupledHit, stages(2).input.ready) cache.io.cpu.decode.isValid := stages(2).input.valid && !tightlyCoupledHit cache.io.cpu.decode.isStuck := !stages(2).input.ready cache.io.cpu.decode.pc := stages(2).input.payload cache.io.cpu.decode.isUser := privilegeService.isUser() if ((!twoCycleRam || wayCount == 1) && !compressedGen && !injectorStage) { 
          decode.insert(INSTRUCTION_ANTICIPATED) := Mux(decode.arbitration.isStuck, decode.input(INSTRUCTION), cache.io.cpu.fetch.data)
        }
      }

      val rsp = new Area {
        val iBusRspOutputHalt = False
        val cacheRsp = if (twoCycleCache) cache.io.cpu.decode else cache.io.cpu.fetch
        val cacheRspArbitration = stages(if (twoCycleCache) 2 else 1)
        var issueDetected = False
        val redoFetch = False

        //Refill / redo
        assert(decodePcGen == compressedGen)
        cache.io.cpu.fill.valid := redoFetch && !cacheRsp.mmuRefilling
        cache.io.cpu.fill.payload := cacheRsp.physicalAddress

        if (catchSomething) {
          decodeExceptionPort.valid := False
          decodeExceptionPort.code.assignDontCare()
          decodeExceptionPort.badAddr := cacheRsp.pc(31 downto 2) @@ U"00"
        }

        when(cacheRsp.isValid && cacheRsp.mmuRefilling && !issueDetected) {
          issueDetected \= True
          redoFetch := True
        }

        if (catchIllegalAccess) when(cacheRsp.isValid && cacheRsp.mmuException && !issueDetected) {
          issueDetected \= True
          decodeExceptionPort.valid := iBusRsp.readyForError
          decodeExceptionPort.code := 12
        }

        when(cacheRsp.isValid && cacheRsp.cacheMiss && !issueDetected) {
          issueDetected \= True
          cache.io.cpu.fill.valid := True
          redoFetch := True
        }

        if (catchAccessFault) when(cacheRsp.isValid && cacheRsp.error && !issueDetected) {
          issueDetected \= True
          decodeExceptionPort.valid := iBusRsp.readyForError
          decodeExceptionPort.code := 1
        }

        when(redoFetch) {
          iBusRsp.redoFetch := True
        }

        cacheRspArbitration.halt setWhen (issueDetected || iBusRspOutputHalt)
        iBusRsp.output.valid := cacheRspArbitration.output.valid
        cacheRspArbitration.output.ready := iBusRsp.output.ready
        iBusRsp.output.rsp.inst := cacheRsp.data
        iBusRsp.output.pc := cacheRspArbitration.output.payload
      }

      if (mmuBus != null) {
        cache.io.cpu.fetch.mmuRsp <> mmuBus.rsp
      } else {
        cache.io.cpu.fetch.mmuRsp.physicalAddress := cache.io.cpu.fetch.pc
        cache.io.cpu.fetch.mmuRsp.allowExecute := True
        cache.io.cpu.fetch.mmuRsp.allowRead := True
        cache.io.cpu.fetch.mmuRsp.allowWrite := True
        cache.io.cpu.fetch.mmuRsp.isIoAccess := False
        cache.io.cpu.fetch.mmuRsp.exception := False
        cache.io.cpu.fetch.mmuRsp.refilling := False
      }

      val flushStage = decode
      cache.io.flush := flushStage.arbitration.isValid && flushStage.input(FLUSH_ALL)
    }
  }
}
SpinalHDL/VexRiscv
src/main/scala/vexriscv/plugin/IBusCachedPlugin.scala
Scala
mit
10,881
/* * Copyright 2011-2022 GatlingCorp (https://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.jms.check import java.util.{ HashMap => JHashMap } import javax.jms._ import io.gatling.BaseSpec import io.gatling.commons.validation._ import io.gatling.core.EmptySession import io.gatling.jms._ class JmsSimpleCheckSpec extends BaseSpec with JmsDsl with MockMessage with EmptySession { private val check = simpleCheck { case tm: TextMessage => tm.getText == "OK" case _ => false } "simple check" should "return success if condition is true" in { check.check(textMessage("OK"), emptySession, new JHashMap[Any, Any]) shouldBe a[Success[_]] } it should "return failure if condition is false" in { check.check(textMessage("KO"), emptySession, new JHashMap[Any, Any]) shouldBe a[Failure] } it should "return failure if message is not TextMessage" in { check.check(message, emptySession, new JHashMap[Any, Any]) shouldBe a[Failure] } }
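// --- Hedged aside (editor's illustration, not part of the Gatling suite) ------
// The same `simpleCheck` shape extends to other javax.jms.Message subtypes; a
// hypothetical check against a MapMessage property might look like this
// (assuming the usual `io.gatling.jms.Predef._` import provides simpleCheck):
import javax.jms._

import io.gatling.jms.Predef._

object MapMessageCheckSketch {
  val statusIsOk = simpleCheck {
    case mm: MapMessage => mm.getString("status") == "OK"
    case _              => false
  }
}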
gatling/gatling
gatling-jms/src/test/scala/io/gatling/jms/check/JmsSimpleCheckSpec.scala
Scala
apache-2.0
1,526
package de.tobbra.syringe

import scala.collection.mutable
import scala.reflect.runtime.universe._

import de.tobbra.syringe.bindings._
import de.tobbra.syringe.modules._
import de.tobbra.syringe._

abstract class DefaultFactory[C <: Context](val modules: Module[C]*) extends Factory {
  val context: C

  private val bindings = mutable.Map[Type, (Binding[C], String)]()

  for {
    m <- modules
    b <- m.bindings
    t = b.providedType.map(_.normalize)
  } yield if (bindings.contains(t)) {
    val other = bindings.get(t).get
    val otherName = other._2
    if (otherName == m.name)
      throw new Error(s"Duplicate binding for type $t in module ${m.name}")
    else
      throw new Error(s"Conflicting binding for type $t in modules $otherName and ${m.name}")
  } else {
    bindings.put(t, (b, m.name))
  }

  def provide(t: Type): () => Any = {
    val candidates = bindings.keys.filter(_ <:< t).toIndexedSeq
    if (candidates.isEmpty) throw new Error(s"Found no binding for type $t.")

    val exactMatches = candidates.filter(_ =:= t).toIndexedSeq
    if (exactMatches.size == 1) return makeProvider(t, bindings.get(exactMatches(0)).get._1)

    if (candidates.size > 1) {
      def fmtBinding(t: Type, v: (Binding[C], String)) =
        s"$t -> ${v._1.providedType.map(_.normalize)} in '${v._2}'"
      val candidateList = candidates.map(c => fmtBinding(c, bindings.get(c).get))
      throw new Error(s"Ambiguous binding for $t, candidates are: $candidateList.")
    }

    makeProvider(t, bindings.get(candidates(0)).get._1)
  }

  private def makeProvider(t: Type, b: Binding[C]): () => Any = {
    val scope = b.scope(context)
    () => scope.get(t, b.provider(context))
  }
}
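// --- Hedged aside (editor's illustration; names below are hypothetical) -------
// Resolution order implemented by `provide` above:
//   1. no binding <:< the requested type             -> "Found no binding" error;
//   2. exactly one binding =:= the requested type    -> use it;
//   3. more than one subtype candidate remains       -> "Ambiguous binding" error;
//   4. exactly one subtype candidate, no exact match -> use that candidate.
// A call site would look roughly like:
//
//   val factory: DefaultFactory[MyContext] = ...
//   val greeter = factory.provide(typeOf[Greeter])().asInstanceOf[Greeter]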
TobBrandt/syringe
source/scala/de.tobbra.syringe/src/de/tobbra/syringe/DefaultFactory.scala
Scala
bsd-2-clause
1,826
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.streaming.kafka010 import java.{ util => ju } import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord, KafkaConsumer } import org.apache.kafka.common.{ KafkaException, TopicPartition } import org.apache.spark.internal.Logging /** * Consumer of single topicpartition, intended for cached reuse. * Underlying consumer is not threadsafe, so neither is this, * but processing the same topicpartition and group id in multiple threads is usually bad anyway. */ private[kafka010] class CachedKafkaConsumer[K, V] private( val groupId: String, val topic: String, val partition: Int, val kafkaParams: ju.Map[String, Object]) extends Logging { require(groupId == kafkaParams.get(ConsumerConfig.GROUP_ID_CONFIG), "groupId used for cache key must match the groupId in kafkaParams") val topicPartition = new TopicPartition(topic, partition) protected val consumer = { val c = new KafkaConsumer[K, V](kafkaParams) val tps = new ju.ArrayList[TopicPartition]() tps.add(topicPartition) c.assign(tps) c } // TODO if the buffer was kept around as a random-access structure, // could possibly optimize re-calculating of an RDD in the same batch protected var buffer = ju.Collections.emptyListIterator[ConsumerRecord[K, V]]() protected var nextOffset = -2L def close(): Unit = consumer.close() /** * Get the record for the given offset, waiting up to timeout ms if IO is necessary. * Sequential forward access will use buffers, but random access will be horribly inefficient. */ def get(offset: Long, timeout: Long): ConsumerRecord[K, V] = { logDebug(s"Get $groupId $topic $partition nextOffset $nextOffset requested $offset") if (offset != nextOffset) { logInfo(s"Initial fetch for $groupId $topic $partition $offset") seek(offset) poll(timeout) } if (!buffer.hasNext()) { poll(timeout) } require(buffer.hasNext(), s"Failed to get records for $groupId $topic $partition $offset after polling for $timeout") var record = buffer.next() if (record.offset != offset) { logInfo(s"Buffer miss for $groupId $topic $partition $offset") seek(offset) poll(timeout) require(buffer.hasNext(), s"Failed to get records for $groupId $topic $partition $offset after polling for $timeout") record = buffer.next() require(record.offset == offset, s"Got wrong record for $groupId $topic $partition even after seeking to offset $offset " + s"got offset ${record.offset} instead. 
If this is a compacted topic, consider enabling " + "spark.streaming.kafka.allowNonConsecutiveOffsets" ) } nextOffset = offset + 1 record } /** * Start a batch on a compacted topic */ def compactedStart(offset: Long, timeout: Long): Unit = { logDebug(s"compacted start $groupId $topic $partition starting $offset") // This seek may not be necessary, but it's hard to tell due to gaps in compacted topics if (offset != nextOffset) { logInfo(s"Initial fetch for compacted $groupId $topic $partition $offset") seek(offset) poll(timeout) } } /** * Get the next record in the batch from a compacted topic. * Assumes compactedStart has been called first, and ignores gaps. */ def compactedNext(timeout: Long): ConsumerRecord[K, V] = { if (!buffer.hasNext()) { poll(timeout) } require(buffer.hasNext(), s"Failed to get records for compacted $groupId $topic $partition after polling for $timeout") val record = buffer.next() nextOffset = record.offset + 1 record } /** * Rewind to previous record in the batch from a compacted topic. * @throws NoSuchElementException if no previous element */ def compactedPrevious(): ConsumerRecord[K, V] = { buffer.previous() } private def seek(offset: Long): Unit = { logDebug(s"Seeking to $topicPartition $offset") consumer.seek(topicPartition, offset) } private def poll(timeout: Long): Unit = { val p = consumer.poll(timeout) val r = p.records(topicPartition) logDebug(s"Polled ${p.partitions()} ${r.size}") buffer = r.listIterator } } private[kafka010] object CachedKafkaConsumer extends Logging { private case class CacheKey(groupId: String, topic: String, partition: Int) // Don't want to depend on guava, don't want a cleanup thread, use a simple LinkedHashMap private var cache: ju.LinkedHashMap[CacheKey, CachedKafkaConsumer[_, _]] = null /** Must be called before get, once per JVM, to configure the cache. Further calls are ignored */ def init( initialCapacity: Int, maxCapacity: Int, loadFactor: Float): Unit = CachedKafkaConsumer.synchronized { if (null == cache) { logInfo(s"Initializing cache $initialCapacity $maxCapacity $loadFactor") cache = new ju.LinkedHashMap[CacheKey, CachedKafkaConsumer[_, _]]( initialCapacity, loadFactor, true) { override def removeEldestEntry( entry: ju.Map.Entry[CacheKey, CachedKafkaConsumer[_, _]]): Boolean = { if (this.size > maxCapacity) { try { entry.getValue.consumer.close() } catch { case x: KafkaException => logError("Error closing oldest Kafka consumer", x) } true } else { false } } } } } /** * Get a cached consumer for groupId, assigned to topic and partition. * If matching consumer doesn't already exist, will be created using kafkaParams. */ def get[K, V]( groupId: String, topic: String, partition: Int, kafkaParams: ju.Map[String, Object]): CachedKafkaConsumer[K, V] = CachedKafkaConsumer.synchronized { val k = CacheKey(groupId, topic, partition) val v = cache.get(k) if (null == v) { logInfo(s"Cache miss for $k") logDebug(cache.keySet.toString) val c = new CachedKafkaConsumer[K, V](groupId, topic, partition, kafkaParams) cache.put(k, c) c } else { // any given topicpartition should have a consistent key and value type v.asInstanceOf[CachedKafkaConsumer[K, V]] } } /** * Get a fresh new instance, unassociated with the global cache. 
* Caller is responsible for closing */ def getUncached[K, V]( groupId: String, topic: String, partition: Int, kafkaParams: ju.Map[String, Object]): CachedKafkaConsumer[K, V] = new CachedKafkaConsumer[K, V](groupId, topic, partition, kafkaParams) /** remove consumer for given groupId, topic, and partition, if it exists */ def remove(groupId: String, topic: String, partition: Int): Unit = { val k = CacheKey(groupId, topic, partition) logInfo(s"Removing $k from cache") val v = CachedKafkaConsumer.synchronized { cache.remove(k) } if (null != v) { v.close() logInfo(s"Removed $k from cache") } } }
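// --- Hedged aside (editor's illustration, not part of Spark) -------------------
// Intended lifecycle of the cache above. The class is private[kafka010], so real
// callers live in the same package; the topic, group and capacities below are
// made up, and kafkaParams would also need key/value deserializers configured.
object ConsumerCacheSketch {
  import java.{util => ju}
  import org.apache.kafka.clients.consumer.ConsumerConfig

  def sketch(): Unit = {
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group")
    kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")

    CachedKafkaConsumer.init(initialCapacity = 16, maxCapacity = 64, loadFactor = 0.75f)

    val consumer = CachedKafkaConsumer.get[String, String]("example-group", "example-topic", 0, kafkaParams)
    val record = consumer.get(offset = 0L, timeout = 1000L) // sequential reads after this hit the buffer

    CachedKafkaConsumer.remove("example-group", "example-topic", 0)
  }
}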
brad-kaiser/spark
external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/CachedKafkaConsumer.scala
Scala
apache-2.0
7,844
/* * Copyright 2017-2022 John Snow Labs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.johnsnowlabs.nlp import org.apache.spark.ml.PipelineStage import org.apache.spark.ml.param.BooleanParam trait CanBeLazy { this: PipelineStage => val lazyAnnotator: BooleanParam = new BooleanParam(this, "lazyAnnotator", "Whether this AnnotatorModel acts as lazy in RecursivePipelines") def setLazyAnnotator(value: Boolean): this.type = set(lazyAnnotator, value) def getLazyAnnotator: Boolean = $(lazyAnnotator) setDefault(lazyAnnotator, false) }
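// --- Hedged aside (editor's illustration, not part of Spark NLP) ---------------
// Any PipelineStage mixing in CanBeLazy gains the lazyAnnotator param, e.g.
// (hypothetical annotator; real annotators extend richer Spark NLP base types):
//
//   class MyAnnotator(override val uid: String) extends Transformer with CanBeLazy { ... }
//
//   val annotator = new MyAnnotator("my_annotator").setLazyAnnotator(true)
//   annotator.getLazyAnnotator   // true: treated as lazy by RecursivePipelines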
JohnSnowLabs/spark-nlp
src/main/scala/com/johnsnowlabs/nlp/CanBeLazy.scala
Scala
apache-2.0
1,077
package entitytled.test.hooks.model import entitytled.Entitytled import scala.concurrent.ExecutionContext trait PersonComponent { self: Entitytled with PersonHooksCounterComponent => import driver.api._ case class Person(id: Option[Long], name: String) extends Entity[Person, Long] object Person extends EntityCompanion[Persons, Person, Long] { override protected def afterInsert(id: Long, instance: Person) (implicit ec: ExecutionContext) : DBIO[Unit] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(afterInsertCount = c.afterInsertCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(id, afterInsertCount = 1) } } yield () override protected def beforeUpdate(instance: Person) (implicit ec: ExecutionContext) : DBIO[Person] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === instance.id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(beforeUpdateCount = c.beforeUpdateCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(instance.id.get, beforeUpdateCount = 1) } } yield instance override protected def afterUpdate(id: Long, instance: Person) (implicit ec: ExecutionContext) : DBIO[Unit] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(afterUpdateCount = c.afterUpdateCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(id, afterUpdateCount = 1) } } yield () override protected def afterSave(id: Long, instance: Person) (implicit ec: ExecutionContext) : DBIO[Unit] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(afterSaveCount = c.afterSaveCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(id, afterSaveCount = 1) } } yield () override protected def beforeDelete(id: Long)(implicit ec: ExecutionContext): DBIO[Unit] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(beforeDeleteCount = c.beforeDeleteCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(id, beforeDeleteCount = 1) } } yield () override protected def afterDelete(id: Long)(implicit ec: ExecutionContext): DBIO[Unit] = for { counter <- TableQuery[PersonHooksCounters].filter(_.personID === id).result.headOption _ <- counter match { case Some(c) => TableQuery[PersonHooksCounters].filter(_.personID === c.personID) .update(c.copy(afterDeleteCount = c.afterDeleteCount + 1)) case None => TableQuery[PersonHooksCounters] += PersonHooksCounter(id, afterDeleteCount = 1) } } yield () } class Persons(tag: Tag) extends EntityTable[Person, Long](tag, "PERSONS") { def id = column[Long]("id", O.PrimaryKey, O.AutoInc) def name = column[String]("name") def * = (id.?, name) <> ((Person.apply _).tupled, Person.unapply) } }
RSSchermer/entitytled
test/src/test/scala/entitytled/test/hooks/model/PersonComponent.scala
Scala
mit
4,036
package org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.simulacrum

import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSimpleTypeElement, ScParameterizedTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAssignStmt, ScAnnotation}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.SyntheticMembersInjector
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.SyntheticMembersInjector.Kind
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}

/**
 * @author Alefas
 * @since 17/09/15
 */
class SimulacrumInjection extends SyntheticMembersInjector {
  override def needsCompanionObject(source: ScTypeDefinition): Boolean = {
    source.findAnnotation("simulacrum.typeclass") != null && source.typeParameters.length == 1
  }

  override def injectFunctions(source: ScTypeDefinition): Seq[String] = {
    source match {
      case obj: ScObject =>
        obj.fakeCompanionClassOrCompanionClass match {
          case clazz: ScTypeDefinition if clazz.findAnnotation("simulacrum.typeclass") != null &&
            clazz.typeParameters.length == 1 =>
            val tpName = clazz.typeParameters.head.name
            Seq(s"def apply[$tpName](implicit instance: ${clazz.name}[$tpName]): ${clazz.name}[$tpName] = instance")
          case _ => Seq.empty
        }
      case _ => Seq.empty
    }
  }

  override def injectInners(source: ScTypeDefinition): Seq[String] = {
    source match {
      case obj: ScObject =>
        ScalaPsiUtil.getCompanionModule(obj) match {
          case Some(clazz) if clazz.findAnnotation("simulacrum.typeclass") != null &&
            clazz.typeParameters.length == 1 =>
            val tpName = clazz.typeParameters.head.name
            val clazzTpt = new ScTypeParameterType(clazz.typeParameters.head, ScSubstitutor.empty)
            val ops = clazz.functions.flatMap {
              case f: ScFunction =>
                if (f.parameters.headOption.flatMap(_.getType(TypingContext.empty).toOption)
                  .exists(_.equiv(clazzTpt))) {
                  val annotation = f.findAnnotation("simulacrum.op")
                  val names = annotation match {
                    case a: ScAnnotation =>
                      a.constructor.args match {
                        case Some(args) =>
                          args.exprs.headOption match {
                            case Some(l: ScLiteral) if l.isString =>
                              l.getValue match {
                                case value: String =>
                                  args.exprs match {
                                    case Seq(_, second) =>
                                      second match {
                                        case l: ScLiteral if l.getValue == true => Seq(value, f.name)
                                        case a: ScAssignStmt =>
                                          a.getRExpression match {
                                            case Some(l: ScLiteral) if l.getValue == true => Seq(value, f.name)
                                            case _ => Seq(value)
                                          }
                                        case _ => Seq(value)
                                      }
                                    case _ => Seq(value)
                                  }
                                case _ => Seq(f.name)
                              }
                            case _ => Seq(f.name)
                          }
                        case None => Seq(f.name)
                      }
                    case _ => Seq(f.name)
                  }
                  names.map { case name =>
                    val typeParamClause = f.typeParametersClause.map(_.getText).getOrElse("")
                    val restHeadClause = f.paramClauses.clauses.head.parameters.tail.map(_.getText).mkString(", ")
                    val restClauses = f.paramClauses.clauses.tail.map(_.getText).mkString("")
                    s"def $name$typeParamClause($restHeadClause)$restClauses: $tpName = ???"
                  }
                } else Seq.empty
            }.mkString("\n ")

            val className = clazz.name
            val OpsTrait = s"""trait Ops[$tpName] {
                              | def typeClassInstance: $className[$tpName]
                              | def self: $tpName
                              | $ops
                              |}""".stripMargin
            val ToOpsTrait = s"""trait To${className}Ops {
                                | implicit def to${className}Ops[$tpName](target: $tpName)(implicit tc: $className[$tpName]): $className.Ops[$tpName] = ???
                                |}
                              """.stripMargin

            val AllOpsSupers = clazz.extendsBlock.templateParents.toSeq.flatMap(parents =>
              parents.typeElements.flatMap {
                case te => te.getType(TypingContext.empty) match {
                  case Success(ScParameterizedType(classType, Seq(tp)), _) if tp.equiv(clazzTpt) =>
                    def fromType: Seq[String] = {
                      ScType.extractClass(classType, Some(clazz.getProject)) match {
                        case Some(cl: ScTypeDefinition) => Seq(s" with ${cl.qualifiedName}.AllOps[$tpName]")
                        case _ => Seq.empty
                      }
                    }
                    //in most cases we have to resolve exactly the same reference
                    //but with .AllOps it will go into companion object
                    (for {
                      ScParameterizedTypeElement(pte, _) <- Option(te)
                      ScSimpleTypeElement(Some(ref)) <- Option(pte)
                    } yield Seq(s" with ${ref.getText}.AllOps[$tpName]")).getOrElse(fromType)
                  case _ => Seq.empty
                }
              }).mkString
            val AllOpsTrait = s"""trait AllOps[$tpName] extends $className.Ops[$tpName]$AllOpsSupers {
                                 | def typeClassInstance: $className[$tpName]
                                 |}
                               """.stripMargin
            val opsObject = s"""object ops {
                               | implicit def toAll${className}Ops[$tpName](target: $tpName)(implicit tc: $className[$tpName]): $className.AllOps[$tpName] = ???
                               |}
                             """.stripMargin
            Seq(OpsTrait, ToOpsTrait, AllOpsTrait, opsObject)
          case _ => Seq.empty
        }
      case _ => Seq.empty
    }
  }
}
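// --- Hedged aside (editor's illustration, not part of the plugin) --------------
// For a hypothetical source definition such as
//
//   @simulacrum.typeclass trait Semigroup[A] {
//     @simulacrum.op("|+|", alias = true) def append(x: A, y: A): A
//   }
//
// the injector above synthesizes, roughly, the following members into the
// companion object (bodies are ??? because the IDE only needs the signatures):
//
//   def apply[A](implicit instance: Semigroup[A]): Semigroup[A] = instance
//   trait Ops[A] {
//     def typeClassInstance: Semigroup[A]
//     def self: A
//     def |+|(y: A): A = ???
//     def append(y: A): A = ???   // alias = true keeps the original name too
//   }
//   trait ToSemigroupOps { implicit def toSemigroupOps[A](target: A)(implicit tc: Semigroup[A]): Semigroup.Ops[A] = ??? }
//   trait AllOps[A] extends Semigroup.Ops[A] { def typeClassInstance: Semigroup[A] }
//   object ops { implicit def toAllSemigroupOps[A](target: A)(implicit tc: Semigroup[A]): Semigroup.AllOps[A] = ??? }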
SergeevPavel/intellij-scala
src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/simulacrum/SimulacrumInjection.scala
Scala
apache-2.0
6,958
package im.tox.antox.activities import java.util import android.content.{Context, SharedPreferences} import android.os.Bundle import android.preference.PreferenceManager import android.support.v7.app.{ActionBar, AppCompatActivity} import android.text.InputFilter.LengthFilter import android.text.{Editable, InputFilter, TextWatcher} import android.util.Log import android.view.{Menu, MenuInflater, View} import android.widget._ import im.tox.antox.adapters.ChatMessagesAdapter import im.tox.antox.data.AntoxDB import im.tox.antox.tox.{Reactive, ToxSingleton} import im.tox.antox.utils.Constants import im.tox.antox.wrapper.{ToxKey, Message} import im.tox.antoxnightly.R import rx.lang.scala.schedulers.{AndroidMainThreadScheduler, IOScheduler} import rx.lang.scala.{Observable, Subscription} import scala.collection.JavaConversions._ import scala.concurrent.duration._ abstract class GenericChatActivity extends AppCompatActivity { val TAG: String = "im.tox.antox.activities.ChatActivity" //var ARG_CONTACT_NUMBER: String = "contact_number" var adapter: ChatMessagesAdapter = null var messageBox: EditText = null var isTypingBox: TextView = null var statusTextBox: TextView = null var chatListView: ListView = null var displayNameView: TextView = null var statusIconView: View = null var avatarActionView: View = null var messagesSub: Subscription = null var progressSub: Subscription = null var titleSub: Subscription = null var activeKey: ToxKey = null var scrolling: Boolean = false val MESSAGE_LENGTH_LIMIT = Constants.MAX_MESSAGE_LENGTH * 50 override def onCreate(savedInstanceState: Bundle) = { super.onCreate(savedInstanceState) overridePendingTransition(R.anim.slide_from_right, R.anim.fade_scale_out) setContentView(R.layout.activity_chat) val actionBar = getSupportActionBar val avatarView = getLayoutInflater.inflate(R.layout.avatar_actionview, null) actionBar.setCustomView(avatarView) actionBar.setDisplayOptions(ActionBar.DISPLAY_SHOW_CUSTOM) val extras: Bundle = getIntent.getExtras activeKey = new ToxKey(extras.getString("key")) val thisActivity = this Log.d(TAG, "key = " + activeKey) val preferences = PreferenceManager.getDefaultSharedPreferences(this) val antoxDB = new AntoxDB(this) adapter = new ChatMessagesAdapter(this, getMessageList, antoxDB.getMessageIds(Some(activeKey), preferences.getBoolean("action_messages", false))) displayNameView = this.findViewById(R.id.displayName).asInstanceOf[TextView] statusIconView = this.findViewById(R.id.icon) avatarActionView = this.findViewById(R.id.avatarActionView) avatarActionView.setOnClickListener(new View.OnClickListener() { override def onClick(v: View) { thisActivity.finish() } }) chatListView = this.findViewById(R.id.chatMessages).asInstanceOf[ListView] chatListView.setTranscriptMode(AbsListView.TRANSCRIPT_MODE_NORMAL) chatListView.setStackFromBottom(true) chatListView.setAdapter(adapter) chatListView.setOnScrollListener(new AbsListView.OnScrollListener() { override def onScrollStateChanged(view: AbsListView, scrollState: Int) { scrolling = !(scrollState == AbsListView.OnScrollListener.SCROLL_STATE_IDLE) } override def onScroll(view: AbsListView, firstVisibleItem: Int, visibleItemCount: Int, totalItemCount: Int) { } }) val b = this.findViewById(R.id.sendMessageButton) b.setOnClickListener(new View.OnClickListener() { override def onClick(v: View) { onSendMessage() setTyping(typing = false) } }) messageBox = this.findViewById(R.id.yourMessage).asInstanceOf[EditText] messageBox.setFilters(Array[InputFilter](new LengthFilter(MESSAGE_LENGTH_LIMIT))) 
messageBox.addTextChangedListener(new TextWatcher() { override def beforeTextChanged(charSequence: CharSequence, start: Int, count: Int, after: Int) { val isTyping = after > 0 setTyping(isTyping) } override def onTextChanged(charSequence: CharSequence, start: Int, count: Int, after: Int) { } override def afterTextChanged(editable: Editable) { } }) } override def onCreateOptionsMenu(menu: Menu): Boolean = { // Inflate the menu items for use in the action bar val inflater: MenuInflater = getMenuInflater inflater.inflate(R.menu.chat_activity, menu) super.onCreateOptionsMenu(menu) } def setDisplayName(name: String) = { this.displayNameView.setText(name) } override def onResume() = { super.onResume() Reactive.activeKey.onNext(Some(activeKey)) Reactive.chatActive.onNext(true) val antoxDB = new AntoxDB(getApplicationContext) antoxDB.markIncomingMessagesRead(activeKey) ToxSingleton.updateMessages(getApplicationContext) messagesSub = Reactive.updatedMessages.subscribe(x => { Log.d(TAG, "Messages updated") updateChat() antoxDB.close() }) progressSub = Observable.interval(500 milliseconds) .observeOn(AndroidMainThreadScheduler()) .subscribe(x => { if (!scrolling) { updateProgress() } }) } def updateChat() = { val observable: Observable[util.ArrayList[Message]] = Observable((observer) => { val cursor: util.ArrayList[Message] = getMessageList observer.onNext(cursor) observer.onCompleted() }) observable .subscribeOn(IOScheduler()) .observeOn(AndroidMainThreadScheduler()) .subscribe((messageList: util.ArrayList[Message]) => { //FIXME make this more efficient adapter.setNotifyOnChange(false) adapter.clear() //add all is not available on api 10 for (message <- messageList) { adapter.add(message) } adapter.notifyDataSetChanged() Log.d(TAG, "changing chat list cursor") }) Log.d("ChatFragment", "new key: " + activeKey) } private def updateProgress() { val start = chatListView.getFirstVisiblePosition val end = chatListView.getLastVisiblePosition for (i <- start to end) { val view = chatListView.getChildAt(i - start) chatListView.getAdapter.getView(i, view, chatListView) } } def validateMessageBox(): Option[String] = { if (messageBox.getText != null && messageBox.getText.toString.length() == 0) { return None } var msg: String = null if (messageBox.getText != null) { msg = messageBox.getText.toString } else { msg = "" } Some(msg) } private def onSendMessage() { Log.d(TAG, "sendMessage") val mMessage = validateMessageBox() mMessage.foreach(rawMessage => { messageBox.setText("") val meMessagePrefix = "/me " val isAction = rawMessage.startsWith(meMessagePrefix) val message = if (isAction) { rawMessage.replaceFirst(meMessagePrefix, "") } else { rawMessage } sendMessage(message, isAction, this) }) } def getMessageList: util.ArrayList[Message] = { val antoxDB = new AntoxDB(this) val preferences: SharedPreferences = PreferenceManager.getDefaultSharedPreferences(this) val messageList: util.ArrayList[Message] = antoxDB.getMessageList(Some(activeKey), preferences.getBoolean("action_messages", true)) messageList } override def onPause() = { super.onPause() Reactive.chatActive.onNext(false) if (isFinishing) overridePendingTransition(R.anim.fade_scale_in, R.anim.slide_to_right) messagesSub.unsubscribe() progressSub.unsubscribe() } //Abstract Methods def sendMessage(message: String, isAction: Boolean, context: Context): Unit def setTyping(typing: Boolean): Unit }
mGhassen/Antox
app/src/main/scala/im/tox/antox/activities/GenericChatActivity.scala
Scala
gpl-3.0
7,719
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.abondar.scalabasic

class Rational(n: Int, d: Int) {

  require(d != 0)

  private val g = gcd(n.abs, d.abs)
  val numer: Int = n / g
  val denom: Int = d / g

  def this(n: Int) = this(n, 1) // auxiliary constructor

  override def toString = numer + "/" + denom

  def +(that: Rational): Rational =
    new Rational(numer * that.denom + that.numer * denom, denom * that.denom)

  def +(i: Int): Rational = new Rational(numer + i * denom, denom)

  def -(that: Rational): Rational =
    new Rational(numer * that.denom - that.numer * denom, denom * that.denom)

  def -(i: Int): Rational = new Rational(numer - i * denom, denom)

  def *(that: Rational): Rational = new Rational(numer * that.numer, denom * that.denom)

  def *(i: Int): Rational = new Rational(numer * i, denom) // can conflict with Int's own * without an implicit conversion

  def /(that: Rational): Rational = new Rational(numer * that.denom, denom * that.numer)

  def /(i: Int): Rational = new Rational(numer, denom * i)

  def lessThan(that: Rational) = this.numer * that.denom < that.numer * this.denom

  def max(that: Rational) = if (this.lessThan(that)) that else this

  private def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b)
}
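// --- Hedged aside (editor's illustration, not part of the original file) -------
// A few worked values for the class above; comments show the normalized results.
object RationalExample extends App {
  val half = new Rational(1, 2)
  val r    = new Rational(66, 42)       // gcd(66, 42) = 6, so stored as 11/7
  println(half + r)                     // (1*7 + 11*2) / (2*7) = 29/14
  println(half * new Rational(2, 3))    // 2/6, normalized to 1/3
  println(new Rational(3))              // auxiliary constructor: 3/1
  println(half max r)                   // 11/7
}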
Dr762/ScalaBase
src/main/scala/org/abondar/scalabasic/Rational.scala
Scala
apache-2.0
1,436
/*
 * Copyright 2014 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package examples

import java.io.File

import com.ibm.spark.kernel.protocol.v5.MIMEType
import com.ibm.spark.kernel.protocol.v5.client.boot.ClientBootstrap
import com.ibm.spark.kernel.protocol.v5.client.boot.layers.{StandardHandlerInitialization, StandardSystemInitialization}
import com.ibm.spark.kernel.protocol.v5.content.{ExecuteResult}
import com.ibm.spark.kernel.protocol.v5.content._
import com.typesafe.config.{ConfigFactory, Config}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Promise

/**
 * This App demonstrates how to use the spark client in scala.
 * Use this class as a playground.
 */
object ScalaSparkClientUsage extends App {

  val profile: File = new File(getClass.getResource("/kernel-profiles/IOPubIntegrationProfile.json").toURI)
  val config: Config = ConfigFactory.parseFile(profile)

  // Setup
  val client = (new ClientBootstrap(config)
    with StandardSystemInitialization
    with StandardHandlerInitialization).createClient()

  def printStreamContent(content: StreamContent) = {
    println(s"Stream content on channel ${content.name} was: ${content.text}")
  }

  def printTextResult(result: ExecuteResult) = {
    println(s"ExecuteResult data was: ${result.data.get(MIMEType.PlainText).get}")
  }

  def printError(reply: ExecuteReplyError) = {
    println(s"ExecuteReply error name was: ${reply.ename.get}")
  }

  // A callback used to determine if the kernel is no longer responding
  client.heartbeat(() => {
    println("hb bad")
  })

  // Assign a val
  client.execute("val z = 0")

  // Print the val out to stdout
  client.execute("println(z)").onStream(printStreamContent)
  //  Stream content on channel stdout was: 0

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Non streaming message with result
  client.execute("1+1").onResult(printTextResult)
  //  ExecuteResult data was: res(\d)+: Int = 2

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Non streaming message with error
  client.execute("1/0")
    .onResult(printTextResult)
    .onError(printError)
  //  ExecuteReply error name was: java.lang.ArithmeticException

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Stream message and result
  client.execute("println(\"Hello World\"); 2 + 2")
    .onStream(printStreamContent)
    .onResult(printTextResult)
  //  Stream content on channel stdout was: "Hello World"
  //  ExecuteResult data was: res(\d)+: Int = 4

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Stream message with error
  client.execute("println(1/1); println(1/2); println(1/0);")
    .onStream(printStreamContent)
    .onError(printError)
  //  Stream content on channel stdout was: 1
  //  Stream content on channel stdout was: 0
  //  ExecuteReply error name was: java.lang.ArithmeticException

  // Sleep so output does not overlap
  Thread.sleep(500)

  client.execute("val foo = 1+1; 2+2;")
    .onResult(printTextResult)
    .onStream(printStreamContent)
  //  ExecuteResult data was: foo: Int = 2
  //  res278: Int = 4

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Simulates calculating two numbers in the cluster and adding them client side
  def complexMath(x: Int, y: Int) = (x + 2) * y

  val xPromise: Promise[Int] = Promise()
  val yPromise: Promise[Int] = Promise()

  def parseResult(result: String): Int = {
    val intPattern = """res(\d)+: Int = (\d)+""".r
    intPattern findFirstIn result match {
      case Some(intPattern(resCount, res)) => Integer.parseInt(res)
      case None => 0
    }
  }

  // Big data function 1
  client.execute("1+1").onResult((executeResult: ExecuteResult) => {
    val result: Int = parseResult(executeResult.data(MIMEType.PlainText))
    xPromise.success(result)
  })

  // Big data function 2
  client.execute("3+3").onResult((executeResult: ExecuteResult) => {
    val result: Int = parseResult(executeResult.data(MIMEType.PlainText))
    yPromise.success(result)
  })

  val resultPromise = for {
    x <- xPromise.future
    y <- yPromise.future
  } yield (complexMath(x, y))

  resultPromise.onSuccess { case x =>
    println(s"Added result is ${x}")
  }
  //  Added result is 24

  // Sleep so output does not overlap
  Thread.sleep(500)

  // Running code on another thread
  val threadCode: String =
    """
      val s = new Thread(new Runnable {
        def run() {
          var x = 0
          while(x < 3) {
            Thread.sleep(1000)
            println("bean")
            x = x + 1
          }
        }
      })
      s.start()
    """.stripMargin

  client.execute(threadCode).onStream(printStreamContent)
  //  Stream content on channel stdout was: bean
  //  Stream content on channel stdout was: bean
  //  Stream content on channel stdout was: bean

  // Sleep so output does not overlap
  Thread.sleep(500)
}
yeghishe/spark-kernel
client/src/test/scala/examples/ScalaSparkClientUsage.scala
Scala
apache-2.0
5,466
/*
 * Copyright 2016 Actian Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.actian.spark_vector.colbuffer.util

import java.sql.Timestamp
import java.sql.Date
import java.util.Calendar

/** Helper functions and constants for `Time` conversions. */
object TimeConversion {
  @inline private final def timeInNanos(source: Timestamp): Long =
    TimestampConversion.timestampSeconds(source) * PowersOfTen(NanosecondsScale) + source.getNanos

  final def normalizeTime(source: Timestamp): Long = normalizeNanos(timeInNanos(source))

  private final def normalizeNanos(nanos: Long): Long = {
    // Map the (possibly negative) result of % into [0, NanosecondsInDay).
    val remainder = nanos % NanosecondsInDay
    if (remainder >= 0) {
      remainder
    } else {
      remainder + NanosecondsInDay
    }
  }

  @inline final def scaleNanos(unscaledNanos: Long, scale: Int): Long = unscaledNanos / PowersOfTen(NanosecondsScale - scale)

  @inline final def unscaleNanos(scaledNanos: Long, scale: Int): Long = scaledNanos * PowersOfTen(NanosecondsScale - scale)

  @inline private final def convertLocalDateHelper(date: Date, sign: Int = 1, cal: Calendar): Unit = {
    cal.setTime(date)
    date.setTime(date.getTime + sign * cal.get(Calendar.ZONE_OFFSET) + sign * cal.get(Calendar.DST_OFFSET))
  }

  final def convertLocalDateToUTC(date: Date, cal: Calendar): Unit = convertLocalDateHelper(date, 1, cal)

  final def convertUTCToLocalDate(date: Date, cal: Calendar): Unit = convertLocalDateHelper(date, -1, cal)

  @inline private final def convertLocalTimeHelper(time: Timestamp, sign: Int = 1, cal: Calendar): Unit = {
    val nanos = time.getNanos
    cal.setTimeInMillis(time.getTime)
    time.setTime(time.getTime + sign * cal.get(Calendar.ZONE_OFFSET) + sign * cal.get(Calendar.DST_OFFSET))
    time.setNanos(nanos)
  }

  final def convertLocalTimestampToUTC(time: Timestamp, cal: Calendar): Unit = convertLocalTimeHelper(time, 1, cal)

  final def convertUTCToLocalTimestamp(time: Timestamp, cal: Calendar): Unit = convertLocalTimeHelper(time, -1, cal)

  /**
   * This trait should be used when implementing a type of time conversion,
   * for example a timezone converter built on the helper functions above.
   */
  trait TimeConverter {
    def convert(nanos: Long, scale: Int): Long
    def deconvert(source: Long, scale: Int): Long
  }
}
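// --- Hedged aside (editor's illustration, not part of the original file) -------
// Worked values for scaleNanos/unscaleNanos above, assuming NanosecondsScale = 9,
// so that scale = 3 (milliseconds) divides by 10^(9 - 3) = 10^6:
object TimeConversionSketch extends App {
  import com.actian.spark_vector.colbuffer.util.TimeConversion

  val nanos  = 1234567890L
  val millis = TimeConversion.scaleNanos(nanos, 3)    // 1234567890 / 10^6 = 1234
  val back   = TimeConversion.unscaleNanos(millis, 3) // 1234 * 10^6 = 1234000000 (truncation is lossy)
  println((millis, back))
}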
ActianCorp/spark-vector
src/main/scala/com/actian/spark_vector/colbuffer/util/TimeConversion.scala
Scala
apache-2.0
2,820
/* * The MIT License (MIT) * * Copyright (c) 2016 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package algolia.dsl import algolia.definitions.LogsDefinition import algolia.responses.Logs import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait LogsDsl { implicit val formats: Formats def getLogs = LogsDefinition() @deprecated("use getLogs", "1.27.1") def logs() = LogsDefinition() implicit object LogsDefinitionExecutable extends Executable[LogsDefinition, Logs] { override def apply(client: AlgoliaClient, query: LogsDefinition)( implicit executor: ExecutionContext ): Future[Logs] = { client.request[Logs](query.build()) } } }
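// --- Hedged aside (editor's illustration, not part of the client) --------------
// With an AlgoliaClient and an ExecutionContext in scope, the DSL above is
// consumed through the implicit LogsDefinitionExecutable, roughly:
//
//   val client = new AlgoliaClient("yourAppId", "yourApiKey")
//   val logs: Future[Logs] = client.execute { getLogs }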
algolia/algoliasearch-client-scala
src/main/scala/algolia/dsl/LogsDsl.scala
Scala
mit
1,824
/*
 * Copyright (C) 2015 Noorq, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package services

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.reflect.runtime.universe

import com.mailrest.maildal.repository.DomainOwnerRepository
import com.mailrest.maildal.repository.DomainRef
import com.mailrest.maildal.repository.DomainRepository
import com.typesafe.scalalogging.slf4j.LazyLogging

import scaldi.Injectable
import scaldi.Injector

trait DomainService {

  def lookupDomain(domain: String): Future[Option[DomainContext]]

}

class DomainServiceImpl(implicit inj: Injector, xc: ExecutionContext = ExecutionContext.global)
    extends DomainService with Injectable with LazyLogging {

  val domainRepository = inject[DomainRepository]
  val domainOwnerRepository = inject[DomainOwnerRepository]

  def lookupDomain(id: DomainId): Future[Option[DomainContext]] = {
    domainRepository.findApiKey(id).map(x => x.map(y => new DomainContext(id, y._1)))
  }

  def lookupDomain(domainId: String): Future[Option[DomainContext]] = {
    domainOwnerRepository.findOwner(domainId).flatMap {
      case Some(y) =>
        val accountId = y._1
        val id = new DomainId(accountId, domainId)
        lookupDomain(id)
      case None => Future.successful(None)
    }
  }

}

case class DomainId(accountId: String, domainId: String) extends DomainRef

case class DomainContext(id: DomainId, apiKey: String)
mailrest/mailrest
app/services/DomainService.scala
Scala
apache-2.0
2,095
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openwhisk.core.yarn import akka.actor.{ActorRef, ActorSystem, Props} import akka.http.scaladsl.model.{HttpMethods, StatusCodes} import akka.pattern.ask import akka.util.Timeout import org.apache.openwhisk.common.{Logging, TransactionId} import org.apache.openwhisk.core.containerpool._ import org.apache.openwhisk.core.entity.ExecManifest.ImageName import org.apache.openwhisk.core.entity.{ByteSize, ExecManifest, InvokerInstanceId} import org.apache.openwhisk.core.yarn.YARNComponentActor.CreateContainerAsync import org.apache.openwhisk.core.{ConfigKeys, WhiskConfig} import pureconfig.loadConfigOrThrow import spray.json._ import scala.collection.immutable.HashMap import scala.concurrent.{blocking, ExecutionContext, Future} import scala.concurrent.duration._ import YARNJsonProtocol._ import akka.stream.ActorMaterializer case class YARNConfig(masterUrl: String, yarnLinkLogMessage: Boolean, serviceName: String, authType: String, kerberosPrincipal: String, kerberosKeytab: String, queue: String, memory: String, cpus: Int) object YARNContainerFactoryProvider extends ContainerFactoryProvider { override def instance(actorSystem: ActorSystem, logging: Logging, config: WhiskConfig, instance: InvokerInstanceId, parameters: Map[String, Set[String]]): ContainerFactory = new YARNContainerFactory(actorSystem, logging, config, instance, parameters) } class YARNContainerFactory(actorSystem: ActorSystem, logging: Logging, config: WhiskConfig, instance: InvokerInstanceId, parameters: Map[String, Set[String]], containerArgs: ContainerArgsConfig = loadConfigOrThrow[ContainerArgsConfig](ConfigKeys.containerArgs), yarnConfig: YARNConfig = loadConfigOrThrow[YARNConfig](ConfigKeys.yarn)) extends ContainerFactory { val images: Set[ImageName] = ExecManifest.runtimesManifest.runtimes.flatMap(a => a.versions.map(b => b.image)) //One actor of each type per image for parallelism private var yarnComponentActors: Map[ImageName, ActorRef] = HashMap[ImageName, ActorRef]() private var YARNContainerInfoActors: Map[ImageName, ActorRef] = HashMap[ImageName, ActorRef]() val serviceStartTimeoutMS = 60000 val retryWaitMS = 1000 val runCommand = "" val version = "1.0.0" val description = "OpenWhisk Action Service" //Allows for invoker HA val serviceName: String = yarnConfig.serviceName + "-" + instance.toInt val containerStartTimeoutMS = 60000 implicit val as: ActorSystem = actorSystem implicit val materializer: ActorMaterializer = ActorMaterializer() implicit val ec: ExecutionContext = actorSystem.dispatcher override def init(): Unit = { yarnComponentActors = images .map( i => ( i, actorSystem.actorOf( Props(new YARNComponentActor(actorSystem, logging, yarnConfig, serviceName, i)), name = s"YARNComponentActor-${i.name}"))) .toMap 
YARNContainerInfoActors = images .map( i => ( i, actorSystem.actorOf( Props(new YARNContainerInfoActor(actorSystem, logging, yarnConfig, serviceName, i)), name = s"YARNComponentInfoActor-${i.name}"))) .toMap blocking { implicit val timeout: Timeout = Timeout(serviceStartTimeoutMS.milliseconds) //Remove service if it already exists val serviceDef = YARNRESTUtil.downloadServiceDefinition(yarnConfig.authType, serviceName, yarnConfig.masterUrl)(logging) if (serviceDef != null) removeService() createService() } } override def createContainer( unusedtid: TransactionId, unusedname: String, actionImage: ExecManifest.ImageName, unuseduserProvidedImage: Boolean, unusedmemory: ByteSize, unusedcpuShares: Int)(implicit config: WhiskConfig, logging: Logging): Future[Container] = { implicit val timeout: Timeout = Timeout(containerStartTimeoutMS.milliseconds) //First send the create command to YARN, then with a different actor, wait for the container to be ready ask(yarnComponentActors(actionImage), CreateContainerAsync).flatMap(_ => ask(YARNContainerInfoActors(actionImage), GetContainerInfo(yarnComponentActors(actionImage))).mapTo[Container]) } override def cleanup(): Unit = { removeService() yarnComponentActors foreach { case (k, v) => actorSystem.stop(v) } YARNContainerInfoActors foreach { case (k, v) => actorSystem.stop(v) } } def createService(): Unit = { logging.info(this, "Creating Service with images: " + images.map(i => i.resolveImageName()).mkString(", ")) val componentList = images .map( i => ComponentDefinition( i.name.replace('.', '-'), //name must be [a-z][a-z0-9-]* Some(0), //start with zero containers Some(runCommand), Option.empty, Some(ArtifactDefinition(i.resolveImageName(), "DOCKER")), Some(ResourceDefinition(yarnConfig.cpus, yarnConfig.memory)), Some(ConfigurationDefinition(Map(("YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE", "true")))), List[String]())) .toList //Add kerberos def if necessary var kerberosDef: Option[KerberosPrincipalDefinition] = None if (yarnConfig.authType.equals(YARNRESTUtil.KERBEROSAUTH)) kerberosDef = Some( KerberosPrincipalDefinition(Some(yarnConfig.kerberosPrincipal), Some(yarnConfig.kerberosKeytab))) val service = ServiceDefinition( Some(serviceName), Some(version), Some(description), Some("STABLE"), Some(yarnConfig.queue), componentList, kerberosDef) //Submit service val response = YARNRESTUtil.submitRequestWithAuth( yarnConfig.authType, HttpMethods.POST, s"${yarnConfig.masterUrl}/app/v1/services", service.toJson.compactPrint) //Handle response response match { case httpresponse(StatusCodes.OK, content) => logging.info(this, s"Service submitted. Response: $content") case httpresponse(StatusCodes.Accepted, content) => logging.info(this, s"Service submitted. Response: $content") case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging) } //Wait for service start (up to serviceStartTimeoutMS milliseconds) var started = false var retryCount = 0 val maxRetryCount = serviceStartTimeoutMS / retryWaitMS while (!started && retryCount < maxRetryCount) { val serviceDef = YARNRESTUtil.downloadServiceDefinition(yarnConfig.authType, serviceName, yarnConfig.masterUrl)(logging) if (serviceDef == null) { logging.info(this, "Service not found yet") Thread.sleep(retryWaitMS) } else { serviceDef.state.getOrElse(None) match { case "STABLE" | "STARTED" => logging.info(this, "YARN service achieved stable state") started = true case state => logging.info( this, s"YARN service is not in stable state yet ($retryCount/$maxRetryCount). 
Current state: $state") Thread.sleep(retryWaitMS) } } retryCount += 1 } if (!started) throw new Exception(s"After ${serviceStartTimeoutMS}ms YARN service did not achieve stable state") } def removeService(): Unit = { val response: httpresponse = YARNRESTUtil.submitRequestWithAuth( yarnConfig.authType, HttpMethods.DELETE, s"${yarnConfig.masterUrl}/app/v1/services/$serviceName", "") response match { case httpresponse(StatusCodes.OK, _) => logging.info(this, "YARN service Removed") case httpresponse(StatusCodes.NotFound, _) => logging.warn(this, "YARN service did not exist") case httpresponse(StatusCodes.BadRequest, _) => logging.warn(this, "YARN service did not exist") case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging) } } }
openwhisk/openwhisk
common/scala/src/main/scala/org/apache/openwhisk/core/yarn/YARNContainerFactory.scala
Scala
apache-2.0
9,269
/* * Copyright 2017 Spotify AB. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.featran.transformers import com.spotify.featran.{FeatureBuilder, FeatureRejection, FlatReader, FlatWriter} import com.twitter.algebird.{Aggregator, Max, Min} /** * Transform features by rescaling each feature to a specific range [`min`, `max`] (default [0, 1]). * * Missing values are transformed to `min`. * * When using aggregated feature summary from a previous session, out of bound values are truncated * to `min` or `max` and [[FeatureRejection.OutOfBound]] rejections are reported. */ object MinMaxScaler extends SettingsBuilder { /** * Create a new [[MinMaxScaler]] instance. * @param min * lower bound after transformation, shared by all features * @param max * upper bound after transformation, shared by all features */ def apply( name: String, min: Double = 0.0, max: Double = 1.0 ): Transformer[Double, (Min[Double], Max[Double]), C] = new MinMaxScaler(name, min, max) /** * Create a new [[MinMaxScaler]] from a settings object * @param setting * Settings object */ def fromSettings(setting: Settings): Transformer[Double, (Min[Double], Max[Double]), C] = { val min = setting.params("min").toDouble val max = setting.params("max").toDouble MinMaxScaler(setting.name, min, max) } private type C = (Double, Double, Double) } private[featran] class MinMaxScaler(name: String, val min: Double, val max: Double) extends OneDimensional[Double, (Min[Double], Max[Double]), MinMaxScaler.C](name) { require(max > min, s"max must be > min") import MinMaxScaler.C override val aggregator: Aggregator[Double, (Min[Double], Max[Double]), C] = Aggregators.from[Double](x => (Min(x), Max(x))).to { r => val (aMin, aMax) = (r._1.get, r._2.get) val f = if ((aMax - aMin).isInfinity) 2.0 else 1.0 // scaling factor to avoid overflow (aMin / f, aMax / f, f) } override def buildFeatures(a: Option[Double], c: C, fb: FeatureBuilder[_]): Unit = a match { case Some(x) => val (aMin, aMax, f) = c val truncated = math.max(math.min(x / f, aMax), aMin) fb.add(name, (truncated - aMin) / (aMax - aMin) * (max - min) + min) if (x < aMin || x > aMax) { fb.reject(this, FeatureRejection.OutOfBound(aMin, aMax, x)) } case None => fb.add(name, min) } override def encodeAggregator(c: C): String = s"${c._1},${c._2},${c._3}" override def decodeAggregator(s: String): C = { val t = s.split(",") (t(0).toDouble, t(1).toDouble, t(2).toDouble) } override def params: Map[String, String] = Map("min" -> min.toString, "max" -> max.toString) override def flatRead[T: FlatReader]: T => Option[Any] = FlatReader[T].readDouble(name) override def flatWriter[T](implicit fw: FlatWriter[T]): Option[Double] => fw.IF = fw.writeDouble(name) }
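// --- Hedged aside (editor's illustration, not part of featran) -----------------
// Worked numbers for buildFeatures above, ignoring the overflow factor (f = 1):
// with observed range [aMin, aMax] = [10, 30] and target [min, max] = [0, 1],
//   x = 15    -> (15 - 10) / (30 - 10) * (1 - 0) + 0 = 0.25
//   x = 40    -> truncated to 30 -> 1.0, and FeatureRejection.OutOfBound(10, 30, 40) is reported
//   x missing -> min = 0.0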
spotify/featran
core/src/main/scala/com/spotify/featran/transformers/MinMaxScaler.scala
Scala
apache-2.0
3,440
package org.jetbrains.plugins.scala.codeInsight package hints package methodChains import java.awt.{Graphics, Insets, Rectangle} import com.intellij.openapi.Disposable import com.intellij.openapi.editor._ import com.intellij.openapi.editor.ex.EditorEx import com.intellij.openapi.editor.impl.EditorImpl import com.intellij.openapi.editor.markup.TextAttributes import com.intellij.openapi.util.{Disposer, Key} import org.jetbrains.plugins.scala.annotator.hints.Text import org.jetbrains.plugins.scala.codeInsight.hints.methodChains.AlignedInlayGroup._ import org.jetbrains.plugins.scala.codeInsight.implicits.TextPartsHintRenderer import org.jetbrains.plugins.scala.extensions._ private abstract class AlignedHintTemplate(val textParts: Seq[Text]) { def line(document: Document): Int = document.getLineNumber(endOffset) def endOffset: Int } private class AlignedInlayGroup(hints: Seq[AlignedHintTemplate], minMargin: Int = 1, maxMargin: Int = 6) (inlayModel: InlayModel, document: Document, charWidthInPixel: Int) extends Disposable { private val minMarginInPixel = minMargin * charWidthInPixel private val maxMarginInPixel = maxMargin * charWidthInPixel private val alignmentLines: Seq[AlignmentLine] = { val lineToHintMapping = hints.groupBy(_.line(document)).view.mapValues(_.head) val lineHasHint = lineToHintMapping.contains _ val firstLine = 0 max (hints.head.line(document) - 1) val lastLine = document.getLineCount min (hints.last.line(document) + 1) (firstLine to lastLine).flatMap { line => val maybeHint = lineToHintMapping.get(line) val maybeOffset = maybeHint match { case Some(hint) => Some(hint.endOffset) case _ if lineHasHint(line - 1) || lineHasHint(line + 1) => Some(document.getLineEndOffset(line)) case _ => None } maybeOffset.map(new AlignmentLine(_, maybeHint)(document)) } } // unfortunately `AlignedHintsRenderer.getMargin -> recalculateGroupsOffsets` // is called by `inlayModel.addAfterLineEndElement` // so we set it to empty first, so it is not null while the inlays are being build private var inlays: Seq[Inlay[AlignedInlayRenderer]] = Seq.empty locally { inlays = for(line <- alignmentLines; hint <- line.maybeHint) yield { val inlay = inlayModel.addAfterLineEndElement( hint.endOffset, false, new AlignedInlayRenderer(line, hint.textParts, recalculateGroupsOffsets) ) inlay.putUserData(ScalaMethodChainKey, true) inlay } inlays.head.putUserData(ScalaMethodChainDisposableKey, this) } private def recalculateGroupsOffsets(editor: Editor): Unit = { val allEndXs = alignmentLines.map(_.lineEndX(editor)) val actualEndXs = alignmentLines.withFilter(_.hasHint).map(_.lineEndX(editor)) val max = allEndXs.max val avg = actualEndXs.sum / actualEndXs.length var targetMaxX = max + math.max(minMarginInPixel, maxMarginInPixel - (max - avg) / 3) // this makes the group more stable and less flickery targetMaxX -= targetMaxX % charWidthInPixel for (inlay <- inlays) { val renderer = inlay.getRenderer val endX = renderer.line.lineEndX(editor) renderer.setMargin(endX, targetMaxX - endX, inlay, !editor.asOptionOf[EditorEx].exists(_.isPurePaintingMode)) } } override def dispose(): Unit = alignmentLines.foreach(_.dispose()) } private object AlignedInlayGroup { private val ScalaMethodChainDisposableKey: Key[Disposable] = Key.create[Disposable]("SCALA_METHOD_CHAIN_DISPOSABLE_KEY") def dispose(inlay: Inlay[_]): Unit = { inlay .getUserData(ScalaMethodChainDisposableKey) .nullSafe .foreach(Disposer.dispose) } private class AlignmentLine(offset: Int, val maybeHint: Option[AlignedHintTemplate])(document: Document) extends 
Disposable { private val marker: RangeMarker = document.createRangeMarker(offset, offset) def hasHint: Boolean = maybeHint.isDefined def lineNumber: Int = document.getLineNumber(marker.getEndOffset) def lineEndX(editor: Editor): Int = { val endOffset = marker.getEndOffset if (endOffset < 0 || endOffset >= document.getTextLength) 0 else editor.offsetToXY(document.getLineEndOffset(lineNumber), true, false).x } override def dispose(): Unit = marker.dispose() } private case class Cached(lineEndX: Int, margin: Int) private class AlignedInlayRenderer(val line: AlignmentLine, textParts: Seq[Text], recalculateGroupsOffsets: Editor => Unit) extends TextPartsHintRenderer(textParts, typeHintsMenu) { private var cached: Cached = Cached(lineEndX = 0, margin = 0) def setMargin(lineEndX: Int, margin: Int, inlay: Inlay[_], repaint: Boolean): Unit = { if (cached.margin != margin) { cached = Cached(lineEndX, margin) if (repaint) { inlay.update() } } } override def paint0(editor: Editor, g: Graphics, r: Rectangle, textAttributes: TextAttributes): Unit = { if (cached.lineEndX != line.lineEndX(editor)) { val oldMargin = cached.margin recalculateGroupsOffsets(editor) // after recalculating the offset, r has the wrong width, so we fix that here r.width += cached.margin - oldMargin } var hasSomethingElseInLine = false editor.asOptionOf[EditorImpl].foreach(_.processLineExtensions(line.lineNumber, _ => { hasSomethingElseInLine = true; false} )) if (!hasSomethingElseInLine) { super.paint0(editor, g, r, textAttributes) } } override def getMargin(editor: Editor): Insets = new Insets(0, cached.margin, 0, 0) } }
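// --- Hedged aside (editor's illustration, not part of the plugin) --------------
// Worked numbers for recalculateGroupsOffsets above, with charWidthInPixel = 8
// and the default margins (min 1 char = 8 px, max 6 chars = 48 px). If the only
// hinted line ends at x = 180, then max = avg = 180 and:
//   targetMaxX = 180 + max(8, 48 - (180 - 180) / 3) = 228
//   228 - 228 % 8 = 224                  // snapped to a character-width multiple
//   margin for that line = 224 - 180 = 44 px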
JetBrains/intellij-scala
scala/codeInsight/src/org/jetbrains/plugins/scala/codeInsight/hints/methodChains/AlignedInlayGroup.scala
Scala
apache-2.0
5,744
/*
 *              _     _
 *   _ __   ___| |__ | | ___
 *  | '_ \ / _ \| '_ \| |/ _ \    noble :: norcane blog engine
 *  | | | | (_) | |_) | |  __/    Copyright (c) 2016-2018 norcane
 *  |_| |_|\___/|_.__/|_|\___|
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.norcane.noble.api.models

import com.norcane.noble.api.models.dates.{Day, Month, Year}

import scala.collection.immutable.SortedMap

/**
 * Represents a single blog. Each blog consists of a unique version ID, blog info and a collection
 * of blog posts. The blog version ID is immutable and is used during blog reloading to compare
 * whether any part of the blog (blog posts, configuration) has changed from the actual state. Blog
 * info represents further details about the blog, such as blog title, used theme, author(s), etc.
 * The collection of blog posts holds loaded metadata for all blog posts available for this blog.
 *
 * @param name      unique internal name of the blog
 * @param versionId unique version ID of the current blog instance
 * @param info      blog info (e.g. title, used theme)
 * @param blogPosts collection of all blog's posts (as metadata only)
 * @author Vaclav Svejcar ([email protected])
 */
class Blog(val name: String, val versionId: String, val info: BlogInfo,
           blogPosts: Seq[BlogPostMeta], val pages: Seq[StaticPageMeta]) {

  /**
   * Sorted collection of all blog posts (metadata only).
   */
  val posts: Seq[BlogPostMeta] = blogPosts.sorted.reverse

  /**
   * Collection of blog posts, sorted by years of publishing.
   */
  val years: Seq[Year] = posts.groupBy(_.date.getYear).map { byYear =>
    val (year, yearPosts) = byYear
    val months: SortedMap[Int, Month] = SortedMap.empty[Int, Month] ++
      yearPosts.groupBy(_.date.getMonthValue).map { byMonth =>
        val (month, monthPosts) = byMonth
        val days: SortedMap[Int, Day] = SortedMap.empty[Int, Day] ++
          monthPosts.groupBy(_.date.getDayOfMonth).map { byDay =>
            val (day, dayPosts) = byDay
            day -> Day(year, month, day, dayPosts)
          }
        month -> Month(year, month, days, monthPosts)
      }
    Year(year, months, yearPosts)
  }.toSeq.sorted

  val authors: Map[String, Seq[BlogPostMeta]] = posts.groupBy(_.author)

  /**
   * Represents the map of tags, where the key is the tag name and the value the collection of blog
   * posts for the tag.
   */
  val tags: Map[String, Seq[BlogPostMeta]] = posts
    .flatMap(postMeta => postMeta.tags map (_ -> postMeta))
    .groupBy(_._1).mapValues(_ map (_._2))

  /**
   * Sequence of tags, represented by the [[Tag]] object. The tag weight is calculated for each tag
   * and is represented as an integer from 1 to 10, where 1 is the lowest value.
   */
  val tagCloud: Seq[Tag] = {
    val rankFactor: Double = Math.max(1.0, 10.0 / tags.map(_._2.size).fold(0)(Math.max))
    tags.map {
      case (tag, postsMeta) => Tag(tag, postsMeta.size, Math.ceil(rankFactor * postsMeta.size).toInt)
    }.toSeq.sortBy(_.name)
  }

  /**
   * Returns collection of all blog posts, published in the specified year.
   *
   * @param year year of publication
   * @return the [[Year]] aggregate holding all blog posts published in the specified year
   */
  def forYear(year: Int): Year = years.find(_.year == year).getOrElse(Year.empty(year))

  /**
   * Returns collection of all blog posts, published by the specified author.
   *
   * @param authorId unique ID of the author
   * @return collection of all blog posts, published by the specified author
   */
  def byAuthor(authorId: String): Option[Seq[BlogPostMeta]] = authors.get(authorId)

  /**
   * Returns collection of all blog posts for the specified tag.
   *
   * @param name name of the tag
   * @return collection of all blog posts for the specified tag
   */
  def forTag(name: String): Option[Seq[BlogPostMeta]] = tags.get(name)

  /**
   * Returns the static page for the specified permanent link.
   *
   * @param permalink permanent link of the static page
   * @return static page metadata (if any found)
   */
  def page(permalink: String): Option[StaticPageMeta] = pages.find(_.permalink == permalink)
}
norcane/noble
sdk/noble-api/src/main/scala/com/norcane/noble/api/models/Blog.scala
Scala
apache-2.0
4,675
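The tagCloud weight formula above scales tag counts into the 1..10 range by ranking the most-used tag at 10. A self-contained sketch of just that calculation (this Tag is a simplified stand-in for the model class):

// Simplified stand-in for the real model's Tag; only the weight math matters here.
case class Tag(name: String, count: Int, weight: Int)

object TagCloudDemo extends App {
  def tagCloud(tags: Map[String, Int]): Seq[Tag] = {
    // Same formula as Blog.tagCloud: scale so the largest tag gets weight 10.
    val rankFactor = math.max(1.0, 10.0 / tags.values.fold(0)(math.max))
    tags.map { case (name, count) =>
      Tag(name, count, math.ceil(rankFactor * count).toInt)
    }.toSeq.sortBy(_.name)
  }

  // With counts 1, 2 and 5, the weights come out as 2, 4 and 10.
  println(tagCloud(Map("scala" -> 5, "play" -> 2, "misc" -> 1)))
}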
/*
 * Scala.js (https://www.scala-js.org/)
 *
 * Copyright EPFL.
 *
 * Licensed under Apache License 2.0
 * (https://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package sbt.testing

/** Information in addition to a test class name that identifies the suite or
 *  test about which an event was fired.
 *
 *  This class has five subtypes:
 *
 *  - <code>SuiteSelector</code> - indicates an event is about an entire suite
 *    of tests whose class was reported as <code>fullyQualifiedName</code> in
 *    the <code>Event</code>
 *  - <code>TestSelector</code> - indicates an event is about a single test
 *    directly contained in the suite whose class was reported as
 *    <code>fullyQualifiedName</code> in the <code>Event</code>
 *  - <code>NestedSuiteSelector</code> - indicates an event is about an entire
 *    nested suite of tests whose top-level, "nesting" class was reported as
 *    <code>fullyQualifiedName</code> in the <code>Event</code>
 *  - <code>NestedTestSelector</code> - indicates an event is about a single
 *    test contained in a nested suite whose top-level, "nesting" class was
 *    reported as <code>fullyQualifiedName</code> in the <code>Event</code>
 *  - <code>TestWildcardSelector</code> - indicates an event is about zero to
 *    many tests directly contained in the suite whose class was reported as
 *    <code>fullyQualifiedName</code> in the <code>Event</code>
 */
abstract sealed class Selector

/** Indicates an event was about the entire suite whose class had the fully
 *  qualified name specified as the <code>fullyQualifiedName</code> attribute
 *  passed to the event.
 */
final class SuiteSelector extends Selector with Serializable {
  override def equals(o: Any): Boolean = o.isInstanceOf[SuiteSelector]
  override def hashCode(): Int = 29
  override def toString(): String = "SuiteSelector"
}

/** Information in addition to a test class name that identifies a test
 *  directly contained in the suite whose class had the fully qualified name
 *  specified as the <code>fullyQualifiedName</code> attribute passed to the
 *  event.
 */
final class TestSelector(_testName: String) extends Selector with Serializable {
  if (_testName == null)
    throw new NullPointerException("testName was null")

  /** The name of a test about which an event was fired.
   *
   *  @return the name of the test
   */
  def testName(): String = _testName

  override def equals(that: Any): Boolean = that match {
    case that: TestSelector => this.testName == that.testName
    case _                  => false
  }

  override def hashCode(): Int = testName.hashCode()

  override def toString(): String = s"TestSelector($testName)"
}

/** Information in addition to a test class name that identifies a nested suite
 *  about which an event was fired.
 */
final class NestedSuiteSelector(_suiteId: String) extends Selector with Serializable {
  if (_suiteId == null)
    throw new NullPointerException("suiteId was null")

  /** An id that, in addition to a test class name, identifies a nested suite
   *  about which an event was fired.
   *
   *  @return the id of the nested suite
   */
  def suiteId(): String = _suiteId

  override def equals(that: Any): Boolean = that match {
    case that: NestedSuiteSelector => this.suiteId == that.suiteId
    case _                         => false
  }

  override def hashCode(): Int = suiteId.hashCode()

  override def toString(): String = s"NestedSuiteSelector($suiteId)"
}

/** Information in addition to a test class name that identifies a test in a
 *  nested suite about which an event was fired.
 */
final class NestedTestSelector(_suiteId: String, _testName: String) extends Selector with Serializable {
  if (_suiteId == null)
    throw new NullPointerException("suiteId was null")
  if (_testName == null)
    throw new NullPointerException("testName was null")

  /** An id that, in addition to a test class name, identifies a nested suite
   *  that contains a test about which an event was fired.
   *
   *  @return the id of the nested suite containing the test
   */
  def suiteId(): String = _suiteId

  /** The name of the test in a nested suite about which an event was fired.
   *
   *  @return the name of the test in the nested suite identified by the id
   *          returned by <code>suiteId</code>.
   */
  def testName(): String = _testName

  override def equals(that: Any): Boolean = that match {
    case that: NestedTestSelector =>
      this.suiteId == that.suiteId && this.testName == that.testName
    case _ =>
      false
  }

  override def hashCode(): Int = {
    var retVal = 17
    retVal = 31 * retVal + suiteId.hashCode()
    retVal = 31 * retVal + testName.hashCode()
    retVal
  }

  override def toString(): String = s"NestedTestSelector($suiteId, $testName)"
}

/** Information that identifies zero to many tests directly contained in a test
 *  class.
 *
 *  The <code>testWildcard</code> is a simple string, <em>i.e.</em>, not a glob
 *  or regular expression. Any test whose name includes the
 *  <code>testWildcard</code> string as a substring will be selected.
 */
final class TestWildcardSelector(_testWildcard: String) extends Selector with Serializable {
  if (_testWildcard == null)
    throw new NullPointerException("testWildcard was null")

  /** A test wildcard string used to select tests.
   *
   *  The <code>testWildcard</code> is a simple string, <em>i.e.</em>, not a
   *  glob or regular expression. Any test whose name includes the
   *  <code>testWildcard</code> string as a substring will be selected.
   *
   *  @return the test wildcard string used to select tests.
   */
  def testWildcard(): String = _testWildcard

  override def equals(that: Any): Boolean = that match {
    case that: TestWildcardSelector => this.testWildcard == that.testWildcard
    case _                          => false
  }

  override def hashCode(): Int = testWildcard.hashCode()

  override def toString(): String = s"TestWildcardSelector($testWildcard)"
}
nicolasstucki/scala-js
test-interface/src/main/scala/sbt/testing/Selectors.scala
Scala
apache-2.0
6,045
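The selectors above implement equals/hashCode by hand so they behave as value objects. A short usage sketch relying only on the classes defined in this file:

object SelectorDemo extends App {
  // Value semantics: equal contents compare equal, so selectors can be used
  // as map keys or deduplicated in sets.
  val a = new TestSelector("mySpec should work")
  val b = new TestSelector("mySpec should work")
  assert(a == b && a.hashCode == b.hashCode)

  val nested = new NestedTestSelector("suite-1", "test-A")
  assert(nested.toString == "NestedTestSelector(suite-1, test-A)")

  // Constructors reject null eagerly rather than failing later.
  try new TestSelector(null)
  catch { case _: NullPointerException => println("null rejected at construction") }
}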
package models.user

import models.ParamHelper
import play.api.libs.json._
import play.api.libs.functional.syntax._
import reactivemongo.bson._
import services.dao.UtilBson

case class ServiceArg(
  name: String,
  value: ParamHelper
)

object ServiceArg {

  implicit val serviceArgReader: Reads[ServiceArg] = (
    (__ \ "name").read[String] and
    (__ \ "value").read[ParamHelper])(ServiceArg.apply _)

  implicit val writer = (
    (__ \ "name").write[String] and
    (__ \ "value").write[ParamHelper]
  )(unlift(ServiceArg.unapply))

  def toBSON(serviceArg: ServiceArg) = {
    BSONDocument(
      "name" -> BSONString(serviceArg.name),
      "value" -> ParamHelper.toBSON(serviceArg.value))
  }

  def fromBSON(c: BSONDocument) = {
    ServiceArg(c.getAs[String]("name").get, ParamHelper.fromBSON(c.getAs[BSONDocument]("value").get))
  }
}
Froggies/Skimbo
app/models/user/ServiceArg.scala
Scala
agpl-3.0
865
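ServiceArg wires its Reads/Writes with play-json's functional combinators. The same pattern in a self-contained form (Point is a hypothetical stand-in for ServiceArg, so no ParamHelper dependency is needed):

import play.api.libs.json._
import play.api.libs.functional.syntax._

case class Point(name: String, x: Int)

object Point {
  // Same combinator style as ServiceArg: one JsPath lens per field, zipped with `and`.
  implicit val reads: Reads[Point] = (
    (__ \ "name").read[String] and
    (__ \ "x").read[Int]
  )(Point.apply _)

  implicit val writes: Writes[Point] = (
    (__ \ "name").write[String] and
    (__ \ "x").write[Int]
  )(unlift(Point.unapply))
}

// Round trip: Json.toJson(Point("a", 1)).as[Point] == Point("a", 1)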
/* * ____ ____ _____ ____ ___ ____ * | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R) * | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data * | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc. * |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved. * * This program is free software: you can redistribute it and/or modify it under the terms of the * GNU Affero General Public License as published by the Free Software Foundation, either version * 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See * the GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License along with this * program. If not, see <http://www.gnu.org/licenses/>. * */ package com.precog package mimir import scala.collection.mutable import com.precog.bytecode._ import com.precog.yggdrasil._ import com.precog.util.Identifier trait TypeInferencer extends DAG { import instructions.{ BinaryOperation, ArraySwap, WrapArray, WrapObject, DerefArray, DerefObject } import dag._ def inferTypes(jtpe: JType)(graph: DepGraph): DepGraph = { def collectTypes(universe: JType, graph: DepGraph): Map[DepGraph, Set[JType]] = { def collectSpecTypes(typing: Map[DepGraph, Set[JType]], splits: Map[Identifier, Split], spec: BucketSpec): Map[DepGraph, Set[JType]] = spec match { case UnionBucketSpec(left, right) => collectSpecTypes(collectSpecTypes(typing, splits, left), splits, right) case IntersectBucketSpec(left, right) => collectSpecTypes(collectSpecTypes(typing, splits, left), splits, right) case Group(id, target, child) => collectSpecTypes(inner(None, typing, splits, target), splits, child) case UnfixedSolution(id, target) => inner(Some(universe), typing, splits, target) case Extra(target) => inner(Some(universe), typing, splits, target) } def inner(jtpe: Option[JType], typing: Map[DepGraph, Set[JType]], splits: Map[Identifier, Split], graph: DepGraph): Map[DepGraph, Set[JType]] = { graph match { case _: Root => typing case New(parent) => inner(jtpe, typing, splits, parent) case ld @ AbsoluteLoad(parent, _) => val typing0 = inner(Some(JTextT), typing, splits, parent) jtpe map { jtpe0 => typing0 get ld map { jtpes => typing + (ld -> (jtpes + jtpe0)) } getOrElse { typing + (ld -> Set(jtpe0)) } } getOrElse typing case ld @ RelativeLoad(parent, _) => val typing0 = inner(Some(JTextT), typing, splits, parent) jtpe map { jtpe0 => typing0 get ld map { jtpes => typing + (ld -> (jtpes + jtpe0)) } getOrElse { typing + (ld -> Set(jtpe0)) } } getOrElse typing case Operate(op, parent) => inner(Some(op.tpe.arg), typing, splits, parent) case Reduce(red, parent) => inner(Some(red.tpe.arg), typing, splits, parent) case MegaReduce(_, _) => sys.error("Cannot infer type of MegaReduce. 
MegaReduce optimization must come after inferTypes.") case Morph1(m, parent) => inner(Some(m.tpe.arg), typing, splits, parent) case Morph2(m, left, right) => inner(Some(m.tpe.arg1), inner(Some(m.tpe.arg0), typing, splits, left), splits, right) case Join(DerefObject, Cross(_), left, right @ ConstString(str)) => inner(jtpe map { jtpe0 => JObjectFixedT(Map(str -> jtpe0)) }, typing, splits, left) case Join(DerefArray, Cross(_), left, right @ ConstDecimal(d)) => inner(jtpe map { jtpe0 => JArrayFixedT(Map(d.toInt -> jtpe0)) }, typing, splits, left) case Join(WrapObject, Cross(_), ConstString(str), right) => { val jtpe2 = jtpe map { case JObjectFixedT(map) => map get str getOrElse universe case _ => universe } inner(jtpe2, typing, splits, right) } case Join(ArraySwap, Cross(_), left, right) => { val jtpe2 = jtpe flatMap { case JArrayFixedT(_) => jtpe case _ => Some(JArrayUnfixedT) } inner(Some(JNumberT), inner(jtpe2, typing, splits, left), splits, right) } case Join(op: BinaryOperation, _, left, right) => inner(Some(op.tpe.arg1), inner(Some(op.tpe.arg0), typing, splits, left), splits, right) case Assert(pred, child) => inner(jtpe, inner(jtpe, typing, splits, pred), splits, child) case graph @ Cond(pred, left, _, right, _) => inner(jtpe, typing, splits, graph.peer) case Observe(data, samples) => inner(jtpe, inner(jtpe, typing, splits, data), splits, samples) case IUI(_, left, right) => inner(jtpe, inner(jtpe, typing, splits, left), splits, right) case Diff(left, right) => inner(jtpe, inner(jtpe, typing, splits, left), splits, right) case Filter(_, target, boolean) => inner(Some(JBooleanT), inner(jtpe, typing, splits, target), splits, boolean) case AddSortKey(parent, _, _, _) => inner(jtpe, typing, splits, parent) case Memoize(parent, _) => inner(jtpe, typing, splits, parent) case Distinct(parent) => inner(jtpe, typing, splits, parent) case s @ Split(spec, child, id) => inner(jtpe, collectSpecTypes(typing, splits, spec), splits + (id -> s), child) // not using extractors due to bug case s: SplitGroup => { val Split(spec, _, _) = splits(s.parentId) findGroup(spec, s.id) map { inner(jtpe, typing, splits, _) } getOrElse typing } // not using extractors due to bug case s: SplitParam => { val Split(spec, _, _) = splits(s.parentId) findParams(spec, s.id).foldLeft(typing) { (typing, graph) => inner(jtpe, typing, splits, graph) } } } } inner(Some(universe), Map(), Map(), graph) } def applyTypes(typing: Map[DepGraph, JType], graph: DepGraph): DepGraph = { graph mapDown { recurse => { case ld @ AbsoluteLoad(parent, _) => AbsoluteLoad(recurse(parent), typing(ld))(ld.loc) case ld @ RelativeLoad(parent, _) => RelativeLoad(recurse(parent), typing(ld))(ld.loc) }} } def findGroup(spec: BucketSpec, id: Int): Option[DepGraph] = spec match { case UnionBucketSpec(left, right) => findGroup(left, id) orElse findGroup(right, id) case IntersectBucketSpec(left, right) => findGroup(left, id) orElse findGroup(right, id) case Group(`id`, target, _) => Some(target) case Group(_, _, _) => None case UnfixedSolution(_, _) => None case Extra(_) => None } def findParams(spec: BucketSpec, id: Int): Set[DepGraph] = spec match { case UnionBucketSpec(left, right) => findParams(left, id) ++ findParams(right, id) case IntersectBucketSpec(left, right) => findParams(left, id) ++ findParams(right, id) case Group(_, _, child) => findParams(child, id) case UnfixedSolution(`id`, child) => Set(child) case UnfixedSolution(_, _) => Set() case Extra(_) => Set() } val collectedTypes = collectTypes(jtpe, graph) val typing = 
collectedTypes.mapValues(_.reduce(JUnionT)) applyTypes(typing, graph) } }
precog/platform
mimir/src/main/scala/com/precog/mimir/TypeInferencer.scala
Scala
agpl-3.0
8,181
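inferTypes works in two phases: it first collects a Set[JType] of constraints per DAG node, then collapses each set with JUnionT before applying the result, as in the final collectedTypes.mapValues(_.reduce(JUnionT)) step above. A toy sketch of that collect-then-merge shape (MiniType and TUnion are invented stand-ins for JType/JUnionT):

// Invented stand-ins for JType/JUnionT, just to show the merge step.
sealed trait MiniType
case object TText extends MiniType
case object TNum extends MiniType
case class TUnion(a: MiniType, b: MiniType) extends MiniType

object TypeMergeDemo extends App {
  // Phase 1 result: several constraints may accumulate per node.
  val collected: Map[String, Set[MiniType]] =
    Map("load1" -> Set(TText, TNum), "load2" -> Set(TNum))

  // Phase 2: collapse each constraint set into a single union type.
  val typing: Map[String, MiniType] =
    collected.map { case (node, types) => node -> types.reduce(TUnion(_, _)) }

  // typing("load1") merges both constraints into a TUnion; typing("load2") == TNum
  println(typing)
}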
package com.twitter.finagle import com.twitter.finagle.client.{StdStackClient, StackClient, Transporter} import com.twitter.finagle.dispatch.{SerialClientDispatcher, SerialServerDispatcher} import com.twitter.finagle.netty3.{Netty3Transporter, Netty3Listener} import com.twitter.finagle.param.{Label, Stats, ProtocolLibrary} import com.twitter.finagle.server.{StdStackServer, StackServer, Listener} import com.twitter.finagle.thrift.{ClientId => _, _} import com.twitter.finagle.transport.Transport import com.twitter.util.Stopwatch import java.net.SocketAddress import org.apache.thrift.protocol.TProtocolFactory /** * Client and server for [[http://thrift.apache.org Apache Thrift]]. * `Thrift` implements Thrift framed transport and binary protocol by * default, though custom protocol factories (i.e. wire encoding) may * be injected with `withProtocolFactory`. The client, * `Client[ThriftClientRequest, Array[Byte]]` provides direct access * to the thrift transport, but we recommend using code generation * through either [[https://github.com/twitter/scrooge Scrooge]] or * [[https://github.com/mariusaeriksen/thrift-0.5.0-finagle a fork]] * of the Apache generator. A rich API is provided to support * interfaces generated with either of these code generators. * * The client and server uses the standard thrift protocols, with * support for both framed and buffered transports. Finagle attempts * to upgrade the protocol in order to ship an extra envelope * carrying additional request metadata, containing, among other * things, request IDs for Finagle's RPC tracing facilities. * * The negotiation is simple: on connection establishment, an * improbably-named method is dispatched on the server. If that * method isn't found, we are dealing with a legacy thrift server, * and the standard protocol is used. If the remote server is also a * finagle server (or any other supporting this extension), we reply * to the request, and every subsequent request is dispatched with an * envelope carrying trace metadata. The envelope itself is also a * Thrift struct described * [[https://github.com/twitter/finagle/blob/master/finagle-thrift/src/main/thrift/tracing.thrift * here]]. * * == Clients == * * $clientExample * * $thriftUpgrade * * == Servers == * * $serverExample * * @define clientExampleObject Thrift * @define serverExampleObject Thrift */ object Thrift extends Client[ThriftClientRequest, Array[Byte]] with ThriftRichClient with Server[Array[Byte], Array[Byte]] with ThriftRichServer { val protocolFactory: TProtocolFactory = Protocols.binaryFactory() protected val defaultClientName = "thrift" object param { case class ClientId(clientId: Option[thrift.ClientId]) implicit object ClientId extends Stack.Param[ClientId] { val default = ClientId(None) } case class ProtocolFactory(protocolFactory: TProtocolFactory) implicit object ProtocolFactory extends Stack.Param[ProtocolFactory] { val default = ProtocolFactory(Protocols.binaryFactory()) } /** * A `Param` to set the max size of a reusable buffer for the thrift response. * If the buffer size exceeds the specified value, the buffer is not reused, * and a new buffer is used for the next thrift response. * @param maxReusableBufferSize Max buffer size in bytes. 
*/ case class MaxReusableBufferSize(maxReusableBufferSize: Int) implicit object MaxReusableBufferSize extends Stack.Param[MaxReusableBufferSize] { val default = MaxReusableBufferSize(maxThriftBufferSize) } } object Client { private val preparer: Stackable[ServiceFactory[ThriftClientRequest, Array[Byte]]] = new Stack.Module4[ param.ClientId, Label, Stats, param.ProtocolFactory, ServiceFactory[ThriftClientRequest, Array[Byte]] ] { val role = StackClient.Role.prepConn val description = "Prepare TTwitter thrift connection" def make( _clientId: param.ClientId, _label: Label, _stats: Stats, _pf: param.ProtocolFactory, next: ServiceFactory[ThriftClientRequest, Array[Byte]] ) = { val Label(label) = _label val param.ClientId(clientId) = _clientId val param.ProtocolFactory(pf) = _pf val preparer = new ThriftClientPreparer(pf, label, clientId) val underlying = preparer.prepare(next) val Stats(stats) = _stats new ServiceFactoryProxy(underlying) { val stat = stats.stat("codec_connection_preparation_latency_ms") override def apply(conn: ClientConnection) = { val elapsed = Stopwatch.start() super.apply(conn) ensure { stat.add(elapsed().inMilliseconds) } } } } } // We must do 'preparation' this way in order to let Finagle set up tracing & so on. val stack: Stack[ServiceFactory[ThriftClientRequest, Array[Byte]]] = StackClient.newStack .replace(StackClient.Role.prepConn, preparer) } case class Client( stack: Stack[ServiceFactory[ThriftClientRequest, Array[Byte]]] = Client.stack, params: Stack.Params = StackClient.defaultParams + ProtocolLibrary("thrift"), framed: Boolean = true ) extends StdStackClient[ThriftClientRequest, Array[Byte], Client] with ThriftRichClient { protected def copy1( stack: Stack[ServiceFactory[ThriftClientRequest, Array[Byte]]] = this.stack, params: Stack.Params = this.params ): Client = copy(stack, params) protected val defaultClientName = "thrift" protected type In = ThriftClientRequest protected type Out = Array[Byte] protected val param.ProtocolFactory(protocolFactory) = params[param.ProtocolFactory] protected def newTransporter(): Transporter[In, Out] = { val pipeline = if (framed) ThriftClientFramedPipelineFactory else ThriftClientBufferedPipelineFactory(protocolFactory) Netty3Transporter(pipeline, params) } protected def newDispatcher( transport: Transport[ThriftClientRequest, Array[Byte]] ): Service[ThriftClientRequest, Array[Byte]] = new SerialClientDispatcher(transport) def withProtocolFactory(protocolFactory: TProtocolFactory): Client = configured(param.ProtocolFactory(protocolFactory)) def withClientId(clientId: thrift.ClientId): Client = configured(param.ClientId(Some(clientId))) def clientId: Option[thrift.ClientId] = params[param.ClientId].clientId } val client = Client() def newService( dest: Name, label: String ): Service[ThriftClientRequest, Array[Byte]] = client.newService(dest, label) def newClient( dest: Name, label: String ): ServiceFactory[ThriftClientRequest, Array[Byte]] = client.newClient(dest, label) @deprecated("Use `Thrift.client.withProtocolFactory`", "6.22.0") def withProtocolFactory(protocolFactory: TProtocolFactory): Client = client.withProtocolFactory(protocolFactory) @deprecated("Use `Thrift.client.withClientId`", "6.22.0") def withClientId(clientId: thrift.ClientId): Client = client.withClientId(clientId) object Server { private val preparer = new Stack.Module2[Label, param.ProtocolFactory, ServiceFactory[Array[Byte], Array[Byte]]] { val role = StackClient.Role.prepConn val description = "Prepare TTwitter thrift connection" def make( _label: Label, 
_pf: param.ProtocolFactory, next: ServiceFactory[Array[Byte], Array[Byte]] ) = { val Label(label) = _label val param.ProtocolFactory(pf) = _pf val preparer = new thrift.ThriftServerPreparer(pf, label) preparer.prepare(next) } } val stack: Stack[ServiceFactory[Array[Byte], Array[Byte]]] = StackServer.newStack .replace(StackServer.Role.preparer, preparer) } case class Server( stack: Stack[ServiceFactory[Array[Byte], Array[Byte]]] = Server.stack, params: Stack.Params = StackServer.defaultParams + ProtocolLibrary("thrift"), framed: Boolean = true ) extends StdStackServer[Array[Byte], Array[Byte], Server] with ThriftRichServer { protected def copy1( stack: Stack[ServiceFactory[Array[Byte], Array[Byte]]] = this.stack, params: Stack.Params = this.params ): Server = copy(stack, params) protected type In = Array[Byte] protected type Out = Array[Byte] protected val param.ProtocolFactory(protocolFactory) = params[param.ProtocolFactory] protected def newListener(): Listener[In, Out] = { val pipeline = if (framed) thrift.ThriftServerFramedPipelineFactory else thrift.ThriftServerBufferedPipelineFactory(protocolFactory) Netty3Listener("thrift", pipeline) } protected def newDispatcher( transport: Transport[In, Out], service: Service[Array[Byte], Array[Byte]] ) = new SerialServerDispatcher(transport, service) def withProtocolFactory(protocolFactory: TProtocolFactory): Server = configured(param.ProtocolFactory(protocolFactory)) def withBufferedTransport(): Server = copy(framed=false) } val server = Server() def serve( addr: SocketAddress, service: ServiceFactory[Array[Byte], Array[Byte]] ): ListeningServer = server.serve(addr, service) }
travisbrown/finagle
finagle-thrift/src/main/scala/com/twitter/finagle/Thrift.scala
Scala
apache-2.0
9,392
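A minimal sketch of wiring the client and server entry points defined above (the port and the byte-level echo service are made up; Thrift.serve and Thrift.client.newService come from this file, and real code would normally go through Scrooge-generated interfaces):

import com.twitter.finagle.{Service, Thrift}
import com.twitter.finagle.thrift.ThriftClientRequest
import com.twitter.util.Future
import java.net.InetSocketAddress

object ThriftWiringSketch {
  // A raw byte-level echo service, just to have something to serve.
  val echo = new Service[Array[Byte], Array[Byte]] {
    def apply(req: Array[Byte]): Future[Array[Byte]] = Future.value(req)
  }

  val server = Thrift.serve(new InetSocketAddress(9090), echo)

  // A client against the same address; a custom wire encoding could be injected
  // via Thrift.client.withProtocolFactory(...) before newService.
  val client: Service[ThriftClientRequest, Array[Byte]] =
    Thrift.client.newService("localhost:9090", "echo-client")
}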
package com.mz.training.domains.address

import com.mz.training.domains.EntityId
import spray.json.DefaultJsonProtocol._

/**
 * Created by zemi on 10/08/16.
 */
case class Address(id: Long, street: String, zip: String, houseNumber: String, city: String) extends EntityId

object Address {
  implicit val format = jsonFormat5(Address.apply)
}
michalzeman/angular2-training
akka-http-server/src/main/scala/com/mz/training/domains/address/Address.scala
Scala
mit
345
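With the jsonFormat5 instance above in implicit scope, spray-json can round-trip Address values. A small sketch, assuming spray-json on the classpath (the field values are made up):

import spray.json._

object AddressJsonDemo extends App {
  val addr = Address(1L, "Main Street", "01234", "7A", "Springfield")

  // toJson/convertTo come from spray-json's enrichments; Address.format is found implicitly.
  val js: JsValue = addr.toJson
  val parsed: Address = js.convertTo[Address]
  assert(parsed == addr)
}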
package com.softwaremill.codebrag.repository

import org.eclipse.jgit.revwalk.RevCommit
import com.softwaremill.codebrag.domain.{PartialCommitInfo, CommitInfo}
import org.joda.time.DateTime
import com.typesafe.scalalogging.slf4j.Logging

trait RawCommitsConverter {

  self: Repository with Logging =>

  def toPartialCommitInfos(jGitCommits: List[RevCommit]): List[PartialCommitInfo] = {
    toCommitInfos(jGitCommits).map(PartialCommitInfo(_))
  }

  def toCommitInfos(jGitCommits: List[RevCommit]): List[CommitInfo] = {
    jGitCommits.flatMap(buildCommitInfoSafely)
  }

  private def buildCommitInfoSafely(commit: RevCommit): Option[CommitInfo] = {
    try {
      Some(buildCommitInfo(commit))
    } catch {
      case e: Exception =>
        logger.error(s"Cannot import commit with ID ${commit.toObjectId.name()}. Skipping this one")
        logger.debug("Exception details", e)
        None
    }
  }

  private def buildCommitInfo(jGitCommit: RevCommit): CommitInfo = {
    CommitInfo(
      repoName = repoName,
      sha = jGitCommit.toObjectId.name(),
      message = jGitCommit.getFullMessage,
      authorName = jGitCommit.getAuthorIdent.getName,
      authorEmail = jGitCommit.getAuthorIdent.getEmailAddress,
      committerName = jGitCommit.getCommitterIdent.getName,
      committerEmail = jGitCommit.getCommitterIdent.getEmailAddress,
      authorDate = new DateTime(jGitCommit.getAuthorIdent.getWhen),
      commitDate = new DateTime(jGitCommit.getCommitTime * 1000L),
      jGitCommit.getParents.map(_.toObjectId.name()).toList)
  }
}
softwaremill/codebrag
codebrag-service/src/main/scala/com/softwaremill/codebrag/repository/RawCommitsConverter.scala
Scala
agpl-3.0
1,566
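buildCommitInfoSafely drops commits that fail to convert instead of aborting the whole import. The same skip-and-log shape in a dependency-free sketch (parse is a made-up converter standing in for buildCommitInfo):

import scala.util.{Failure, Success, Try}

object SkipOnFailureDemo extends App {
  // Made-up converter standing in for buildCommitInfo.
  def parse(raw: String): Int = raw.trim.toInt

  // flatMap over Option mirrors jGitCommits.flatMap(buildCommitInfoSafely):
  // one bad element is logged and skipped, the rest survive.
  def parseAll(raws: List[String]): List[Int] =
    raws.flatMap { raw =>
      Try(parse(raw)) match {
        case Success(n) => Some(n)
        case Failure(e) =>
          Console.err.println(s"Cannot import '$raw'. Skipping this one (${e.getMessage})")
          None
      }
    }

  assert(parseAll(List("1", "oops", "3")) == List(1, 3))
}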
/*
 * Copyright 2012 Eike Kettner
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.eknet.publet.auth

import org.apache.shiro.realm.AuthorizingRealm
import org.apache.shiro.authz.AuthorizationInfo
import org.apache.shiro.subject.PrincipalCollection
import org.apache.shiro.authc.{UsernamePasswordToken, AuthenticationToken}
import org.apache.shiro.authc.credential.SimpleCredentialsMatcher
import org.eknet.publet.auth.store.DefaultAuthStore
import com.google.inject.{Singleton, Inject}
import com.google.common.eventbus.Subscribe
import org.apache.shiro.cache.MemoryConstrainedCacheManager
import grizzled.slf4j.Logging

/**
 * @author Eike Kettner [email protected]
 * @since 22.04.12 08:14
 */
@Singleton
class UsersRealm @Inject() (val db: DefaultAuthStore, resolver: DefaultPermissionResolver)
  extends AuthorizingRealm with Logging {

  @Subscribe
  def clearCacheOnChange(event: AuthDataChanged) {
    Option(getAuthenticationCache).map(_.clear())
    Option(getAuthorizationCache).map(_.clear())
  }

  setCredentialsMatcher(new CompositeCredentialsMatcher(List(
    new DynamicHashCredentialsMatcher,
    new DigestCredentialsMatcher,
    new SimpleCredentialsMatcher
  )))
  setPermissionResolver(resolver)
  setCacheManager(new MemoryConstrainedCacheManager)
  setAuthenticationCachingEnabled(true)
  setAuthorizationCachingEnabled(true)

  override def supports(token: AuthenticationToken) = {
    token.isInstanceOf[DigestAuthenticationToken] || token.isInstanceOf[UsernamePasswordToken]
  }

  def doGetAuthenticationInfo(token: AuthenticationToken) = {
    val user = token.getPrincipal.toString
    db.findUser(user).map(u => new UserAuthcInfo(u, getName)).orNull
  }

  def doGetAuthorizationInfo(principals: PrincipalCollection): AuthorizationInfo = {
    val login = getAvailablePrincipal(principals).toString
    db.findUser(login).map(u => {
      val groups = db.getGroups(login)
      val dbperms = db.getAllUserPermissions(login)
      new PolicyAuthzInfo(u, groups, dbperms)
    }).orNull
  }
}
eikek/publet
auth/src/main/scala/org/eknet/publet/auth/UsersRealm.scala
Scala
apache-2.0
2,552
package org.scalaide.ui.internal.diagnostic

import org.eclipse.jface.dialogs.Dialog
import org.eclipse.jface.dialogs.IDialogConstants
import org.eclipse.swt.widgets.{ List => SWTList, _ }
import org.eclipse.swt.layout.RowLayout
import org.eclipse.swt.layout.GridLayout
import org.eclipse.swt.layout.GridData
import org.eclipse.ui.internal.layout.CellLayout
import org.eclipse.swt.SWT
import org.eclipse.swt.events.ModifyListener
import org.eclipse.swt.events.ModifyEvent
import org.eclipse.swt.events.SelectionAdapter
import org.eclipse.swt.events.SelectionListener
import org.eclipse.swt.events.SelectionEvent
import org.eclipse.core.runtime.Platform
import org.scalaide.ui.internal.actions.OpenExternalFile
import org.scalaide.core.internal.logging.LogManager
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.internal.project.ScalaInstallation.platformInstallation
import org.scalaide.core.SdtConstants
import org.scalaide.core.internal.ScalaPlugin

class ReportBugDialog(shell: Shell) extends Dialog(shell) {

  /** Overridden in order to set the title text. */
  override def configureShell(sh: Shell): Unit = {
    super.configureShell(sh)
    sh.setText("Bug Reporter")
  }

  protected override def isResizable = true

  protected override def createDialogArea(parent: Composite): Control = {
    val control = new Composite(parent, SWT.NONE)
    control.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true))
    control.setLayout(new GridLayout)

    val group1 = new Group(control, SWT.SHADOW_NONE)
    group1.setText("Installation details")
    group1.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, true))
    group1.setLayout(new GridLayout(1, false))

    val messageField = new Text(group1, SWT.READ_ONLY | SWT.MULTI | SWT.BORDER)
    messageField.setLayoutData(new GridData(GridData.GRAB_HORIZONTAL | GridData.HORIZONTAL_ALIGN_FILL))
    val cacheEntries = ScalaPlugin().classLoaderStore.entries
    val entries = cacheEntries.map(e => s"Compiler v. ${e._1.version.unparse}(${e._1.compiler.classJar})")
    messageField.setText(
      s"""|Scala IDE version:
          |  ${IScalaPlugin().getBundle.getVersion}
          |Scala compiler version:
          |  ${IScalaPlugin().scalaVersion.unparse}
          |Scala library version:
          |  ${platformInstallation.version.unparse}
          |Eclipse version:
          |  ${Platform.getBundle("org.eclipse.platform").getVersion}
          |Class loader store: ${cacheEntries.size} entries
          |  ${entries.mkString("\n\t")}
          |""".stripMargin)

    val group2 = new Group(control, SWT.SHADOW_NONE)
    group2.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false))
    group2.setLayout(new GridLayout(2, false))

    val logFileLink = new Link(group2, SWT.NONE)
    logFileLink.setText("<a>Check</a> the log")
    logFileLink.addListener(SWT.Selection, OpenExternalFile(LogManager.logFile))

    val reportBugLink = new Link(group2, SWT.NONE)
    reportBugLink.setText(s"""and <a href="${SdtConstants.IssueTracker}">report a bug</a>.""")
    reportBugLink.addListener(SWT.Selection, new LinkListener())

    val sveltoLink = new Link(group2, SWT.NONE)
    sveltoLink.setText(s"""Install <a href="${SdtConstants.SveltoHomepage}">svelto</a> to log thread dumps when the UI is unresponsive.""")
    sveltoLink.addListener(SWT.Selection, new LinkListener)
    sveltoLink.setLayoutData({
      val g = new GridData
      g.horizontalSpan = 2
      g
    })

    control
  }

  protected override def createButtonsForButtonBar(parent: Composite): Unit = {
    // create only OK button
    createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true)
  }
}
Kwestor/scala-ide
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/diagnostic/ReportBugDialog.scala
Scala
bsd-3-clause
3,739
package com.caibowen.prma.logger.jul

import java.util.logging.{LogRecord, Formatter, Handler}
import java.lang.StringBuilder

import com.caibowen.prma.api.EventAdaptor

/**
 * Created by Bowen Cai on 1/9/2015.
 */
class JsonFormatter(adaptor: EventAdaptor[LogRecord]) extends Formatter {

  override def getHead(h: Handler) = "{\"prmaLogEntries\":[\r\n"

  override def format(record: LogRecord): String =
    adaptor.from(record).appendJson(new StringBuilder(512), false).append("\r\n").toString

  override def getTail(h: Handler) = "] }"
}
xkommando/PRMA
logger/src/main/scala/com/caibowen/prma/logger/jul/JsonFormatter.scala
Scala
lgpl-3.0
544
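Installing the formatter on a stock JUL handler takes a few lines. A sketch, assuming the caller supplies some EventAdaptor[LogRecord] implementation (the logger name is arbitrary):

import java.util.logging.{ConsoleHandler, LogRecord, Logger}
import com.caibowen.prma.api.EventAdaptor

object JsonLoggingSetup {
  // The adaptor instance comes from the caller; EventAdaptor is PRMA's own abstraction.
  def installJsonLogging(adaptor: EventAdaptor[LogRecord]): Logger = {
    val handler = new ConsoleHandler()
    handler.setFormatter(new JsonFormatter(adaptor))

    val log = Logger.getLogger("demo")
    log.setUseParentHandlers(false) // avoid duplicate plain-text output via the root handler
    log.addHandler(handler)
    log // log.info(...) now emits one JSON entry between getHead and getTail
  }
}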
package me.snov.sns.api

import akka.actor.ActorRef
import akka.actor.Status.{Success, Failure}
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.pattern.ask
import akka.util.Timeout
import me.snov.sns.actor.SubscribeActor.{CmdListSubscriptions, CmdListSubscriptionsByTopic, CmdSubscribe, CmdUnsubscribe, CmdSetSubscriptionAttributes, CmdGetSubscriptionAttributes}
import me.snov.sns.model.Subscription
import me.snov.sns.response.SubscribeResponse

import scala.concurrent.ExecutionContext

object SubscribeApi {
  private val arnPattern = """([\w+_:-]{1,512})""".r

  def route(actorRef: ActorRef)(implicit timeout: Timeout, ec: ExecutionContext): Route = {
    pathSingleSlash {
      formField('Action ! "Subscribe") {
        formFields('Endpoint, 'Protocol, 'TopicArn) { (endpoint, protocol, topicArn) =>
          complete {
            (actorRef ? CmdSubscribe(topicArn, protocol, endpoint)).mapTo[Subscription] map {
              SubscribeResponse.subscribe
            }
          }
        } ~
        complete(HttpResponse(400, entity = "Endpoint, Protocol, TopicArn are required"))
      } ~
      formField('Action ! "ListSubscriptionsByTopic") {
        formField('TopicArn) {
          case arnPattern(topicArn) => complete {
            (actorRef ? CmdListSubscriptionsByTopic(topicArn)).mapTo[Iterable[Subscription]] map {
              SubscribeResponse.listByTopic
            }
          }
          case _ => complete(HttpResponse(400, entity = "Invalid topic ARN"))
        } ~
        complete(HttpResponse(400, entity = "TopicArn is missing"))
      } ~
      formField('Action ! "ListSubscriptions") {
        complete {
          (actorRef ? CmdListSubscriptions()).mapTo[Iterable[Subscription]] map {
            SubscribeResponse.list
          }
        }
      } ~
      formField('Action ! "Unsubscribe") {
        formField('SubscriptionArn) { (arn) =>
          complete {
            (actorRef ? CmdUnsubscribe(arn)).map {
              case Success => SubscribeResponse.unsubscribe
              case _ => HttpResponse(404, entity = "NotFound")
            }
          }
        } ~
        complete(HttpResponse(400, entity = "SubscriptionArn is missing"))
      } ~
      formField('Action ! "SetSubscriptionAttributes") {
        formField('SubscriptionArn, 'AttributeName, 'AttributeValue) { (arn, name, value) =>
          complete {
            (actorRef ? CmdSetSubscriptionAttributes(arn, name, value)).map {
              case Success => SubscribeResponse.setSubscriptionAttributes
              case Failure(ex) => HttpResponse(404, entity = "NotFound")
            }
          }
        } ~
        complete(HttpResponse(400, entity = "SubscriptionArn is missing"))
      } ~
      formField('Action ! "GetSubscriptionAttributes") {
        formField('SubscriptionArn) { (arn) =>
          complete {
            (actorRef ? CmdGetSubscriptionAttributes(arn)).mapTo[Option[Map[String, String]]] map { attributes =>
              attributes
                .map(SubscribeResponse.getSubscriptionAttributes)
                .getOrElse {
                  HttpResponse(404, entity = "Not Found")
                }
            }
          }
        } ~
        complete(HttpResponse(400, entity = "SubscriptionArn is missing"))
      }
    }
  }
}
s12v/sns
src/main/scala/me/snov/sns/api/SubscribeApi.scala
Scala
apache-2.0
3,493
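A hedged sketch of serving this route (the port and actor wiring are invented, and the real SubscribeActor constructor may take dependencies; Http().bindAndHandle matches the akka-http generation this code targets):

import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import akka.util.Timeout
import me.snov.sns.actor.SubscribeActor
import me.snov.sns.api.SubscribeApi
import scala.concurrent.duration._

object SnsServerSketch extends App {
  implicit val system = ActorSystem("sns")
  implicit val mat = ActorMaterializer()
  implicit val ec = system.dispatcher
  implicit val timeout = Timeout(5.seconds)

  // Props wiring here is assumed; the actor's real constructor args may differ.
  val subscribeActor = system.actorOf(Props[SubscribeActor], "subscribe")

  Http().bindAndHandle(SubscribeApi.route(subscribeActor), "0.0.0.0", 9911)
}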
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package wvlet.airframe import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context import scala.reflect.macros.{blackbox => sm} private[wvlet] object AirframeMacros { private[wvlet] class BindHelper[C <: Context](val c: C) { import c.universe._ def shouldGenerateTrait(t: c.Type): Boolean = { val a = t.typeSymbol // Find the public default constructor that has no arguments val hasPublicDefaultConstructor = t.members .find(_.isConstructor) .map(_.asMethod).exists { m => m.isPublic && m.paramLists.size == 1 && m.paramLists(0).size == 0 } val hasAbstractMethods = t.members.exists(x => x.isMethod && x.isAbstract && !x.isAbstractOverride) val isTaggedType = t.typeSymbol.fullName.startsWith("wvlet.airframe.surface.tag.") val isSealedType = t.typeSymbol.isClass && t.typeSymbol.asClass.isSealed val shouldInstantiateTrait = if (!a.isStatic) { // = Non static type // If X is non static type (= local class or trait), // we need to instantiate it first in order to populate its $outer variables // We cannot instantiate path-dependent types if (t.toString.contains("#")) { false } else { hasPublicDefaultConstructor } } else if (a.isAbstract) { // = Abstract type // We cannot build abstract type X that has abstract methods, so bind[X].to[ConcreteType] // needs to be found in the design // If there is no abstract methods, it might be a trait without any method !hasAbstractMethods } else { // We cannot instantiate any trait or class without the default constructor // So binding needs to be found in the Design. hasPublicDefaultConstructor } // Tagged type or sealed class binding should be found in Design !isTaggedType && !isSealedType && shouldInstantiateTrait } def bind(session: c.Tree, t: c.Type): c.Tree = { q"""{ val session = ${session} ${newInstanceBinder(t)}(session) }""" } def findSession: c.Tree = { q"""wvlet.airframe.Session.findSession(this)""" } def newInstanceBinder(t: c.Type): c.Tree = { if (shouldGenerateTrait(t)) { q"""{ ss : wvlet.airframe.Session => ss.getOrElse(${surfaceOf(t)}, (new $t with wvlet.airframe.DISupport { def session = ss }).asInstanceOf[$t] ) }""" } else { q"""{ session : wvlet.airframe.Session => session.get[$t](${surfaceOf(t)}) }""" } } def createNewInstanceOf(t: c.Type): c.Tree = { if (shouldGenerateTrait(t)) { q"""{ ss : wvlet.airframe.Session => ss.createNewInstanceOf(${surfaceOf(t)}, (new $t with wvlet.airframe.DISupport { def session = ss }).asInstanceOf[$t] ) }""" } else { q"""{ session : wvlet.airframe.Session => session.createNewInstanceOf[$t](${surfaceOf(t)}) }""" } } /** * Register a factory for generating a trait that can embed the current session. This step is necessary for * instantiating trait, which has no default constructor. 
* * This method will return the surface of t */ def registerTraitFactory(t: c.Type): c.Tree = { if (shouldGenerateTrait(t)) { q""" { val s = ${surfaceOf(t)} wvlet.airframe.getOrElseUpdateTraitFactoryCache(s, { ss: wvlet.airframe.Session => (new $t with wvlet.airframe.DISupport { def session = ss }).asInstanceOf[Any] } ) s } """ } else { q"""{${surfaceOf(t)}}""" } } def surfaceOf(t: c.Type): c.Tree = { q"wvlet.airframe.surface.Surface.of[$t]" } def provider1Binding[A: c.WeakTypeTag, D1: c.WeakTypeTag]( factory: c.Tree, singleton: Boolean, eager: Boolean ): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val ev1 = implicitly[c.WeakTypeTag[D1]].tpe q"""{ val self = ${c.prefix.tree} val d1 = ${registerTraitFactory(ev1)} import wvlet.airframe.Binder._ self.design.addBinding[${t}](ProviderBinding(DependencyFactory(self.from, Seq(d1), ${factory}), ${singleton}, ${eager}, self.sourceCode)) } """ } def provider2Binding[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag]( factory: c.Tree, singleton: Boolean, eager: Boolean ): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val ev1 = implicitly[c.WeakTypeTag[D1]].tpe val ev2 = implicitly[c.WeakTypeTag[D2]].tpe q"""{ val self = ${c.prefix.tree} val d1 = ${registerTraitFactory(ev1)} val d2 = ${registerTraitFactory(ev2)} import wvlet.airframe.Binder._ self.design.addBinding[${t}](ProviderBinding(DependencyFactory(self.from, Seq(d1, d2), ${factory}), ${singleton}, ${eager}, self.sourceCode)) } """ } def provider3Binding[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( factory: c.Tree, singleton: Boolean, eager: Boolean ): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val ev1 = implicitly[c.WeakTypeTag[D1]].tpe val ev2 = implicitly[c.WeakTypeTag[D2]].tpe val ev3 = implicitly[c.WeakTypeTag[D3]].tpe q"""{ val self = ${c.prefix.tree} val d1 = ${registerTraitFactory(ev1)} val d2 = ${registerTraitFactory(ev2)} val d3 = ${registerTraitFactory(ev3)} import wvlet.airframe.Binder._ self.design.addBinding[${t}](ProviderBinding(DependencyFactory(self.from, Seq(d1, d2, d3), ${factory}), ${singleton}, ${eager}, self.sourceCode)) } """ } def provider4Binding[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag]( factory: c.Tree, singleton: Boolean, eager: Boolean ): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val ev1 = implicitly[c.WeakTypeTag[D1]].tpe val ev2 = implicitly[c.WeakTypeTag[D2]].tpe val ev3 = implicitly[c.WeakTypeTag[D3]].tpe val ev4 = implicitly[c.WeakTypeTag[D4]].tpe q"""{ val self = ${c.prefix.tree} val d1 = ${registerTraitFactory(ev1)} val d2 = ${registerTraitFactory(ev2)} val d3 = ${registerTraitFactory(ev3)} val d4 = ${registerTraitFactory(ev4)} import wvlet.airframe.Binder._ self.design.addBinding[${t}](ProviderBinding(DependencyFactory(self.from, Seq(d1, d2, d3, d4), ${factory}), ${singleton}, ${eager}, self.sourceCode)) } """ } def provider5Binding[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ]( factory: c.Tree, singleton: Boolean, eager: Boolean ): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val ev1 = implicitly[c.WeakTypeTag[D1]].tpe val ev2 = implicitly[c.WeakTypeTag[D2]].tpe val ev3 = implicitly[c.WeakTypeTag[D3]].tpe val ev4 = implicitly[c.WeakTypeTag[D4]].tpe val ev5 = implicitly[c.WeakTypeTag[D5]].tpe q"""{ val self = ${c.prefix.tree} val d1 = ${registerTraitFactory(ev1)} val d2 = ${registerTraitFactory(ev2)} val d3 = ${registerTraitFactory(ev3)} val d4 = 
${registerTraitFactory(ev4)} val d5 = ${registerTraitFactory(ev5)} import wvlet.airframe.Binder._ self.design.addBinding[${t}](ProviderBinding(DependencyFactory(self.from, Seq(d1, d2, d3, d4, d5), ${factory}), ${singleton}, ${eager}, self.sourceCode)) } """ } def fullTypeNameOf(typeEv: c.Type): String = { typeEv match { case TypeRef(prefix, typeSymbol, args) => if (args.isEmpty) { typeSymbol.fullName } else { val typeArgs = args.map(fullTypeNameOf(_)).mkString(",") s"${typeSymbol.fullName}[${typeArgs}]" } case other => typeEv.typeSymbol.fullName } } } def registerTraitFactoryImpl[A: c.WeakTypeTag](c: sm.Context): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) h.registerTraitFactory(t) } def designBindImpl[A: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val __surface = ${h.registerTraitFactory(t)} new wvlet.airframe.Binder(${c.prefix}, __surface, ${sourceCode(c)}).asInstanceOf[wvlet.airframe.Binder[$t]] }""" } def designRemoveImpl[A: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val d = ${c.prefix} val target = ${h.surfaceOf(t)} new wvlet.airframe.Design(d.designOptions, d.binding.filterNot(_.from == target), d.hooks) } """ } def binderToImpl[B: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[B]].tpe val h = new BindHelper[c.type](c) q""" { val self = ${c.prefix} val to = ${h.registerTraitFactory(t)} self.design.addBinding[${t}](wvlet.airframe.Binder.ClassBinding(self.from, to, self.sourceCode)) }""" } def binderToSingletonOfImpl[B: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[B]].tpe val h = new BindHelper[c.type](c) q""" { val self = ${c.prefix.tree} val to = ${h.registerTraitFactory(t)} if(self.from == to) { wvlet.log.Logger("wvlet.airframe.Binder").warn("Binding to the same type is not allowed: " + to.toString) throw new wvlet.airframe.AirframeException.CYCLIC_DEPENDENCY(List(to), ${sourceCode(c)}) } self.design.addBinding[${t}](wvlet.airframe.Binder.SingletonBinding(self.from, to, false, self.sourceCode)) }""" } def binderToEagerSingletonOfImpl[B: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[B]].tpe val h = new BindHelper[c.type](c) q""" { val self = ${c.prefix.tree} val to = ${h.registerTraitFactory(t)} if(self.from == to) { wvlet.log.Logger("wvlet.airframe.Binder").warn("Binding to the same type is not allowed: " + to.toString) throw new wvlet.airframe.AirframeException.CYCLIC_DEPENDENCY(List(to), ${sourceCode(c)}) } self.design.addBinding[${t}](wvlet.airframe.Binder.SingletonBinding(self.from, to, true, self.sourceCode)) }""" } def bindToProvider1[A: c.WeakTypeTag, D1: c.WeakTypeTag](c: sm.Context)(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider1Binding[A, D1](factory, false, false) } def bindToProvider2[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider2Binding[A, D1, D2](factory, false, false) } def bindToProvider3[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider3Binding[A, D1, D2, D3](factory, false, false) } def bindToProvider4[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: 
c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider4Binding[A, D1, D2, D3, D4](factory, false, false) } def bindToProvider5[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider5Binding[A, D1, D2, D3, D4, D5](factory, false, false) } def bindToSingletonProvider1[A: c.WeakTypeTag, D1: c.WeakTypeTag](c: sm.Context)(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider1Binding[A, D1](factory, true, false) } def bindToSingletonProvider2[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider2Binding[A, D1, D2](factory, true, false) } def bindToSingletonProvider3[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider3Binding[A, D1, D2, D3](factory, true, false) } def bindToSingletonProvider4[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag ]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider4Binding[A, D1, D2, D3, D4](factory, true, false) } def bindToSingletonProvider5[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ](c: sm.Context)(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider5Binding[A, D1, D2, D3, D4, D5](factory, true, false) } def bindToEagerSingletonProvider1[A: c.WeakTypeTag, D1: c.WeakTypeTag](c: sm.Context)(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider1Binding[A, D1](factory, true, true) } def bindToEagerSingletonProvider2[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider2Binding[A, D1, D2](factory, true, true) } def bindToEagerSingletonProvider3[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider3Binding[A, D1, D2, D3](factory, true, true) } def bindToEagerSingletonProvider4[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag ]( c: sm.Context )(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider4Binding[A, D1, D2, D3, D4](factory, true, true) } def bindToEagerSingletonProvider5[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ](c: sm.Context)(factory: c.Tree): c.Tree = { val h = new BindHelper[c.type](c) h.provider5Binding[A, D1, D2, D3, D4, D5](factory, true, true) } /** * Used when Session location is known * * @param c * @tparam A * @return */ def buildImpl[A: c.WeakTypeTag](c: sm.Context): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe new BindHelper[c.type](c).bind(c.prefix.tree, t) } def buildWithSession[A: c.WeakTypeTag](c: sm.Context)(body: c.Expr[A => Any]): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe // Bind the code block to a local variable to resolve the issue #373 val e = q""" { val codeBlock = ${body} (${c.prefix}).withSession { session => val a = session.build[${t}] codeBlock(a) } } """ e } def runWithSession[A: c.WeakTypeTag, B: 
c.WeakTypeTag](c: sm.Context)(body: c.Expr[A => B]): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val b = implicitly[c.WeakTypeTag[B]].tpe // Bind the code block to a local variable to resolve the issue #373 val e = q""" { val codeBlock = ${body} (${c.prefix}).withSession { session => val a = session.build[${a}] codeBlock(a) } }.asInstanceOf[${b}] """ e } def addLifeCycle[A: c.WeakTypeTag](c: sm.Context): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} new wvlet.airframe.LifeCycleBinder(${c.prefix}.dep, ${h.surfaceOf(t)}, session) } """ } def addInitLifeCycle[A: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addInitHook(wvlet.airframe.lifecycle.EventHookHolder(${h.surfaceOf(t)}, dep, ${body})) dep } """ } def addInjectLifeCycle[A: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addInjectHook(wvlet.airframe.lifecycle.EventHookHolder(${h .surfaceOf(t)}, dep, ${body})) dep } """ } def addStartLifeCycle[A: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addStartHook(wvlet.airframe.lifecycle.EventHookHolder(${h .surfaceOf(t)}, dep, ${body})) dep } """ } def addStartLifeCycleForFactory[F: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[F]].tpe val i1 = t.typeArgs(0) val a = t.typeArgs(1) val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addStartHook(wvlet.airframe.lifecycle.EventHookHolder(${h .surfaceOf(t)}, dep, ${body})) dep } """ } def addPreShutdownLifeCycle[A: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addPreShutdownHook(wvlet.airframe.lifecycle.EventHookHolder(${h .surfaceOf(t)}, dep, ${body})) dep } """ } def addShutdownLifeCycle[A: c.WeakTypeTag](c: sm.Context)(body: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} val dep = ${c.prefix}.dep session.lifeCycleManager.addShutdownHook(wvlet.airframe.lifecycle.EventHookHolder(${h .surfaceOf(t)}, dep, ${body})) dep } """ } def bindImpl[A: c.WeakTypeTag](c: sm.Context): c.Tree = { val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) h.bind(h.findSession, t) } def bind0Impl[A: c.WeakTypeTag](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val t = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider) } """ } def bind1Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val h = new BindHelper[c.type](c) val t = implicitly[c.WeakTypeTag[A]].tpe val d1 = 
implicitly[c.WeakTypeTag[D1]].tpe val dep1 = h.newInstanceBinder(d1) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider($dep1(session))) } """ } def bind2Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val h = new BindHelper[c.type](c) val t = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider($dep1(session), $dep2(session))) } """ } def bind3Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val h = new BindHelper[c.type](c) val t = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider($dep1(session),$dep2(session),$dep3(session))) } """ } def bind4Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val h = new BindHelper[c.type](c) val t = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val d4 = implicitly[c.WeakTypeTag[D4]].tpe val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) val dep4 = h.newInstanceBinder(d4) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider($dep1(session),$dep2(session),$dep3(session),$dep4(session)) ) } """ } def bind5Impl[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val h = new BindHelper[c.type](c) val t = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val d4 = implicitly[c.WeakTypeTag[D4]].tpe val d5 = implicitly[c.WeakTypeTag[D5]].tpe val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) val dep4 = h.newInstanceBinder(d4) val dep5 = h.newInstanceBinder(d5) q"""{ val session = ${h.findSession} session.getOrElse(${h.surfaceOf(t)}, $provider($dep1(session),$dep2(session),$dep3(session),$dep4(session),$dep5(session)) ) } """ } def bindLocal0Impl[A: c.WeakTypeTag](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val h = new BindHelper[c.type](c) q"""{ val surface = ${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance(${provider}) val localSession = session.newSharedChildSession(newChildDesign) localSession.get[$a](surface) } """ } def bindLocal1Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag](c: sm.Context)(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val h = new BindHelper[c.type](c) val dep1 = h.newInstanceBinder(d1) q"""{ val surface = 
${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance($provider($dep1(session))) val localSession = session.newSharedChildSession(newChildDesign) localSession.get[$a](surface) } """ } def bindLocal2Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val h = new BindHelper[c.type](c) val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) q"""{ val surface = ${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance($provider($dep1(session), $dep2(session))) val localSession = session.newSharedChildSession(newChildDesign) localSession.get[$a](surface) } """ } def bindLocal3Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val h = new BindHelper[c.type](c) val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) q"""{ val surface = ${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance($provider($dep1(session), $dep2(session), $dep3(session))) val localSession = session.newSharedChildSession(newChildDesign) localSession.get[$a](surface) } """ } def bindLocal4Impl[A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val d4 = implicitly[c.WeakTypeTag[D4]].tpe val h = new BindHelper[c.type](c) val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) val dep4 = h.newInstanceBinder(d4) q"""{ val surface = ${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance($provider($dep1(session), $dep2(session), $dep3(session), $dep4(session))) val localSession = session.newSharedChildSession(newChildDesign) localSession.get[$a](surface) } """ } def bindLocal5Impl[ A: c.WeakTypeTag, D1: c.WeakTypeTag, D2: c.WeakTypeTag, D3: c.WeakTypeTag, D4: c.WeakTypeTag, D5: c.WeakTypeTag ]( c: sm.Context )(provider: c.Tree): c.Tree = { import c.universe._ val a = implicitly[c.WeakTypeTag[A]].tpe val d1 = implicitly[c.WeakTypeTag[D1]].tpe val d2 = implicitly[c.WeakTypeTag[D2]].tpe val d3 = implicitly[c.WeakTypeTag[D3]].tpe val d4 = implicitly[c.WeakTypeTag[D4]].tpe val d5 = implicitly[c.WeakTypeTag[D5]].tpe val h = new BindHelper[c.type](c) val dep1 = h.newInstanceBinder(d1) val dep2 = h.newInstanceBinder(d2) val dep3 = h.newInstanceBinder(d3) val dep4 = h.newInstanceBinder(d4) val dep5 = h.newInstanceBinder(d5) q"""{ val surface = ${h.surfaceOf(a)} val session = ${h.findSession} val newChildDesign = wvlet.airframe.newDesign.bind(surface).toLazyInstance($provider($dep1(session), $dep2(session), $dep3(session), $dep4(session), $dep5(session))) val localSession = session.newSharedChildSession(newChildDesign) 
localSession.get[$a](surface)
      }
      """
  }

  def bindFactoryImpl[F: c.WeakTypeTag](c: sm.Context): c.Tree = {
    import c.universe._
    import scala.language.higherKinds
    val t  = implicitly[c.WeakTypeTag[F]].tpe // F = Function[I1, A]
    val i1 = t.typeArgs(0)                    // I1
    val a  = t.typeArgs(1)                    // A
    val h  = new BindHelper[c.type](c)
    q"""{ i1: ${i1} =>
       val session = ${h.findSession}.newSharedChildSession(
         wvlet.airframe.newDesign.bind(${h.surfaceOf(i1)}).toLazyInstance(i1)
       )
       ${h.createNewInstanceOf(a)}(session)
      }
      """
  }

  def bindFactory2Impl[F: c.WeakTypeTag](c: sm.Context): c.Tree = {
    import c.universe._
    import scala.language.higherKinds
    val t  = implicitly[c.WeakTypeTag[F]].tpe // F = Function[(I1, I2), A]
    val i1 = t.typeArgs(0)                    // I1
    val i2 = t.typeArgs(1)                    // I2
    val a  = t.typeArgs(2)                    // A
    val h  = new BindHelper[c.type](c)
    q"""{ (i1: ${i1}, i2: ${i2}) =>
       val session = ${h.findSession}.newSharedChildSession(
         wvlet.airframe.newDesign
           .bind(${h.surfaceOf(i1)}).toLazyInstance(i1)
           .bind(${h.surfaceOf(i2)}).toLazyInstance(i2)
       )
       ${h.createNewInstanceOf(a)}(session)
      }
      """
  }

  def bindFactory3Impl[F: c.WeakTypeTag](c: sm.Context): c.Tree = {
    import c.universe._
    import scala.language.higherKinds
    val t  = implicitly[c.WeakTypeTag[F]].tpe // F = Function[(I1, I2, I3), A]
    val i1 = t.typeArgs(0)                    // I1
    val i2 = t.typeArgs(1)                    // I2
    val i3 = t.typeArgs(2)                    // I3
    val a  = t.typeArgs(3)                    // A
    val h  = new BindHelper[c.type](c)
    q"""{ (i1: ${i1}, i2: ${i2}, i3: ${i3}) =>
       val session = ${h.findSession}.newSharedChildSession(
         wvlet.airframe.newDesign
           .bind(${h.surfaceOf(i1)}).toLazyInstance(i1)
           .bind(${h.surfaceOf(i2)}).toLazyInstance(i2)
           .bind(${h.surfaceOf(i3)}).toLazyInstance(i3)
       )
       ${h.createNewInstanceOf(a)}(session)
      }
      """
  }

  def bindFactory4Impl[F: c.WeakTypeTag](c: sm.Context): c.Tree = {
    import c.universe._
    import scala.language.higherKinds
    val t  = implicitly[c.WeakTypeTag[F]].tpe // F = Function[(I1, I2, I3, I4), A]
    val i1 = t.typeArgs(0)                    // I1
    val i2 = t.typeArgs(1)                    // I2
    val i3 = t.typeArgs(2)                    // I3
    val i4 = t.typeArgs(3)                    // I4
    val a  = t.typeArgs(4)                    // A
    val h  = new BindHelper[c.type](c)
    q"""{ (i1: ${i1}, i2: ${i2}, i3: ${i3}, i4: ${i4}) =>
       val session = ${h.findSession}.newSharedChildSession(
         wvlet.airframe.newDesign
           .bind(${h.surfaceOf(i1)}).toLazyInstance(i1)
           .bind(${h.surfaceOf(i2)}).toLazyInstance(i2)
           .bind(${h.surfaceOf(i3)}).toLazyInstance(i3)
           .bind(${h.surfaceOf(i4)}).toLazyInstance(i4)
       )
       ${h.createNewInstanceOf(a)}(session)
      }
      """
  }

  def bindFactory5Impl[F: c.WeakTypeTag](c: sm.Context): c.Tree = {
    import c.universe._
    import scala.language.higherKinds
    val t  = implicitly[c.WeakTypeTag[F]].tpe // F = Function[(I1, I2, I3, I4, I5), A]
    val i1 = t.typeArgs(0)                    // I1
    val i2 = t.typeArgs(1)                    // I2
    val i3 = t.typeArgs(2)                    // I3
    val i4 = t.typeArgs(3)                    // I4
    val i5 = t.typeArgs(4)                    // I5
    val a  = t.typeArgs(5)                    // A
    val h  = new BindHelper[c.type](c)
    q"""{ (i1: ${i1}, i2: ${i2}, i3: ${i3}, i4: ${i4}, i5: ${i5}) =>
       val session = ${h.findSession}.newSharedChildSession(
         wvlet.airframe.newDesign
           .bind(${h.surfaceOf(i1)}).toLazyInstance(i1)
           .bind(${h.surfaceOf(i2)}).toLazyInstance(i2)
           .bind(${h.surfaceOf(i3)}).toLazyInstance(i3)
           .bind(${h.surfaceOf(i4)}).toLazyInstance(i4)
           .bind(${h.surfaceOf(i5)}).toLazyInstance(i5)
       )
       ${h.createNewInstanceOf(a)}(session)
      }
      """
  }

  def sourceCode(c: sm.Context): c.Tree = {
    import c.universe._
    c.internal.enclosingOwner
    val pos = c.enclosingPosition
    q"wvlet.airframe.SourceCode(${pos.source.path}, ${pos.source.file.name}, ${pos.line}, ${pos.column})"
  }
}
wvlet/airframe
airframe-di-macros/src/main/scala-2/wvlet/airframe/AirframeMacros.scala
Scala
apache-2.0
33616
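For context, a minimal sketch of how these macros are exercised from user code, assuming Airframe's classic Scala 2 in-trait binding DSL (`DbConfig`, `Database`, and `MyApp` are hypothetical names, not part of the library):

import wvlet.airframe._

case class DbConfig(host: String)       // hypothetical config type
class Database(val config: DbConfig)    // hypothetical component

trait MyApp {
  // bind[A] expands (via the bind*Impl macros above) into
  // session.getOrElse(Surface.of[A], ...)
  val config: DbConfig = bind[DbConfig]

  // bindFactory[I1 => A] expands via bindFactoryImpl: each invocation opens a
  // shared child session with I1 bound to the supplied instance.
  val dbFactory: DbConfig => Database = bindFactory[DbConfig => Database]
}

// newDesign.bind[DbConfig].toInstance(DbConfig("localhost")).build[MyApp] { app =>
//   val replica = app.dbFactory(DbConfig("replica-host"))  // child session per call
// }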
package org.jetbrains.plugins.scala.lang.psi.types.api import org.jetbrains.plugins.scala.extensions.PsiClassExt import org.jetbrains.plugins.scala.lang.psi.ElementScope import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.{AfterUpdate, Update} import org.jetbrains.plugins.scala.lang.psi.types.{ConstraintsResult, ScParameterizedType, ScType, ScTypeExt, ConstraintSystem, ScalaType} import org.jetbrains.plugins.scala.project.ProjectContext case class JavaArrayType(argument: ScType) extends ValueType { override implicit def projectContext: ProjectContext = argument.projectContext def getParameterizedType(implicit elementScope: ElementScope): Option[ValueType] = { elementScope.getCachedClasses("scala.Array").collect { case clazz: ScClass => clazz }.find(_.getTypeParameters.length == 1) .map(ScalaType.designator) .map(ScParameterizedType(_, Seq(argument))) } override def removeAbstracts = JavaArrayType(argument.removeAbstracts) override def updateSubtypes(updates: Array[Update], index: Int, visited: Set[ScType]): JavaArrayType = { JavaArrayType(argument.recursiveUpdateImpl(updates, index, visited)) } override def updateSubtypesVariance(update: (ScType, Variance) => AfterUpdate, variance: Variance = Covariant, revertVariances: Boolean = false) (implicit visited: Set[ScType]): ScType = JavaArrayType(argument.recursiveVarianceUpdate(update, Invariant)) override def equivInner(`type`: ScType, constraints: ConstraintSystem, falseUndef: Boolean): ConstraintsResult = `type` match { case JavaArrayType(thatArgument) => argument.equiv(thatArgument, constraints, falseUndef) case ParameterizedType(designator, arguments) if arguments.length == 1 => designator.extractClass match { case Some(td) if td.qualifiedName == "scala.Array" => argument.equiv(arguments.head, constraints, falseUndef) case _ => ConstraintsResult.Left } case _ => ConstraintsResult.Left } override def visitType(visitor: TypeVisitor): Unit = visitor.visitJavaArrayType(this) override def typeDepth: Int = argument.typeDepth }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/types/api/JavaArrayType.scala
Scala
apache-2.0
2333
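equivInner above treats a Java array type as equivalent to scala.Array[T] with an equivalent element type. The same rule on a self-contained toy type model (illustrative only; these are not intellij-scala's classes):

sealed trait Ty
case object IntTy extends Ty
case class JavaArray(argument: Ty) extends Ty                        // mirrors JavaArrayType
case class Parameterized(designator: String, args: List[Ty]) extends Ty

def equiv(a: Ty, b: Ty): Boolean = (a, b) match {
  case (JavaArray(x), JavaArray(y))                           => equiv(x, y)
  case (JavaArray(x), Parameterized("scala.Array", y :: Nil)) => equiv(x, y)
  case (Parameterized("scala.Array", x :: Nil), JavaArray(y)) => equiv(x, y)
  case _                                                      => a == b
}

// equiv(JavaArray(IntTy), Parameterized("scala.Array", List(IntTy)))  // true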
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.mllib.stat.Statistics
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint

// Load the sampled Stack Overflow data in LIBSVM format (spark-shell provides `sc`).
val x = MLUtils.loadLibSVMFile (sc, "kaggle/stackoverflow-train-sample-replace-text-by-length-numeric.libsvm")

// Standardize each feature to zero mean and unit variance.
val summary = Statistics.colStats (x.map {case LabeledPoint (l, f) => f})
val means = summary.mean.toArray
val sds = summary.variance.toArray.map {u => Math.sqrt (u)}
val y = x.map {case LabeledPoint (l, f) =>
  LabeledPoint (l, Vectors.dense (((f.toArray zip means) zip sds).map {case ((u, v), w) => (u - v)/w}))}

// 50/50 train/test split (toDF relies on the implicits pre-imported by spark-shell).
val data = y.toDF
val splits = data.randomSplit(Array(0.5, 0.5), seed = 1L)
val train = splits(0)
val test = splits(1)

import org.apache.spark.ml.classification.MultilayerPerceptronClassifier

// Network topology: 5 input features, one hidden layer of 14 units, 2 output classes.
// val layers = Array (5, 2)   // alternative: no hidden layer
val layers = Array (5, 14, 2)
val mlpc = new MultilayerPerceptronClassifier ().setLayers (layers).setMaxIter (100)
val model = mlpc.fit (train)
val test_result = model.transform (test)

import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator

val predictionAndLabels = test_result.select ("prediction", "label")
// Default metric for this evaluator is F1.
val evaluator = new MulticlassClassificationEvaluator ()
evaluator.evaluate (test_result)
robert-dodier/PDXScala-2015
stackoverflow_mlpc.scala
Scala
gpl-2.0
1254
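The evaluator call above reports the default metric (F1). A hedged follow-up for inspecting other metrics; metric names differ across Spark releases ("precision" in 1.x, "accuracy" from 2.0), so the strings below are assumptions about the version in use:

import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator

val f1 = new MulticlassClassificationEvaluator().setMetricName("f1").evaluate(test_result)
val prec = new MulticlassClassificationEvaluator().setMetricName("precision").evaluate(test_result)
println(s"f1 = $f1, precision = $prec")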
package scredis.io import java.util.concurrent.{CountDownLatch, TimeUnit} import akka.actor._ import com.typesafe.scalalogging.LazyLogging import scredis.protocol.{AuthConfig, Request} import scredis.protocol.requests.ConnectionRequests.{Auth, Quit, Select} import scredis.protocol.requests.ServerRequests.{ClientSetName, Shutdown} import scala.concurrent.ExecutionContext import scala.concurrent.duration._ abstract class AbstractAkkaConnection( protected val system: ActorSystem, val host: String, val port: Int, @volatile protected var authOpt: Option[AuthConfig], @volatile protected var database: Int, @volatile protected var nameOpt: Option[String], protected val decodersCount: Int, protected val receiveTimeoutOpt: Option[FiniteDuration], protected val connectTimeout: FiniteDuration, protected val maxWriteBatchSize: Int, protected val tcpSendBufferSizeHint: Int, protected val tcpReceiveBufferSizeHint: Int, protected val akkaListenerDispatcherPath: String, protected val akkaIODispatcherPath: String, protected val akkaDecoderDispatcherPath: String ) extends Connection with LazyLogging { private val shutdownLatch = new CountDownLatch(1) @volatile protected var isShuttingDown = false override implicit val dispatcher: ExecutionContext = system.dispatcher protected val listenerActor: ActorRef protected def updateState(request: Request[_]): Unit = request match { case Auth(password, username) => authOpt = Some(AuthConfig(username, password)) case Select(db) => database = db case ClientSetName(name) => if (name.isEmpty) { nameOpt = None } else { nameOpt = Some(name) } case Quit() | Shutdown(_) => logger.info(s"Shutting down connection to ${host}:${port}") isShuttingDown = true case _ => } protected def getAuthOpt: Option[AuthConfig] = authOpt protected def getDatabase: Int = database protected def getNameOpt: Option[String] = nameOpt protected def watchTermination(): Unit = system.actorOf( Props( classOf[WatchActor], listenerActor, shutdownLatch ) ) /** * Waits for all the internal actors to be shutdown. * * @note This method is usually called after issuing a QUIT command * * @param timeout amount of time to wait */ def awaitTermination(timeout: Duration = Duration.Inf): Unit = { if (timeout.isFinite) { shutdownLatch.await(timeout.toMillis, TimeUnit.MILLISECONDS) } else { shutdownLatch.await() } } def isTerminated: Boolean = shutdownLatch.getCount == 0 } class WatchActor(actor: ActorRef, shutdownLatch: CountDownLatch) extends Actor with ActorLogging { context.watch(actor) def receive: Receive = { case Terminated(_) => log.info("AkkaConnection actor terminated {}", actor) shutdownLatch.countDown() context.stop(self) } }
scredis/scredis
src/main/scala/scredis/io/AbstractAkkaConnection.scala
Scala
apache-2.0
2939
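A minimal sketch exercising the WatchActor/latch handshake above in isolation; a standalone ActorSystem and an empty actor stand in for the real listener, so this is illustrative rather than a supported scredis API:

import java.util.concurrent.CountDownLatch
import akka.actor._
import scredis.io.WatchActor

val system = ActorSystem("watch-demo")
val latch  = new CountDownLatch(1)
val probe  = system.actorOf(Props.empty)              // stands in for listenerActor
system.actorOf(Props(classOf[WatchActor], probe, latch))

system.stop(probe)   // WatchActor receives Terminated(probe) ...
latch.await()        // ... and the latch trips, mirroring awaitTermination()
// system.terminate()   // (shutdown() on older Akka versions)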
package scala.reflect package generic trait Scopes { self: Universe => abstract class AbsScope extends Iterable[Symbol] { def enter(sym: Symbol): Symbol } type Scope <: AbsScope def newScope(): Scope }
cran/rkafkajars
java/scala/reflect/generic/Scopes.scala
Scala
apache-2.0
224
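AbsScope only fixes the enter/iterate contract; a minimal in-memory implementation sketch against the same cake (SimpleScopes and ListScope are illustrative names):

import scala.reflect.generic.{Scopes, Universe}

trait SimpleScopes extends Scopes { self: Universe =>

  class ListScope extends AbsScope {
    private val entries = scala.collection.mutable.ListBuffer.empty[Symbol]
    def enter(sym: Symbol): Symbol = { entries += sym; sym }
    def iterator: Iterator[Symbol] = entries.iterator
  }
}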
package com.softwaremill.clippy

import scala.reflect.internal.util.Position
import scala.tools.nsc.reporters.Reporter

class FailOnWarningsReporter(r: Reporter, warningMatcher: String => Option[Warning], colorsConfig: ColorsConfig)
    extends Reporter {

  override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) = {
    val wrapped = DelegatingPosition.wrap(pos, colorsConfig)
    // cannot delegate to info0 as it's protected, hence special-casing on the possible severity values
    if (severity == INFO) {
      r.info(wrapped, msg, force)
    } else if (severity == WARNING) {
      warning(wrapped, msg)
    } else if (severity == ERROR) {
      error(wrapped, msg)
    } else {
      error(wrapped, s"UNKNOWN SEVERITY: $severity\n$msg")
    }
  }

  override def echo(msg: String)                   = r.echo(msg)
  override def comment(pos: Position, msg: String) = r.comment(DelegatingPosition.wrap(pos, colorsConfig), msg)

  override def hasErrors = r.hasErrors || cancelled

  override def reset() = {
    cancelled = false
    r.reset()
  }

  //
  override def echo(pos: Position, msg: String) = r.echo(DelegatingPosition.wrap(pos, colorsConfig), msg)

  override def errorCount   = r.errorCount
  override def warningCount = r.warningCount
  override def hasWarnings  = r.hasWarnings
  override def flush()      = r.flush()

  override def count(severity: Severity): Int       = r.count(conv(severity))
  override def resetCount(severity: Severity): Unit = r.resetCount(conv(severity))

  //
  private def conv(s: Severity): r.Severity = s match {
    case INFO    => r.INFO
    case WARNING => r.WARNING
    case ERROR   => r.ERROR
  }

  //
  override def warning(pos: Position, msg: String) = {
    val wrapped = DelegatingPosition.wrap(pos, colorsConfig)
    warningMatcher(msg) match {
      case Some(Warning(_, adviceOpt)) =>
        val finalMsg = adviceOpt.map(advice => msg + s"\nClippy advises: $advice").getOrElse(msg)
        r.error(wrapped, finalMsg)
      case None =>
        r.warning(wrapped, msg)
    }
  }

  override def error(pos: Position, msg: String) = {
    val wrapped = DelegatingPosition.wrap(pos, colorsConfig)
    r.error(wrapped, msg)
  }
}
softwaremill/scala-clippy
plugin/src/main/scala/com/softwaremill/clippy/FailOnWarningsReporter.scala
Scala
apache-2.0
2351
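The core pattern above, a delegating reporter that escalates matched warnings to errors, in a self-contained sketch with toy types, since scalac's Reporter is unwieldy to demo (MiniReporter and EscalatingReporter are hypothetical):

trait MiniReporter {
  def warning(msg: String): Unit
  def error(msg: String): Unit
}

class EscalatingReporter(underlying: MiniReporter, promote: String => Option[String])
    extends MiniReporter {
  // A matched warning is re-issued as an error, with any advice appended,
  // mirroring FailOnWarningsReporter.warning above.
  def warning(msg: String): Unit = promote(msg) match {
    case Some(advice) => underlying.error(msg + "\nClippy advises: " + advice)
    case None         => underlying.warning(msg)
  }
  def error(msg: String): Unit = underlying.error(msg)
}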
/*
 * (c) Copyright 2016 Hewlett Packard Enterprise Development LP
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package cogx.runtime.checkpoint

import cogx.compiler.parser.syntaxtree.{ColorActuator, Field}

/** Factory for a pipelined color actuator class; creates instances from user code or from a stored parameter string.
  *
  * @author Dick Carter
  */
object SimplePipelinedTestColorActuator {

  /** The factory method for this actuator. */
  private def apply(source: Field, initState: Int): ColorActuator = {

    /** The smallest value supplied by the next iterator returned by this actuator. */
    var state = initState

    // Note that we subtract 1 from `state`, since we want to repeat the last iterator that supplied the current state.
    /** The parameters that would restore this actuator to its current state. */
    def parameters = (state - 1).toString

    def reset() { state = initState }

    def updateAndCheck(it: Iterator[Byte]): Unit = {
      var expected = state
      while (it.hasNext) {
        val actual = it.next & 0xff
        require(expected == actual, s"Data mismatch: expected $expected, saw $actual.")
        if (state > 0) expected += 1
      }
      state += 1
    }

    new ColorActuator(source, updateAndCheck _, reset _) {
      override def restoreParameters = parameters
      // The default restoringClass object instance would identify this as an anonymous subclass of a (pipelined) Actuator.
      // We override this here to point to the SimplePipelinedTestColorActuator factory object (so the restore method will be found).
      override def restoringClass = SimplePipelinedTestColorActuator
    }
  }

  /** The factory method used to create a pipelined actuator. */
  def apply(source: Field): ColorActuator = apply(source, 0)

  /** The factory method used to create a pipelined actuator from its stored parameter string. */
  def restore(source: Field, parameterString: String) = {
    require(source.fieldType.tensorShape.points == 3,
      "Expecting ColorField as input to ColorActuator, found " + source.fieldType)
    val parameters = parameterString.split(" ")
    require(parameters.length == 1, "Expecting 1 parameter, found " + parameters.length)
    val initState = parameters(0).toInt
    apply(source, initState)
  }
}
hpe-cct/cct-core
src/test/scala/cogx/runtime/checkpoint/SimplePipelinedTestColorActuator.scala
Scala
apache-2.0
2827
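The checkpoint contract used above (restoreParameters emits a string that a restore factory parses back) reduces to a save/restore round trip. A generic sketch, independent of cct-core's field types (Counter is hypothetical):

final class Counter(initState: Int) {
  private var state = initState
  def step(): Unit = { state += 1 }
  // Same convention as `parameters` above: persist state - 1 so that the
  // restored instance repeats the last step that produced the current state.
  def parameters: String = (state - 1).toString
}

object Counter {
  def restore(parameterString: String): Counter = new Counter(parameterString.toInt)
}

// val c = new Counter(0); c.step(); c.step()
// val c2 = Counter.restore(c.parameters)   // resumes one step back, by design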
package org.sierra import org.sierra.command.RedisCommand1d1 import redis.clients.jedis.Jedis import shapeless.HList import shapeless.HNil case class Path(redisKey: String) { def /(key: String): Path = Path(redisKey + ":" + key) def on(keys: String*): Path = Path((redisKey +: keys).mkString(":")) } case class LongKey(prefix: String = "") extends QKey[Long] { override def toKey(q: Long): String = prefix + q.toString } case class PKey[A](f: A => String) extends QKey[A] { def toKey(q: A): String = f(q) } trait QKey[T] { def toKey(q: T): String } case class QPath[+H <: HList, +M](path: Path, qKeys: H, invoker: Path => M) { def build: M = invoker(path) } case class QPathBuilder[+A <: HList, +M](hList: A, invoker: (Path) => M) { def ::[B](qKey: QKey[B]): QPathBuilder[shapeless.::[QKey[B], A], M] = QPathBuilder(qKey :: hList, invoker) def ::(path: Path): QPath[A, M] = QPath(path, hList, invoker) } package object api { import scala.language.higherKinds implicit class QPathResolver[T, +H <: HList, K](qp: QPath[shapeless.::[QKey[T], H], K]) { def /(q: T) = QPath( qp.path./(qp.qKeys.head.toKey(q)), qp.qKeys.tail, qp.invoker) } implicit class RedisCommandIntelijHelper[M[_], C, B](command: RedisCommand1d1[M, C, B]) { def <<:[K >: C](qp: QPath[HNil, M[K]])(implicit client: Jedis): B = on(qp) def on[K >: C](qp: QPath[HNil, M[K]])(implicit client: Jedis): B = command.execute(qp.build) } }
naoh87/sierra
src/main/scala/org/sierra/Path.scala
Scala
mit
1487
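How the Path DSL above composes Redis keys, derived directly from its definitions (script-style usage):

import org.sierra._

val base = Path("app") / "users"        // Path("app:users")
val full = base.on("42", "profile")     // Path("app:users:42:profile")

LongKey("user-").toKey(42L)             // "user-42"
PKey[Boolean](b => if (b) "t" else "f").toKey(true)   // "t"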