code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package breeze.numerics import breeze.optimize._ import breeze.generic.UFunc import breeze.linalg._ import breeze.linalg.support.CanTraverseValues import breeze.linalg.support.CanTraverseValues.ValuesVisitor import breeze.macros.expand import breeze.math._ import spire.implicits._ package object financial { sealed class PaymentTime(val t: Int) case object Start extends PaymentTime(1) case object End extends PaymentTime(0) def futureValue(rate: Double, numPeriods: Int, payment:Double, presentValue: Double, when: PaymentTime = End):Double = { require(numPeriods >= 0) if (rate == 0) { -1*(presentValue+payment*numPeriods) } else { val fromPv = presentValue * math.pow(1.0+rate, numPeriods) val fromPayments = payment*((1.0+rate*when.t)/rate)*(math.pow(1.0+rate, numPeriods)-1.0) -1*(fromPv + fromPayments) } } def presentValue(rate: Double, numPeriods: Int, payment:Double, futureValue: Double, when: PaymentTime = End):Double = { require(numPeriods >= 0) if (rate == 0) { -1*(futureValue+payment*numPeriods) } else { val denominator = math.pow(1.0+rate, numPeriods) val fromPayments = payment*((1.0+rate*when.t)/rate)*(math.pow(1.0+rate, numPeriods)-1.0) -1*(futureValue + fromPayments) / denominator } } object netPresentValue extends UFunc { @expand implicit def reduce[@expand.args(Double, Float, Int) Scalar, T](implicit iter: CanTraverseValues[T, Scalar], @expand.sequence[Scalar](0.0, 0.0f, 0) zero: Scalar): Impl2[Double, T, Double] = new Impl2[Double, T, Double] { def apply(rate: Double, revenueStream: T): Double = { val visit = new ValuesVisitor[Scalar] { final val decayConst: Double = 1.0/(1.0 + rate) var decayUntilNow: Double = 1.0 var sum: Double = 0.0 def visit(a: Scalar): Unit = { sum += decayUntilNow*a decayUntilNow *= decayConst } def zeros(numZero: Int, zeroValue: Scalar): Unit = () } iter.traverse(revenueStream, visit) visit.sum } } } def payment(rate: Double, numPeriods: Int, presentValue: Double, futureValue: Double = 0.0, when: PaymentTime = End):Double = { if (rate == 0) { -1*(futureValue+presentValue) / numPeriods } else { val denominator = ((1.0+rate*when.t)/rate)*(math.pow(1.0+rate, numPeriods)-1.0) -1*(futureValue + presentValue * math.pow(1.0+rate, numPeriods)) / denominator } } def principalInterest(rate: Double, numPeriods: Int, presentValue: Double, futureValue: Double = 0.0, when: PaymentTime = End): (DenseVector[Double],DenseVector[Double], DenseVector[Double]) = { if (when == Start) { throw new IllegalArgumentException("This method is broken for payment at the start of the period!") } val pmt = payment(rate, numPeriods, presentValue, futureValue, when) val interestPayment = DenseVector.zeros[Double](numPeriods) val principalPayment = DenseVector.zeros[Double](numPeriods) val principalRemaining = DenseVector.zeros[Double](numPeriods) var principal = presentValue var interest = presentValue*rate cfor(0)(i => i < numPeriods, i => i+1)(i => { val ip = -1*math.max(interest, 0) interest += ip principal += (pmt - ip) principalRemaining.unsafeUpdate(i, principal) interestPayment.unsafeUpdate(i, ip) principalPayment.unsafeUpdate(i, pmt-ip) interest += (principal+interest)*rate }) (principalPayment, interestPayment, principalRemaining) } def interestPayments(rate: Double, numPeriods: Int, presentValue: Double, futureValue: Double = 0.0, when: PaymentTime = End): DenseVector[Double] = principalInterest(rate, numPeriods, presentValue, futureValue, when)._1 def principalPayments(rate: Double, numPeriods: Int, presentValue: Double, futureValue: Double = 0.0, when: PaymentTime = End): DenseVector[Double] = principalInterest(rate, numPeriods, presentValue, futureValue, when)._2 def principalRemaining(rate: Double, numPeriods: Int, presentValue: Double, futureValue: Double = 0.0, when: PaymentTime = End): DenseVector[Double] = principalInterest(rate, numPeriods, presentValue, futureValue, when)._3 private def roots(coeffs: DenseVector[Double]) = { val coeffsArray = coeffs.toArray; val trailingZeros = coeffsArray.indexWhere(0 != _); val tailZerosIdx = coeffsArray.lastIndexWhere(0 != _) val nonZeroCoeffs = coeffs.slice(trailingZeros, tailZerosIdx + 1) val N = nonZeroCoeffs.length - 1; val complexRoots = if (0 < N) { val A = DenseMatrix.zeros[Double](N, N); //fill the 1th diagnal below the main diagnal with ones val downDiagIdxs = for(i <-(1 until N)) yield (i, i - 1) A(downDiagIdxs) := 1.0 A(0 until 1, ::) := nonZeroCoeffs(1 to N) :/ -nonZeroCoeffs(0) val rootEig = eig(A) val nonZeroEigNum = rootEig.eigenvalues.length; val complexEig = DenseVector.zeros[Complex](nonZeroEigNum) for (i <- 0 until nonZeroEigNum) { complexEig(i) = Complex(rootEig.eigenvalues(i), rootEig.eigenvaluesComplex(i)) } complexEig } else { DenseVector.zeros[Complex](N + 1) } //pading 0 to the end val fullRoots = if (0 < trailingZeros) { DenseVector.vertcat(complexRoots, DenseVector.zeros[Complex](trailingZeros)) } else { complexRoots } fullRoots } def interalRateReturn(cashflow: DenseVector[Double]): Option[Double]= { require(cashflow(0) < 0, "Input cash flows per time period. The cashflow(0) represent the initial invesment which should be negative!") val res = roots(reverse(cashflow)) val realRes = DenseVector[Double]( for(c:Complex <- res.toArray if ((c.im() == 0) && (0 < c.re()))) yield c.re() ) val rates = realRes.mapValues(v =>1.0 / v - 1.0) val rate = if (rates.length <= 0) { None } else { Option[Double](rates(argmin(abs(rates)))) } rate } def modifiedInternalRateReturn(values:DenseVector[Double], financeRate:Double, reinvestRate:Double = 0) = { val n = values.length var posCnt:Int = values.valuesIterator.count(0 < _) val positives = values.mapValues(x => if (0 < x) x else 0) var negCnt:Int = values.valuesIterator.count(_ < 0) val negatives = values.mapValues(x => if (x < 0) x else 0) if (posCnt == 0 || negCnt == 0) { throw new IllegalArgumentException("The values must has one positive and negative value!") } val inflowNPV:Double = netPresentValue(reinvestRate, positives) val outflowNPV:Double = netPresentValue(financeRate, negatives) val mirr = (pow(math.abs(inflowNPV/outflowNPV), (1.0 / (n-1))) * (1.0 + reinvestRate) - 1.0) mirr } def numberPeriodicPayments(rate:Double, pmt:Double, pv:Double, fv:Double = 0.0, when:PaymentTime = End) = { require(pmt != 0, "The payment of annuity(pmt) can not be zero!") val nper = if (0 == rate) { (-fv + pv)/pmt; } else { val z = pmt*(1.0 + rate*when.t)/rate log((z - fv)/(z + pv))/log(1.0 + rate) } nper } def ratePeriodicPayments(nper:Double, pmt:Double, pv:Double, fv:Double, when:PaymentTime = End, guess:Double = 0.1, tol:Double = 1E-06, maxiter:Int = 100) = { var rate = guess; var iter = 0 var close = false while(iter < maxiter && !close) { val nextRate = rate - annuityFDivGradf(nper, pmt, pv, fv, when, rate) val diff = abs(nextRate - rate) close = diff < tol iter += 1 rate = nextRate } if (close) Option[Double](rate) else None } //f(annuity)/f'(annuity) private def annuityFDivGradf(nper: Double, pmt: Double, pv: Double, fv: Double, when: PaymentTime, rate: Double) = { val t1 = pow(1.0 + rate, nper) val t2 = pow(1.0 + rate, nper - 1.0) val annuityF = fv + pv * t1 + pmt * (t1 - 1) * (1.0 + rate * when.t) / rate val gradAnnuityF = nper * t2 * pv - pmt * (t1 - 1.0) * (1.0 + rate * when.t) / pow(rate, 2.0) + nper * pmt * t2 * (1.0 + rate * when.t) / rate + pmt * (t1 - 1) * when.t / rate val fDivGradF = annuityF/gradAnnuityF fDivGradF } }
calippo/breeze
math/src/main/scala/breeze/numerics/financial/package.scala
Scala
apache-2.0
8,283
package es.weso.monads import org.scalatest._ import org.scalatest.prop.PropertyChecks import org.scalatest.prop.Checkers import es.weso.monads.Result._ import Stream._ class ResultSpec extends FunSpec with Matchers with Checkers { describe("a Result") { it("Should return a value") { val bm : Result[Int] = unit(2) bm.run should be (2 #:: Stream.empty) } it("Should fail") { val bm : Result[Int] = failure("example failed") intercept[ResultException]{ bm.run } } it("Should recover from fail") { val bm : Result[Int] = failure("example failed").orelse(unit(2)) bm.run should be (List(2).toStream) } it("Should be able to use flatMap") { val bm : Result[Int] = // for (x <- unit(2)) yield (x + 1) unit(2) flatMap { x => unit(x + 1)} bm.run should be (List(3).toStream) } it("Should be able to use for's") { val bm : Result[Int] = for (x <- unit(2)) yield x + 1 bm.run should be (List(3).toStream) } it("Should be able to use for to generate a pair") { val bm : Result[(Int,Int)] = for ( x <- unit(2) ; y <- unit(x + 1) ) yield (x,y) bm.run should be (List((2,3)).toStream) } it("A sequence with a fail...fails") { val u1 : Result[Int] = unit(1) val f : Result[Int] = failure("fail") val bm = for (x <- u1; y <- f) yield x + y bm.isFailure should be(true) } describe("pass all") { it("should pass all...if all pass") { val xs = List(1,2,3) val current = true def eval(x:Int,b:Boolean) = if (x > 0 && b) unit(true) else failure("x <= 0") passAll(xs,current,eval).isValid should be(true) } it("should not pass all...if one does not pass") { val xs = List(1,-2,3) val current = true def eval(x:Int,b:Boolean) = if (x > 0 && b) unit(true) else failure("x <= 0") passAll(xs,current,eval).isFailure should be(true) } } describe("pass Some") { it("should pass some...if all pass") { val xs = List(1,2,3) def eval(x:Int) = if (x > 0 ) unit(true) else failure("x <= 0") passSome(xs,eval).isValid should be(true) } it("should pass some...if one pass") { val xs = List(-1,2,-3) def eval(x:Int) = if (x > 0) unit(true) else failure("x <= 0") passSome(xs,eval).isValid should be(true) } it("should not pass ...if none pass") { val xs = List(-1,-2,-3) def eval(x:Int) = if (x > 0) unit(true) else failure("x <= 0") passSome(xs,eval).isValid should be(false) } } describe("Or else") { it("A sequence with a fail and orelse...recovers") { val u1 : Result[Int] = unit(1) val f : Result[Int] = failure("fail") val u2 : Result[Int] = unit(2) val bm1 = for (x <- u1; y <- f) yield (x + y) val bm2 = bm1 orelse u2 bm1.isFailure should be(true) bm2.isFailure should be(false) bm2.run should be (List((2)).toStream) } } describe("Merge") { it("Should merge two computations") { val comp1 : Result[Int] = Passed(Stream(1,2,3)) val comp2 : Result[Int] = Passed(Stream(4,5,6)) def comb(x:Int,y:Int) = x + y merge(comp1,comp2,comb) should be(Passed(Stream(5,6,7,6,7,8,7,8,9))) } it("Should merge a computation with values and the basic computation") { val comp1 : Result[Int] = Passed(Stream(1,2,3)) val comp2 : Result[Int] = Passed(Stream(0)) def comb(x:Int,y:Int) = x + y merge(comp1,comp2,comb) should be(Passed(Stream(1,2,3))) } it("Should merge a computation with values and a Failure computation") { val comp1 : Result[Int] = Passed(Stream(1,2,3)) val comp2 : Result[Int] = Failure("hi") def comb(x:Int,y:Int) = x + y merge(comp1,comp2,comb) should be(Passed(Stream(1,2,3))) } it("Should merge a computation with values and an empty computation") { val comp1 : Result[Int] = Passed(Stream(1,2,3)) val comp2 : Result[Int] = Passed(Stream(0)) def comb(x:Int,y:Int) = x + y merge(comp1,comp2,comb) should be(Passed(Stream(1,2,3))) } } describe("Combine All") { it("Should combine two computations") { val ls : List[Int] = List(1,2) def eval(n:Int):Result[String] = { Passed({(for (i <- 0 to n) yield i.toString).toStream })} def comb(x:String,y:String) = x + y combineAll(ls,eval,"0",comb) should be(Passed(Stream("000","010","020","100","110","120"))) } it("Should combine three computations even if one fails") { val ls : List[Int] = List(1,-1,2) def eval(n:Int):Result[String] = { if (n > 0) Passed({(for (i <- 0 to n) yield i.toString).toStream }) else Failure("neg") } def comb(x:String,y:String) = x + y combineAll(ls,eval,"0",comb) should be(Passed(Stream("000","010","020","100","110","120"))) } } describe("parts of a set") { it("pSet of 1,2") { val bm = parts(Set(1,2)) bm.run should be (List((Set(1,2),Set()),(Set(1),Set(2)),(Set(2),Set(1)),(Set(),Set(1,2))).toStream) } it("pSet of 1,2,3") { val bm = parts(Set(1,2,3)) bm.run.toSet should be ( Set((Set(1,2,3),Set()), (Set(1,2),Set(3)), (Set(1,3),Set(2)), (Set(2,3),Set(1)), (Set(1),Set(2,3)), (Set(2),Set(1,3)), (Set(3),Set(1,2)), (Set(),Set(1,2,3)) )) } } } }
labra/wiGenerator
src/test/scala/es/weso/monads/ResultSpec.scala
Scala
mit
5,580
import java.lang.annotation.ElementType object SCL8359 { class A[T] object A { implicit class B[T](a: A[T]) { def foo = 1 } } val e: A[ElementType] = new A /*start*/e.foo/*end*/ } //Int
katejim/intellij-scala
testdata/typeInference/bugs5/SCL8359.scala
Scala
apache-2.0
211
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.cloudml.zen.ml.clustering.algorithm import java.util.concurrent.ConcurrentLinkedQueue import breeze.linalg.{DenseVector => BDV, SparseVector => BSV} import breeze.numerics._ import com.github.cloudml.zen.ml.clustering.LDADefines._ import com.github.cloudml.zen.ml.clustering.{LDALogLikelihood, LDAPerplexity} import com.github.cloudml.zen.ml.util.BVDecompressor import com.github.cloudml.zen.ml.util.Concurrent._ import org.apache.spark.graphx2.impl.{ShippableVertexPartition => VertPartition, _} import scala.collection.JavaConversions._ import scala.collection.mutable import scala.concurrent.Future import scala.language.existentials abstract class LDAAlgorithm(numTopics: Int, numThreads: Int) extends Serializable { protected val dscp = numTopics >>> 3 def isByDoc: Boolean def samplePartition(numPartitions: Int, sampIter: Int, seed: Int, topicCounters: BDV[Count], numTokens: Long, numTerms: Int, alpha: Double, alphaAS: Double, beta: Double) (pid: Int, ep: EdgePartition[TA, Nvk]): EdgePartition[TA, Int] def countPartition(ep: EdgePartition[TA, Int]): Iterator[NvkPair] def aggregateCounters(vp: VertPartition[TC], cntsIter: Iterator[NvkPair]): VertPartition[TC] def perplexPartition(topicCounters: BDV[Count], numTokens: Long, numTerms: Int, alpha: Double, alphaAS: Double, beta: Double) (ep: EdgePartition[TA, Nvk]): (Double, Double, Double) def logLikelihoodPartition(topicCounters: BDV[Count], numTokens: Long, alpha: Double, beta: Double, alphaAS: Double) (vp: VertPartition[TC]): (Double, Double) def initEdgePartition(ep: EdgePartition[TA, _]): EdgePartition[TA, Int] = { ep.withVertexAttributes(new Array[Int](ep.vertexAttrs.length)) } def sampleGraph(edges: EdgeRDDImpl[TA, _], verts: VertexRDDImpl[TC], topicCounters: BDV[Count], seed: Int, sampIter: Int, numTokens: Long, numTerms: Int, alpha: Double, alphaAS: Double, beta: Double): EdgeRDDImpl[TA, Int] = { val newEdges = refreshEdgeAssociations(edges, verts) val numPartitions = newEdges.partitions.length val spf = samplePartition(numPartitions, sampIter, seed, topicCounters, numTokens, numTerms, alpha, alphaAS, beta) _ val partRDD = newEdges.partitionsRDD.mapPartitions(_.map { case (pid, ep) => val startedAt = System.nanoTime val newEp = spf(pid, ep) val elapsedSeconds = (System.nanoTime - startedAt) / 1e9 println(s"Partition sampling $sampIter takes: $elapsedSeconds secs") (pid, newEp) }, preservesPartitioning=true) newEdges.withPartitionsRDD(partRDD) } def updateVertexCounters(edges: EdgeRDDImpl[TA, Int], verts: VertexRDDImpl[TC]): VertexRDDImpl[TC] = { val shippedCounters = edges.partitionsRDD.mapPartitions(_.flatMap { case (_, ep) => countPartition(ep) }).partitionBy(verts.partitioner.get) // Below identical map is used to isolate the impact of locality of CheckpointRDD val isoRDD = verts.partitionsRDD.mapPartitions(_.seq, preservesPartitioning=true) val partRDD = isoRDD.zipPartitions(shippedCounters, preservesPartitioning=true)( (vpIter, cntsIter) => vpIter.map(aggregateCounters(_, cntsIter)) ) verts.withPartitionsRDD(partRDD) } def calcPerplexity(edges: EdgeRDDImpl[TA, _], verts: VertexRDDImpl[TC], topicCounters: BDV[Count], numTokens: Long, numTerms: Int, alpha: Double, alphaAS: Double, beta: Double): LDAPerplexity = { val newEdges = refreshEdgeAssociations(edges, verts) val ppf = perplexPartition(topicCounters, numTokens, numTerms, alpha, alphaAS, beta) _ val sumPart = newEdges.partitionsRDD.mapPartitions(_.map { case (_, ep) => ppf(ep) }) val (llht, wllht, dllht) = sumPart.collect().unzip3 val pplx = math.exp(-llht.par.sum / numTokens) val wpplx = math.exp(-wllht.par.sum / numTokens) val dpplx = math.exp(-dllht.par.sum / numTokens) new LDAPerplexity(pplx, wpplx, dpplx) } def calcLogLikelihood(verts: VertexRDDImpl[TC], topicCounters: BDV[Count], numTokens: Long, numDocs: Long, numTerms: Int, alpha: Double, alphaAS: Double, beta: Double): LDALogLikelihood = { val alphaSum = alpha * numTopics val betaSum = beta * numTerms val lpf = logLikelihoodPartition(topicCounters, numTokens, alpha, beta, alphaAS) _ val sumPart = verts.partitionsRDD.mapPartitions(_.map(lpf)) val (wllht, dllht) = sumPart.collect().unzip val normWord = Range(0, numTopics).par.map(i => lgamma(topicCounters(i) + betaSum)).sum val wllh = wllht.par.sum + numTopics * lgamma(betaSum) - normWord val dllh = dllht.par.sum + numDocs * lgamma(alphaSum) new LDALogLikelihood(wllh, dllh) } def refreshEdgeAssociations(edges: EdgeRDDImpl[TA, _], verts: VertexRDDImpl[TC]): EdgeRDDImpl[TA, Nvk] = { val shippedVerts = verts.partitionsRDD.mapPartitions(_.flatMap { vp => val rt = vp.routingTable val index = vp.index val values = vp.values val alls = mutable.Buffer[Future[Iterator[(Int, VertexAttributeBlock[TC])]]]() implicit val es = initExecutionContext(numThreads) Range(0, rt.numEdgePartitions).grouped(numThreads).flatMap { batch => val all = Future.traverse(batch.iterator) { pid => withFuture { val vids = rt.routingTable(pid)._1 val attrs = vids.map(vid => values(index.getPos(vid))) (pid, new VertexAttributeBlock(vids, attrs)) }} alls += all withAwaitResult(all) } ++ { withAwaitReadyAndClose(Future.sequence(alls.iterator)) Iterator.empty } }).partitionBy(edges.partitioner.get) // Below identical map is used to isolate the impact of locality of CheckpointRDD val isoRDD = edges.partitionsRDD.mapPartitions(_.seq, preservesPartitioning=true) val partRDD = isoRDD.zipPartitions(shippedVerts, preservesPartitioning=true)( (epIter, vabsIter) => epIter.map(Function.tupled((pid, ep) => { val g2l = ep.global2local val results = new Array[Nvk](ep.vertexAttrs.length) val thq = new ConcurrentLinkedQueue(1 to numThreads) val decomps = Array.fill(numThreads)(new BVDecompressor(numTopics)) implicit val es = initExecutionContext(numThreads) val all = Future.traverse(vabsIter) { case (_, vab) => withFuture { val thid = thq.poll() - 1 try { val decomp = decomps(thid) vab.iterator.foreach { case (vid, vdata) => results(g2l(vid)) = decomp.CV2BV(vdata) } } finally { thq.add(thid + 1) } }} withAwaitReadyAndClose(all) (pid, ep.withVertexAttributes(results)) })) ) edges.withPartitionsRDD(partRDD) } def collectTopicCounters(verts: VertexRDDImpl[TC]): BDV[Count] = { verts.partitionsRDD.mapPartitions(_.map { vp => val totalSize = vp.capacity val index = vp.index val mask = vp.mask val values = vp.values val sizePerthrd = { val npt = totalSize / numThreads if (npt * numThreads == totalSize) npt else npt + 1 } implicit val es = initExecutionContext(numThreads) val all = Range(0, numThreads).map { thid => withFuture { val decomp = new BVDecompressor(numTopics) val startPos = sizePerthrd * thid val endPos = math.min(sizePerthrd * (thid + 1), totalSize) val agg = new BDV(new Array[Count](numTopics)) var pos = mask.nextSetBit(startPos) while (pos < endPos && pos >= 0) { if (isTermId(index.getValue(pos))) { val bv = decomp.CV2BV(values(pos)) bv match { case v: BDV[Count] => agg :+= v case v: BSV[Count] => agg :+= v } } pos = mask.nextSetBit(pos + 1) } agg }} withAwaitResultAndClose(Future.reduce(all)(_ :+= _)) }).collect().par.reduce(_ :+= _) } }
cloudml/zen
ml/src/main/scala/com/github/cloudml/zen/ml/clustering/algorithm/LDAAlgorithm.scala
Scala
apache-2.0
8,873
object Test { trait A trait B trait Builder[From, To] { def buildFrom(x: From): To } implicit val a2bBuilder = new Builder[A, B] { override def buildFrom(x: A) = new B{} } implicit def a2b[From, To >: B](x: From)(implicit bl: Builder[From, To]): To = bl.buildFrom(x) def f(b: B) = println(b) def main(args: Array[String]) { val a: A = new A f(/*start*/a/*end*/) } } /* Seq(a2b, any2ArrowAssoc, any2Ensuring, any2stringadd, any2stringfmt), Some(a2b) */
ilinum/intellij-scala
testdata/implicits/implicitParameter/ImplicitWithImplicitParameter.scala
Scala
apache-2.0
491
package dotty.tools package dotc package typer import core._ import ast._ import Contexts._, Types._, Flags._, Symbols._ import ProtoTypes._ import NameKinds.{AvoidNameKind, UniqueName} import util.Spans._ import util.{Stats, SimpleIdentityMap} import Decorators._ import config.Printers.{gadts, typr} import annotation.tailrec import collection.mutable import scala.annotation.internal.sharable import config.Printers.gadts object Inferencing { import tpd._ /** Is type fully defined, meaning the type does not contain wildcard types * or uninstantiated type variables. As a side effect, this will minimize * any uninstantiated type variables, according to the given force degree, * but only if the overall result of `isFullyDefined` is `true`. * Variables that are successfully minimized do not count as uninstantiated. */ def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = { val nestedCtx = ctx.fresh.setNewTyperState() val result = try new IsFullyDefinedAccumulator(force)(using nestedCtx).process(tp) catch case ex: StackOverflowError => false // can happen for programs with illegal recusions, e.g. neg/recursive-lower-constraint.scala if (result) nestedCtx.typerState.commit() result } /** Try to fully define `tp`. Return whether constraint has changed. * Any changed constraint is kept. */ def canDefineFurther(tp: Type)(using Context): Boolean = val prevConstraint = ctx.typerState.constraint isFullyDefined(tp, force = ForceDegree.failBottom) && (ctx.typerState.constraint ne prevConstraint) /** The fully defined type, where all type variables are forced. * Throws an error if type contains wildcards. */ def fullyDefinedType(tp: Type, what: String, span: Span)(using Context): Type = if (isFullyDefined(tp, ForceDegree.all)) tp else throw new Error(i"internal error: type of $what $tp is not fully defined, pos = $span") // !!! DEBUG /** Instantiate selected type variables `tvars` in type `tp` in a special mode: * 1. If a type variable is constrained from below (i.e. constraint bound != given lower bound) * it is minimized. * 2. Otherwise, if the type variable is constrained from above, it is maximized. * 3. Otherwise, if the type variable has a lower bound != Nothing, it is minimized. * 4. Otherwise, if the type variable has an upper bound != Any, it is maximized. * If none of (1) - (4) applies, the type variable is left uninstantiated. * The method is called to instantiate type variables before an implicit search. */ def instantiateSelected(tp: Type, tvars: List[Type])(using Context): Unit = if (tvars.nonEmpty) IsFullyDefinedAccumulator( ForceDegree.Value(tvars.contains, IfBottom.flip), minimizeSelected = true ).process(tp) /** Instantiate any type variables in `tp` whose bounds contain a reference to * one of the parameters in `paramss`. */ def instantiateDependent(tp: Type, paramss: List[List[Symbol]])(using Context): Unit = { val dependentVars = new TypeAccumulator[Set[TypeVar]] { def apply(tvars: Set[TypeVar], tp: Type) = tp match { case tp: TypeVar if !tp.isInstantiated && TypeComparer.bounds(tp.origin) .namedPartsWith(ref => paramss.exists(_.contains(ref.symbol))) .nonEmpty => tvars + tp case _ => foldOver(tvars, tp) } } val depVars = dependentVars(Set(), tp) if (depVars.nonEmpty) instantiateSelected(tp, depVars.toList) } /** If `tp` is top-level type variable with a lower bound in the current constraint, * instantiate it from below. We also look for TypeVars in other places where * their instantiation could uncover new type members. However that search is best * effort only. It might miss type variables that appear in structures involving * alias types and type projections. * @param applied Test is done in a `tryInsertImplicitOnQualifier` application. * In this case, we always try to instantiate TypeVars in type arguments. * If `applied` is false, we only try that in arguments that may affect * the result type. */ def couldInstantiateTypeVar(tp: Type, applied: Boolean = false)(using Context): Boolean = tp.dealias match case tvar: TypeVar if !tvar.isInstantiated && ctx.typerState.constraint.contains(tvar) && tvar.hasLowerBound => tvar.instantiate(fromBelow = true) true case AppliedType(tycon, args) => // The argument in `args` that may potentially appear directly as result // and thereby influence the members of this type def argsInResult: List[Type] = tycon.stripTypeVar match case tycon: TypeRef => tycon.info match case MatchAlias(_) => args case TypeBounds(_, upper: TypeLambda) => upper.resultType match case ref: TypeParamRef if ref.binder == upper => args.lazyZip(upper.paramRefs).collect { case (arg, pref) if pref eq ref => arg }.toList case _ => Nil case _ => Nil case _ => Nil couldInstantiateTypeVar(tycon, applied) || (if applied then args else argsInResult).exists(couldInstantiateTypeVar(_, applied)) case RefinedType(parent, _, _) => couldInstantiateTypeVar(parent, applied) case tp: AndOrType => couldInstantiateTypeVar(tp.tp1, applied) || couldInstantiateTypeVar(tp.tp2, applied) case AnnotatedType(tp, _) => couldInstantiateTypeVar(tp, applied) case _ => false /** The accumulator which forces type variables using the policy encoded in `force` * and returns whether the type is fully defined. The direction in which * a type variable is instantiated is determined as follows: * 1. T is minimized if the constraint over T is only from below (i.e. * constrained lower bound != given lower bound and * constrained upper bound == given upper bound). * 2. T is maximized if the constraint over T is only from above (i.e. * constrained upper bound != given upper bound and * constrained lower bound == given lower bound). * * If (1) and (2) do not apply, and minimizeSelected is set: * 3. T is minimized if it has a lower bound (different from Nothing) in the * current constraint (the bound might come from T's declaration). * 4. Otherwise, T is maximized if it has an upper bound (different from Any) * in the currented constraint (the bound might come from T's declaration). * 5. Otherwise, T is not instantiated at all. * If (1) and (2) do not apply, and minimizeSelected is not set: * 6: T is maximized if it appears only contravariantly in the given type, * or if forceDegree is `flipBottom` and T has no lower bound different from Nothing. * 7. Otherwise, T is minimized. * * The instantiation for (6) and (7) is done in two phases: * 1st Phase: Try to instantiate minimizable type variables to * their lower bound. Record whether successful. * 2nd Phase: If first phase was successful, instantiate all remaining type variables * to their upper bound. */ private class IsFullyDefinedAccumulator(force: ForceDegree.Value, minimizeSelected: Boolean = false) (using Context) extends TypeAccumulator[Boolean] { private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = { val inst = tvar.instantiate(fromBelow) typr.println(i"forced instantiation of ${tvar.origin} = $inst") inst } private var toMaximize: List[TypeVar] = Nil def apply(x: Boolean, tp: Type): Boolean = tp.dealias match { case _: WildcardType | _: ProtoType => false case tvar: TypeVar if !tvar.isInstantiated => force.appliesTo(tvar) && ctx.typerState.constraint.contains(tvar) && { val direction = instDirection(tvar.origin) if minimizeSelected then if direction <= 0 && tvar.hasLowerBound then instantiate(tvar, fromBelow = true) else if direction >= 0 && tvar.hasUpperBound then instantiate(tvar, fromBelow = false) // else hold off instantiating unbounded unconstrained variable else if direction != 0 then instantiate(tvar, fromBelow = direction < 0) else if variance >= 0 && (force.ifBottom == IfBottom.ok || tvar.hasLowerBound) then instantiate(tvar, fromBelow = true) else if variance >= 0 && force.ifBottom == IfBottom.fail then return false else toMaximize = tvar :: toMaximize foldOver(x, tvar) } case tp => foldOver(x, tp) } def process(tp: Type): Boolean = // Maximize type vars in the order they were visited before */ def maximize(tvars: List[TypeVar]): Unit = tvars match case tvar :: tvars1 => maximize(tvars1) if !tvar.isInstantiated then instantiate(tvar, fromBelow = false) case nil => apply(true, tp) && ( toMaximize.isEmpty || { maximize(toMaximize) toMaximize = Nil // Do another round since the maximixing instances process(tp) // might have type uninstantiated variables themselves. } ) } def approximateGADT(tp: Type)(using Context): Type = { val map = new ApproximateGadtAccumulator val res = map(tp) assert(!map.failed) res } /** Approximates a type to get rid of as many GADT-constrained abstract types as possible. */ private class ApproximateGadtAccumulator(using Context) extends TypeMap { var failed = false /** GADT approximation proceeds differently from type variable approximation. * * Essentially, what we're doing is we're inferring a type ascription that * will remove as many GADT-constrained types as possible. This means that * we want to approximate type T to type S in such a way that no matter how * GADT-constrained types are instantiated, T <: S. In other words, the * relationship _necessarily_ must hold. * * We accomplish that by: * - replacing covariant occurences with upper GADT bound * - replacing contravariant occurences with lower GADT bound * - leaving invariant occurences alone * * Examples: * - If we have GADT cstr A <: Int, then for all A <: Int, Option[A] <: Option[Int]. * Therefore, we can approximate Option[A] ~~ Option[Int]. * - If we have A >: S <: T, then for all such A, A => A <: S => T. This * illustrates that it's fine to differently approximate different * occurences of same type. * - If we have A <: Int and F <: [A] => Option[A] (note the invariance), * then we should approximate F[A] ~~ Option[A]. That is, we should * respect the invariance of the type constructor. * - If we have A <: Option[B] and B <: Int, we approximate A ~~ * Option[B]. That is, we don't recurse into already approximated * types. Since GADT approximation is (for now) only used for member * selection, this behaviour is expected, as nested types cannot affect * member selection (note that given/extension lookup doesn't need GADT * approx, see gadt-approximation-interaction.scala). */ def apply(tp: Type): Type = tp.dealias match { case tp @ TypeRef(qual, nme) if variance != 0 && ctx.gadt.contains(tp.symbol) => val sym = tp.symbol val res = ctx.gadt.approximation(sym, fromBelow = variance < 0) gadts.println(i"approximated $tp ~~ $res") res case _: WildcardType | _: ProtoType => failed = true NoType case tp => mapOver(tp) } def process(tp: Type): Type = { apply(tp) } } /** For all type parameters occurring in `tp`: * If the bounds of `tp` in the current constraint are equal wrt =:=, * instantiate the type parameter to the lower bound's approximation * (approximation because of possible F-bounds). */ def replaceSingletons(tp: Type)(using Context): Unit = { val tr = new TypeTraverser { def traverse(tp: Type): Unit = { tp match { case param: TypeParamRef => val constraint = accCtx.typerState.constraint constraint.entry(param) match { case TypeBounds(lo, hi) if (hi frozen_<:< lo) => val inst = TypeComparer.approximation(param, fromBelow = true) typr.println(i"replace singleton $param := $inst") accCtx.typerState.constraint = constraint.replace(param, inst) case _ => } case _ => } traverseChildren(tp) } } tr.traverse(tp) } /** If `tree` has a type lambda type, infer its type parameters by comparing with expected type `pt` */ def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match { case tl: TypeLambda => val (tl1, tvars) = constrained(tl, tree) var tree1 = AppliedTypeTree(tree.withType(tl1), tvars) tree1.tpe <:< pt fullyDefinedType(tree1.tpe, "template parent", tree.span) tree1 case _ => tree } def isSkolemFree(tp: Type)(using Context): Boolean = !tp.existsPart(_.isInstanceOf[SkolemType]) /** The list of uninstantiated type variables bound by some prefix of type `T` which * occur in at least one formal parameter type of a prefix application. * Considered prefixes are: * - The function `f` of an application node `f(e1, .., en)` * - The function `f` of a type application node `f[T1, ..., Tn]` * - The prefix `p` of a selection `p.f`. * - The result expression `e` of a block `{s1; .. sn; e}`. */ def tvarsInParams(tree: Tree, locked: TypeVars)(using Context): List[TypeVar] = { @tailrec def boundVars(tree: Tree, acc: List[TypeVar]): List[TypeVar] = tree match { case Apply(fn, _) => boundVars(fn, acc) case TypeApply(fn, targs) => val tvars = targs.filter(_.isInstanceOf[InferredTypeTree]).tpes.collect { case tvar: TypeVar if !tvar.isInstantiated && ctx.typerState.ownedVars.contains(tvar) && !locked.contains(tvar) => tvar } boundVars(fn, acc ::: tvars) case Select(pre, _) => boundVars(pre, acc) case Block(_, expr) => boundVars(expr, acc) case _ => acc } @tailrec def occurring(tree: Tree, toTest: List[TypeVar], acc: List[TypeVar]): List[TypeVar] = if (toTest.isEmpty) acc else tree match { case Apply(fn, _) => fn.tpe.widen match { case mtp: MethodType => val (occ, nocc) = toTest.partition(tvar => mtp.paramInfos.exists(tvar.occursIn)) occurring(fn, nocc, occ ::: acc) case _ => occurring(fn, toTest, acc) } case TypeApply(fn, targs) => occurring(fn, toTest, acc) case Select(pre, _) => occurring(pre, toTest, acc) case Block(_, expr) => occurring(expr, toTest, acc) case _ => acc } occurring(tree, boundVars(tree, Nil), Nil) } /** The instantiation direction for given poly param computed * from the constraint: * @return 1 (maximize) if constraint is uniformly from above, * -1 (minimize) if constraint is uniformly from below, * 0 if unconstrained, or constraint is from below and above. */ private def instDirection(param: TypeParamRef)(using Context): Int = { val constrained = TypeComparer.fullBounds(param) val original = param.binder.paramInfos(param.paramNum) val cmp = TypeComparer val approxBelow = if (!cmp.isSubTypeWhenFrozen(constrained.lo, original.lo)) 1 else 0 val approxAbove = if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0 approxAbove - approxBelow } /** Following type aliases and stripping refinements and annotations, if one arrives at a * class type reference where the class has a companion module, a reference to * that companion module. Otherwise NoType */ def companionRef(tp: Type)(using Context): Type = tp.underlyingClassRef(refinementOK = true) match { case tp: TypeRef => val companion = tp.classSymbol.companionModule if (companion.exists) companion.termRef.asSeenFrom(tp.prefix, companion.owner) else NoType case _ => NoType } /** Instantiate undetermined type variables so that type `tp` is maximized. * @return The list of type symbols that were created * to instantiate undetermined type variables that occur non-variantly */ def maximizeType(tp: Type, span: Span, fromScala2x: Boolean)(using Context): List[Symbol] = { Stats.record("maximizeType") val vs = variances(tp) val patternBindings = new mutable.ListBuffer[(Symbol, TypeParamRef)] vs foreachBinding { (tvar, v) => if !tvar.isInstantiated then if (v == 1) tvar.instantiate(fromBelow = false) else if (v == -1) tvar.instantiate(fromBelow = true) else { val bounds = TypeComparer.fullBounds(tvar.origin) if (bounds.hi <:< bounds.lo || bounds.hi.classSymbol.is(Final) || fromScala2x) tvar.instantiate(fromBelow = false) else { // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies. // Instead, we simultaneously add them later on. val wildCard = newPatternBoundSymbol(UniqueName.fresh(tvar.origin.paramName), bounds, span, addToGadt = false) tvar.instantiateWith(wildCard.typeRef) patternBindings += ((wildCard, tvar.origin)) } } } val res = patternBindings.toList.map { (boundSym, _) => // substitute bounds of pattern bound variables to deal with possible F-bounds for (wildCard, param) <- patternBindings do boundSym.info = boundSym.info.substParam(param, wildCard.typeRef) boundSym } // We add the created symbols to GADT constraint here. if (res.nonEmpty) ctx.gadt.addToConstraint(res) res } type VarianceMap = SimpleIdentityMap[TypeVar, Integer] /** All occurrences of type vars in `tp` that satisfy predicate * `include` mapped to their variances (-1/0/1) in both `tp` and * `pt.finalResultType`, where * -1 means: only contravariant occurrences * +1 means: only covariant occurrences * 0 means: mixed or non-variant occurrences * * We need to take the occurences in `pt` into account because a type * variable created when typing the current tree might only appear in the * bounds of a type variable in the expected type, for example when * `ConstraintHandling#legalBound` creates type variables when approximating * a bound. * * Note: We intentionally use a relaxed version of variance here, * where the variance does not change under a prefix of a named type * (the strict version makes prefixes invariant). This turns out to be * better for type inference. In a nutshell, if a type variable occurs * like this: * * (U? >: x.type) # T * * we want to instantiate U to x.type right away. No need to wait further. */ private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = { Stats.record("variances") val constraint = ctx.typerState.constraint object accu extends TypeAccumulator[VarianceMap] { def setVariance(v: Int) = variance = v def apply(vmap: VarianceMap, t: Type): VarianceMap = t match { case t: TypeVar if !t.isInstantiated && accCtx.typerState.constraint.contains(t) => val v = vmap(t) if (v == null) vmap.updated(t, variance) else if (v == variance || v == 0) vmap else vmap.updated(t, 0) case _ => foldOver(vmap, t) } } /** Include in `vmap` type variables occurring in the constraints of type variables * already in `vmap`. Specifically: * - if `tvar` is covariant in `vmap`, include all variables in its lower bound * (because they influence the minimal solution of `tvar`), * - if `tvar` is contravariant in `vmap`, include all variables in its upper bound * at flipped variances (because they influence the maximal solution of `tvar`), * - if `tvar` is nonvariant in `vmap`, include all variables in its upper and lower * bounds as non-variant. * Do this in a fixpoint iteration until `vmap` stabilizes. */ def propagate(vmap: VarianceMap): VarianceMap = { var vmap1 = vmap def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) } vmap.foreachBinding { (tvar, v) => val param = tvar.origin constraint.entry(param) match case TypeBounds(lo, hi) => accu.setVariance(v) if v >= 0 then traverse(lo) constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p))) if v <= 0 then traverse(hi) constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p))) case _ => } if (vmap1 eq vmap) vmap else propagate(vmap1) } propagate(accu(accu(SimpleIdentityMap.empty, tp), pt.finalResultType)) } /** Run the transformation after dealiasing but return the original type if it was a no-op. */ private def derivedOnDealias(tp: Type)(transform: Type => Type)(using Context) = { val dealiased = tp.dealias val transformed = transform(dealiased) if transformed eq dealiased then tp // return the original type, not the result of dealiasing else transformed } /** Replace every top-level occurrence of a wildcard type argument by * a fresh skolem type. The skolem types are of the form $i.CAP, where * $i is a skolem of type `scala.internal.TypeBox`, and `CAP` is its * type member. See the documentation of `TypeBox` for a rationale why we do this. */ def captureWildcards(tp: Type)(using Context): Type = derivedOnDealias(tp) { case tp @ AppliedType(tycon, args) if tp.hasWildcardArg => val tparams = tycon.typeParamSymbols val args1 = args.zipWithConserve(tparams.map(_.paramInfo.substApprox(tparams, args))) { case (TypeBounds(lo, hi), bounds) => val skolem = SkolemType(defn.TypeBoxClass.typeRef.appliedTo(lo | bounds.loBound, hi & bounds.hiBound)) TypeRef(skolem, defn.TypeBox_CAP) case (arg, _) => arg } if tparams.isEmpty then tp else tp.derivedAppliedType(tycon, args1) case tp: AndOrType => tp.derivedAndOrType(captureWildcards(tp.tp1), captureWildcards(tp.tp2)) case tp: RefinedType => tp.derivedRefinedType(captureWildcards(tp.parent), tp.refinedName, tp.refinedInfo) case tp: RecType => tp.derivedRecType(captureWildcards(tp.parent)) case tp: LazyRef => captureWildcards(tp.ref) case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) case _ => tp } } trait Inferencing { this: Typer => import Inferencing._ import tpd._ /** Interpolate undetermined type variables in the widened type of this tree. * @param tree the tree whose type is interpolated * @param pt the expected result type * @param locked the set of type variables of the current typer state that cannot be interpolated * at the present time * Eligible for interpolation are all type variables owned by the current typerstate * that are not in `locked` and whose `nestingLevel` is `>= ctx.nestingLevel`. * Type variables occurring co- (respectively, contra-) variantly in the tree type * or expected type are minimized (respectvely, maximized). Non occurring type variables are minimized if they * have a lower bound different from Nothing, maximized otherwise. Type variables appearing * non-variantly in the type are left untouched. * * Note that even type variables that do not appear directly in a type, can occur with * some variance in the type, because of the constraints. E.g if `X` occurs co-variantly in `T` * and we have a constraint * * Y <: X * * Then `Y` also occurs co-variantly in `T` because it needs to be minimized in order to constrain * `T` the least. See `variances` for more detail. */ def interpolateTypeVars(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = { val state = ctx.typerState // Note that some variables in `locked` might not be in `state.ownedVars` // anymore if they've been garbage-collected, so we can't use // `state.ownedVars.size > locked.size` as an early check to avoid computing // `qualifying`. val ownedVars = state.ownedVars if ((ownedVars ne locked) && !ownedVars.isEmpty) { val qualifying = ownedVars -- locked if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") val resultAlreadyConstrained = tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly] if (!resultAlreadyConstrained) constrainResult(tree.symbol, tree.tpe, pt) // This is needed because it could establish singleton type upper bounds. See i2998.scala. val tp = tree.tpe.widen val vs = variances(tp, pt) // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors. // Reason: The errors might reflect unsatisfiable constraints. In that // case interpolating without taking account the constraints risks producing // nonsensical types that then in turn produce incomprehensible errors. // An example is in neg/i1240.scala. Without the condition in the next code line // we get for // // val y: List[List[String]] = List(List(1)) // // i1430.scala:5: error: type mismatch: // found : Int(1) // required: Nothing // val y: List[List[String]] = List(List(1)) // ^ // With the condition, we get the much more sensical: // // i1430.scala:5: error: type mismatch: // found : Int(1) // required: String // val y: List[List[String]] = List(List(1)) if state.reporter.hasUnreportedErrors then return tree def constraint = state.constraint type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)] val toInstantiate = new InstantiateQueue for tvar <- qualifying do if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then constrainIfDependentParamRef(tvar, tree) // Needs to be checked again, since previous interpolations could already have // instantiated `tvar` through unification. val v = vs(tvar) if v == null then // Even though `tvar` is non-occurring in `v`, the specific // instantiation we pick still matters because `tvar` might appear // in the bounds of a non-`qualifying` type variable in the // constraint. // In particular, if `tvar` was created as the upper or lower // bound of an existing variable by `LevelAvoidMap`, we // instantiate it in the direction corresponding to the // original variable which might be further constrained later. // Otherwise, we simply rely on `hasLowerBound`. val name = tvar.origin.paramName val fromBelow = name.is(AvoidNameKind.UpperBound) || !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint") toInstantiate += ((tvar, fromBelow)) else if v.intValue != 0 then typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint") toInstantiate += ((tvar, v.intValue == 1)) else if tvar.nestingLevel > ctx.nestingLevel then // Invariant: a type variable of level N can only appear // in the type of a tree whose enclosing scope is level <= N. typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint") comparing(_.atLevel(ctx.nestingLevel, tvar.origin)) else typr.println(i"no interpolation for nonvariant $tvar in $state") /** Instantiate all type variables in `buf` in the indicated directions. * If a type variable A is instantiated from below, and there is another * type variable B in `buf` that is known to be smaller than A, wait and * instantiate all other type variables before trying to instantiate A again. * Dually, wait instantiating a type variable from above as long as it has * upper bounds in `buf`. * * This is done to avoid loss of precision when forming unions. An example * is in i7558.scala: * * type Tr[+V1, +O1 <: V1] * extension [V2, O2 <: V2](tr: Tr[V2, O2]) def sl: Tr[V2, O2] = ??? * def as[V3, O3 <: V3](tr: Tr[V3, O3]) : Tr[V3, O3] = tr.sl * * Here we interpolate at some point V2 and O2 given the constraint * * V2 >: V3, O2 >: O3, O2 <: V2 * * where O3 and V3 are type refs with O3 <: V3. * If we interpolate V2 first to V3 | O2, the widenUnion algorithm will * instantiate O2 to V3, leading to the final constraint * * V2 := V3, O2 := V3 * * But if we instantiate O2 first to O3, and V2 next to V3, we get the * more flexible instantiation * * V2 := V3, O2 := O3 */ def doInstantiate(buf: InstantiateQueue): Unit = if buf.nonEmpty then val suspended = new InstantiateQueue while buf.nonEmpty do val first @ (tvar, fromBelow) = buf.head buf.dropInPlace(1) if !tvar.isInstantiated then val suspend = buf.exists{ (following, _) => if fromBelow then constraint.isLess(following.origin, tvar.origin) else constraint.isLess(tvar.origin, following.origin) } if suspend then suspended += first else tvar.instantiate(fromBelow) end if end while doInstantiate(suspended) end doInstantiate doInstantiate(toInstantiate) } } tree } /** If `tvar` represents a parameter of a dependent method type in the current `call` * approximate it from below with the type of the actual argument. Skolemize that * type if necessary to make it a Singleton. */ private def constrainIfDependentParamRef(tvar: TypeVar, call: Tree)(using Context): Unit = if tvar.origin.paramName.is(NameKinds.DepParamName) then representedParamRef(tvar.origin) match case ref: TermParamRef => def findArg(tree: Tree)(using Context): Tree = tree match case Apply(fn, args) => if fn.tpe.widen eq ref.binder then if ref.paramNum < args.length then args(ref.paramNum) else EmptyTree else findArg(fn) case TypeApply(fn, _) => findArg(fn) case Block(_, expr) => findArg(expr) case Inlined(_, _, expr) => findArg(expr) case _ => EmptyTree val arg = findArg(call) if !arg.isEmpty then var argType = arg.tpe.widenIfUnstable if !argType.isSingleton then argType = SkolemType(argType) argType <:< tvar case _ => end constrainIfDependentParamRef } /** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */ @sharable object ForceDegree { class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom) val none: Value = new Value(_ => false, IfBottom.ok) val all: Value = new Value(_ => true, IfBottom.ok) val failBottom: Value = new Value(_ => true, IfBottom.fail) val flipBottom: Value = new Value(_ => true, IfBottom.flip) } enum IfBottom: case ok, fail, flip
dotty-staging/dotty
compiler/src/dotty/tools/dotc/typer/Inferencing.scala
Scala
apache-2.0
33,321
package controllers import javax.inject.Inject import com.mohiva.play.silhouette.api._ import com.mohiva.play.silhouette.api.exceptions.ProviderException import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository import com.mohiva.play.silhouette.impl.authenticators.JWTAuthenticator import com.mohiva.play.silhouette.impl.providers._ import models.User import models.services.UserService import play.api.i18n.{ MessagesApi, Messages } import play.api.libs.concurrent.Execution.Implicits._ import play.api.libs.json.Json import play.api.mvc.Action import scala.concurrent.Future /** * The social auth controller. * * @param messagesApi The Play messages API. * @param env The Silhouette environment. * @param userService The user service implementation. * @param authInfoRepository The auth info service implementation. * @param socialProviderRegistry The social provider registry. */ class SocialAuthController @Inject() ( val messagesApi: MessagesApi, val env: Environment[User, JWTAuthenticator], userService: UserService, authInfoRepository: AuthInfoRepository, socialProviderRegistry: SocialProviderRegistry) extends Silhouette[User, JWTAuthenticator] with Logger { /** * Authenticates a user against a social provider. * * @param provider The ID of the provider to authenticate against. * @return The result to display. */ def authenticate(provider: String) = Action.async { implicit request => (socialProviderRegistry.get(provider) match { case Some(p: SocialProvider with CommonSocialProfileBuilder) => p.authenticate().flatMap { case Left(result) => Future.successful(result) case Right(authInfo) => for { profile <- p.retrieveProfile(authInfo) user <- userService.save(profile) authInfo <- authInfoRepository.save(profile.loginInfo, authInfo) authenticator <- env.authenticatorService.create(profile.loginInfo) token <- env.authenticatorService.init(authenticator) } yield { env.eventBus.publish(LoginEvent(user, request, request2Messages)) Ok(Json.obj("token" -> token)) } } case _ => Future.failed(new ProviderException(s"Cannot authenticate with unexpected social provider $provider")) }).recover { case e: ProviderException => logger.error("Unexpected provider error", e) Unauthorized(Json.obj("message" -> Messages("could.not.authenticate"))) } } }
anotherhale/play-silhouette-angular-seed
app/controllers/SocialAuthController.scala
Scala
apache-2.0
2,507
package coursier.cli.util object Guard { def apply(): Unit = { val experimental = Option(System.getenv("COURSIER_EXPERIMENTAL")).exists(s => s == "1" || s == "true") || java.lang.Boolean.getBoolean("coursier.experimental") if (!experimental) { System.err.println( "Command disabled. Set environment variable COURSIER_EXPERIMENTAL=1, " + "or Java property coursier.experimental=true." ) sys.exit(1) } } }
alexarchambault/coursier
modules/cli/src/main/scala/coursier/cli/util/Guard.scala
Scala
apache-2.0
469
/******************************************************************************* Copyright (c) 2012-2013, KAIST, S-Core. All rights reserved. Use is subject to license terms. This distribution may include materials developed by third parties. ******************************************************************************/ package kr.ac.kaist.jsaf.exceptions import _root_.java.lang.{Integer => JInteger} import xtc.parser.ParseError import xtc.parser.ParserBase import xtc.tree.Location class ParserError(parseError: ParseError, parser: ParserBase, start: JInteger) extends StaticError(parseError.msg, None) { def typeDescription() = "Parse Error" override def description() = { var result = parseError.msg val size = result.length if (size > 8 && result.substring(size-8,size).equals("expected")) result = "Syntax Error" else { if (!result.equals("")) result = "Syntax Error: " + result else result = "Syntax Error" } if (result.equals("")) result = "Unspecified cause" result } override def at = if (parseError.index == -1) "Unspecified location" else { val loc = parser.location(parseError.index) new Location(loc.file, loc.line + start, loc.column).toString } override def toString = String.format("%s:\n %s", at, description) }
daejunpark/jsaf
src/kr/ac/kaist/jsaf/exceptions/ParserError.scala
Scala
bsd-3-clause
1,348
import leon.annotation._ import leon.lang._ import leon.lang.synthesis._ object SortedListUnion { sealed abstract class List case class Cons(head: BigInt, tail: List) extends List case object Nil extends List def size(l: List): BigInt = (l match { case Nil => BigInt(0) case Cons(_, t) => BigInt(1) + size(t) }) ensuring(res => res >= 0) def content(l: List): Set[BigInt] = l match { case Nil => Set.empty[BigInt] case Cons(i, t) => Set(i) ++ content(t) } def isSorted(list: List): Boolean = list match { case Nil => true case Cons(_, Nil) => true case Cons(x1, Cons(x2, _)) if(x1 > x2) => false case Cons(_, xs) => isSorted(xs) } def split(in: List): (List, List) = { in match { case Cons(h1, Cons(h2, t)) => val r = split(t) (Cons(h1, r._1), Cons(h2, r._2)) case Cons(h1, Nil) => (in, Nil) case Nil => (Nil, Nil) } } def merge(in1: List, in2: List): List = { require(isSorted(in1) && isSorted(in2)) (in1, in2) match { case (Cons(h1, t1), Cons(h2, t2)) => if (h1 < h2) { Cons(h1, merge(t1, in2)) } else { Cons(h2, merge(in1, t2)) } case (l, Nil) => l case (Nil, l) => l } } ensuring { (out : List) => (content(out) == content(in1) ++ content(in2)) && isSorted(out) } def sort(in: List): List = { in match { case Cons(h1, Cons(h2, t)) => val s = split(in) merge(sort(s._1), sort(s._2)) case _ => ???[List] } } ensuring { (out : List) => content(out) == content(in) && isSorted(out) } }
regb/leon
testcases/synthesis/current/SortedList/MergeSortGuided.scala
Scala
gpl-3.0
1,653
package com.teamisotope.techexpansion.mineral import scala.collection.mutable object MineralRegistry { private val map: mutable.HashMap[String, Mineral] = new mutable.HashMap[String, Mineral]() private val ids: mutable.HashMap[Int, Mineral] = new mutable.HashMap[Int, Mineral]() private var currentId: Int = 0 def registerMineral(mineral: Mineral): Unit = { if (!(ids.contains(currentId) && !map.contains(mineral.getRegistryName))) { map.put(mineral.getRegistryName, mineral) ids.put(currentId, mineral) currentId+=1 } } def getMineralbyName(name: String): Option[Mineral] = { map.get(name) } def getMineralById(id: Int): Option[Mineral] = { ids.get(id) } }
collaborationmods/TechExpansion
src/main/scala/com/teamisotope/techexpansion/mineral/MineralRegistry.scala
Scala
gpl-3.0
719
package rl import collection.{ GenSeq, immutable, SortedMap } import Imports._ object QueryString { val DEFAULT_EXCLUSIONS = List("utm_source", "utm_medium", "utm_term", "utm_content", "utm_campaign", "sms_ss", "awesm") def apply(rawValue: String) = { rawValue.blankOption map { v ⇒ (v.indexOf('&') > -1, v.indexOf('=') > -1) match { case (true, true) | (false, true) ⇒ MapQueryString(v) case (true, false) ⇒ StringSeqQueryString(v) case (false, false) ⇒ StringQueryString(v) } } getOrElse EmptyQueryString } } trait QueryString extends UriNode { type Value def rawValue: String def value: Value def empty: Value def normalize: QueryString def apply() = rawValue.urlEncode } case object EmptyQueryString extends QueryString { def empty = "" type Value = String val value = empty val uriPart = empty val rawValue = empty val normalize = this } case class StringQueryString(rawValue: String) extends QueryString { val uriPart = "?" + rawValue.blankOption.map(_.urlEncode).getOrElse("") val value = rawValue.blankOption.map(_.urlEncode).getOrElse("") val empty = "" type Value = String def normalize = this } case class StringSeqQueryString(rawValue: String) extends QueryString { val value: Value = rawValue.blankOption.map(_.split("&").map(_.urlDecode).toList).getOrElse(Nil) val uriPart = "?" + value.map(_.urlEncode).mkString("?", "&", "") val empty = Nil type Value = List[String] def normalize = StringSeqQueryString(value.sortWith(_ >= _).map(_.urlEncode).mkString("?", "&", "")) } object MapQueryString { def parseString(rw: String) = { // this is probably an accident waiting to happen when people do actually mix stuff val semiColon = if (rw.indexOf(';') > -1) { rw.split(';').foldRight(Map[String, List[String]]()) { readQsPair _ } } else readQsPair(rw) val ampersand = if (rw.indexOf('&') > -1) { rw.split('&').foldRight(Map[String, List[String]]()) { readQsPair _ } } else { readQsPair(rw) } semiColon ++ ampersand } private def readQsPair(pair: String, current: Map[String, List[String]] = Map.empty) = { (pair split '=' toList) map { _.urlDecode } match { case item :: Nil ⇒ current + (item -> List[String]()) case item :: rest ⇒ if (!current.contains(item)) current + (item -> rest) else (current + (item -> (rest ::: current(item)).distinct)) case _ ⇒ current } } def apply(rawValue: String): MapQueryString = new MapQueryString(parseString(rawValue).toSeq, rawValue) } case class MapQueryString(initialValues: Seq[(String, Seq[String])], rawValue: String) extends QueryString { val uriPart = { "?" + mkString() } val empty = Map.empty[String, List[String]] def value: Value = Map(initialValues: _*) def normalize = copy(SortedMap(initialValues filter (k ⇒ !QueryString.DEFAULT_EXCLUSIONS.contains(k._1)): _*) toSeq) private def mkString(values: Value = value) = values map { case (k, v) ⇒ v.map(s ⇒ "%s=%s".format(k.urlEncode, s.urlEncode)).mkString("&") } mkString "&" type Value = immutable.Map[String, Seq[String]] }
scalatra/rl
core/src/main/scala/rl/QueryString.scala
Scala
mit
3,221
package com.codeboyyong.akkasample.util import java.util.HashMap import com.typesafe.config.ConfigFactory import akka.actor.ActorSystem object AkkaUtil { def startAkkaRemoteSystem(host: String, port: String, name: String = "myRemoteActorSystem"): ActorSystem = { val configureMap = new HashMap[String, Object] configureMap.put("akka.actor.provider", "akka.remote.RemoteActorRefProvider") //note: value is a list // configureMap.put("akka.remote.enabled-transports", // List("akka.remote.netty.tcp")); configureMap.put("akka.remote.netty.tcp.port",new Integer(port)); configureMap.put("akka.remote.netty.tcp.hostname", host); val akkaConfig = ConfigFactory.parseMap(configureMap); val actorSystem = ActorSystem.create(name, akkaConfig); return actorSystem; } }
codeboyyong/akka-sample
akka-sample/src/main/scala/com/codeboyyong/akkasample/util/AkkaUtil.scala
Scala
apache-2.0
812
package com.github.tarao.nonempty.collection import scala.collection.BuildFrom import scala.collection.immutable import scala.language.higherKinds /** Methods inherited from `Iterable` that preserve non-emptiness. * * @define orderDependent * * Note: might return different results for different * runs, unless the underlying collection type is * ordered. * @define willNotTerminateInf * * Note: will not terminate for infinite-sized collections. * @define willForceEvaluation * Note: Even when applied to a view or a lazy * collection it will always force the elements. * @define consumesAndProducesIterator * After calling this method, one should discard the * iterator it was called on, and use only the iterator * that was returned. Using the old iterator is * undefined, subject to change, and may result in * changes to the new iterator as well. * @define Coll `NonEmpty` * @define coll collection */ trait IterableOps[+A, +C <: Iterable[A]] extends Any { self: NonEmpty[A, C] => /** Given a collection factory `factory`, convert this collection to * the appropriate representation for the current element type * `A`. */ def to[B >: A, C2 <: immutable.Iterable[B]]( factory: scala.collection.Factory[A, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](value.to(factory)) def toList: NonEmpty[A, immutable.List[A]] = unsafeApply[A, immutable.List[A]](value.toList) def toVector: NonEmpty[A, immutable.Vector[A]] = unsafeApply[A, immutable.Vector[A]](value.toVector) def toMap[K, V](implicit ev: A <:< (K, V) ): NonEmpty[(K, V), immutable.Map[K, V]] = unsafeApply[(K, V), immutable.Map[K, V]](value.toMap) /** * @return This collection as a `NonEmpty[A, Seq[A]]`. This is * equivalent to `to(Seq)` but might be faster. */ def toSeq: NonEmpty[A, immutable.Seq[A]] = unsafeApply[A, immutable.Seq[A]](value.toSeq) def toIndexedSeq: NonEmpty[A, immutable.IndexedSeq[A]] = unsafeApply[A, immutable.IndexedSeq[A]](value.toIndexedSeq) /** Returns a new $coll containing the elements from the left hand * operand followed by the elements from the right hand * operand. The element type of the $coll is the most specific * superclass encompassing the element types of the two operands. * * @param suffix the iterable to append. * @tparam B the element type of the returned collection. * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. * @see [[scala.collection.IterableOps!.concat]] */ def concat[B >: A, C2 <: Iterable[B]](suffix: IterableOnce[B])(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](bf.fromSpecific(value)(value.concat(suffix))) /** Alias for `concat` * @see [[#concat]] * @see [[scala.collection.IterableOps!.++]] */ @inline def ++[B >: A, C2 <: Iterable[B]](suffix: IterableOnce[B])(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = concat[B, C2](suffix) /** Partitions this $coll into a map of ${coll}s according to some * discriminator function. * * @param f the discriminator function. * @tparam K the type of keys returned by the discriminator function. * @return A map from keys to ${coll}s such that the following * invariant holds: * {{{ * (xs groupBy f)(k) = xs filter (x => f(x) == k) * }}} * That is, every key `k` is bound to a $coll of those elements `x` * for which `f(x)` equals `k`. * @see [[scala.collection.IterableOps!.groupBy]] */ def groupBy[K, B >: A, C2 <: Iterable[B]](f: A => K)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[(K, NonEmpty[B, C2]), immutable.Map[K, NonEmpty[B, C2]]] = unsafeApply[(K, NonEmpty[B, C2]), immutable.Map[K, NonEmpty[B, C2]]]( value.groupBy(f).map { case (k, v) => k -> unsafeApply[B, C2](bf.fromSpecific(value)(v)) } ) /** Partitions this $coll into a map of ${coll}s according to a * discriminator function `key`. Each element in a group is * transformed into a value of type `B` using the `value` function. * * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more * efficient. * * $willForceEvaluation * * @param key the discriminator function * @param f the element transformation function * @tparam K the type of keys returned by the discriminator function * @tparam B the type of values returned by the transformation function * @see [[scala.collection.IterableOps!.groupMap]] */ def groupMap[K, B, C2 <: Iterable[B]](key: A => K)(f: A => B)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[(K, NonEmpty[B, C2]), immutable.Map[K, NonEmpty[B, C2]]] = unsafeApply[(K, NonEmpty[B, C2]), immutable.Map[K, NonEmpty[B, C2]]]( value.groupMap(key)(f).map { case (k, v) => k -> unsafeApply[B, C2](bf.fromSpecific(value)(v)) } ) /** Partitions this $coll into a map according to a discriminator * function `key`. All the values that have the same discriminator * are then transformed by the `value` function and then reduced * into a single value with the `reduce` function. * * It is equivalent to * `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more * efficient. * * $willForceEvaluation * @see [[scala.collection.IterableOps!.groupMapReduce]] */ def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): NonEmpty[(K, B), immutable.Map[K, B]] = unsafeApply[(K, B), immutable.Map[K, B]]( value.groupMapReduce(key)(f)(reduce) ) /** Partitions elements in fixed size ${coll}s. * * @param size the number of elements per group * @return An iterator producing ${coll}s of size `size`, except the * last will be less than size `size` if the elements don't * divide evenly. * @see [[scala.collection.IterableOps!.grouped]] */ def grouped[B >: A, C2 <: Iterable[B]](size: Int)(implicit bf: BuildFrom[C, B, C2], ): Iterator[NonEmpty[B, C2]] = value.grouped(size).map { v => unsafeApply[B, C2](bf.fromSpecific(value)(v)) } /** Builds a new $coll by applying a function to all elements of this * $coll. * * @param f the function to apply to each element. * @tparam B the element type of the returned $coll. * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. * @note Reuse: $consumesAndProducesIterator * @see [[scala.collection.IterableOps!.map]] */ def map[B, C2 <: Iterable[B]](f: A => B)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](bf.fromSpecific(value)(value.map(f))) /** Computes a prefix scan of the elements of the collection. * * Note: The neutral element `z` may be applied more than once. * * @tparam B element type of the resulting collection * @param z neutral element for the operator `op` * @param op the associative operator for the scan * * @return a new $coll containing the prefix scan of the elements in this $coll * @see [[scala.collection.IterableOps!.scan]] */ def scan[B >: A, C2 <: Iterable[B]](z: B)(op: (B, B) => B)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](bf.fromSpecific(value)(value.scan(z)(op))) /** Produces a $coll containing cumulative results of applying the * operator going left to right, including the initial value. * * $willNotTerminateInf * $orderDependent * * @tparam B the type of the elements in the resulting collection * @param z the initial value * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results * @note Reuse: $consumesAndProducesIterator * @see [[scala.collection.IterableOps!.scanLeft]] */ def scanLeft[B, C2 <: Iterable[B]](z: B)(op: (B, A) => B)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](bf.fromSpecific(value)(value.scanLeft(z)(op))) /** Produces a collection containing cumulative results of applying * the operator going right to left. The head of the collection * is the last cumulative result. $willNotTerminateInf * $orderDependent $willForceEvaluation * * @tparam B the type of the elements in the resulting collection * @param z the initial value * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results * @see [[scala.collection.IterableOps!.scanRight]] */ def scanRight[B, C2 <: Iterable[B]](z: B)(op: (A, B) => B)(implicit bf: BuildFrom[C, B, C2] ): NonEmpty[B, C2] = unsafeApply[B, C2](bf.fromSpecific(value)(value.scanRight(z)(op))) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in * `grouped`.) The "sliding window" step is set to one. * @see [[scala.collection.Iterator]], method `sliding` * * @param size the number of elements per group * @return An iterator producing ${coll}s of size `size`, except the * last element (which may be the only element) will be truncated * if there are fewer than `size` elements remaining to be grouped. * @see [[scala.collection.IterableOps!.sliding(size:Int)*]] */ def sliding[B >: A, C2 <: Iterable[B]](size: Int)(implicit bf: BuildFrom[C, B, C2] ): Iterator[NonEmpty[B, C2]] = value.sliding(size).map(v => unsafeApply[B, C2](bf.fromSpecific(value)(v))) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) * @see [[scala.collection.Iterator]], method `sliding` * * @param size the number of elements per group * @param step the distance between the first elements of successive * groups * @return An iterator producing ${coll}s of size `size`, except the * last element (which may be the only element) will be truncated * if there are fewer than `size` elements remaining to be grouped. * @see [[scala.collection.IterableOps!.sliding(size:Int,step:Int)*]] */ def sliding[B >: A, C2 <: Iterable[B]](size: Int, step: Int)(implicit bf: BuildFrom[C, B, C2] ): Iterator[NonEmpty[B, C2]] = value.sliding(size, step).map(v => unsafeApply[B, C2](bf.fromSpecific(value)(v))) /** Transposes this $coll of iterable collections into * a $coll of ${coll}s. * * The resulting collection's type will be guided by the * static type of $coll. * * $willForceEvaluation * * @tparam B the type of the elements of each iterable collection. * @param asIterable an implicit conversion which asserts that the * element type of this $coll is an `Iterable`. * @return a two-dimensional $coll of ${coll}s which has as ''n''th row * the ''n''th column of this $coll. * @throws scala.IllegalArgumentException if all collections in this $coll * are not of the same size. * @see [[scala.collection.IterableOps!.transpose]] */ def transpose[A1 >: A, B, CC[X] <: Iterable[X]](implicit asIterable: A1 => Iterable[B], coll: C <:< CC[A1], bf1: BuildFrom[C, B, CC[B]], bf2: BuildFrom[C, NonEmpty[B, CC[B]], CC[NonEmpty[B, CC[B]]]] ): CC[NonEmpty[B, CC[B]]] = bf2.fromSpecific(value)(coll(value).transpose.map { v => unsafeApply[B, CC[B]](bf1.fromSpecific(value)(v)) }) /** Converts this $coll of pairs into two collections of the first and * second half of each pair. * * @tparam A1 the type of the first half of the element pairs * @tparam A2 the type of the second half of the element pairs * @param asPair an implicit conversion which asserts that the element type * of this $coll is a pair. * @return a pair of ${coll}s, containing the first, respectively * second half of each element pair of this $coll. * @see [[scala.collection.IterableOps!.unzip]] */ def unzip[A1, A2, B >: A, CC[X] <: Iterable[X]](implicit asPair: A => (A1, A2), coll: C => CC[B], bf1: BuildFrom[CC[B], A1, CC[A1]], bf2: BuildFrom[CC[B], A2, CC[A2]], ): (NonEmpty[A1, CC[A1]], NonEmpty[A2, CC[A2]]) = { val (a1, a2) = value.unzip(asPair) ( unsafeApply[A1, CC[A1]](bf1.fromSpecific(coll(value))(a1)), unsafeApply[A2, CC[A2]](bf2.fromSpecific(coll(value))(a2)), ) } /** Converts this $coll of triples into three collections of the * first, second, and third element of each triple. * * @tparam A1 the type of the first member of the element triples * @tparam A2 the type of the second member of the element triples * @tparam A3 the type of the third member of the element triples * @param asTriple an implicit conversion which asserts that the element * type of this $coll is a triple. * @return a triple of ${coll}s, containing the first, second, * respectively third member of each element * triple of this $coll. * @see [[scala.collection.IterableOps!.unzip3]] */ def unzip3[A1, A2, A3, B >: A, CC[X] <: Iterable[X]](implicit asTriple: A => (A1, A2, A3), coll: C => CC[B], bf1: BuildFrom[CC[B], A1, CC[A1]], bf2: BuildFrom[CC[B], A2, CC[A2]], bf3: BuildFrom[CC[B], A3, CC[A3]], ): (NonEmpty[A1, CC[A1]], NonEmpty[A2, CC[A2]], NonEmpty[A3, CC[A3]]) = { val (a1, a2, a3) = value.unzip3(asTriple) ( unsafeApply[A1, CC[A1]](bf1.fromSpecific(coll(value))(a1)), unsafeApply[A2, CC[A2]](bf2.fromSpecific(coll(value))(a2)), unsafeApply[A3, CC[A3]](bf3.fromSpecific(coll(value))(a3)), ) } /** Returns a $coll formed from this $coll and another iterable * collection by combining corresponding elements in pairs. If one * of the two collections is shorter than the other, placeholder * elements are used to extend the shorter collection to the length * of the longer. * * @param that the iterable providing the second half of each result pair * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. * @return a new collection of type `That` containing pairs consisting of * corresponding elements of this $coll and `that`. The length * of the returned collection is the maximum of the lengths of this $coll and `that`. * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. * @see [[scala.collection.IterableOps!.zipAll]] */ def zipAll[A1 >: A, B, CC[X] <: Iterable[X]]( that: Iterable[B], thisElem: A1, thatElem: B, )(implicit coll: C => CC[A1], bf: BuildFrom[CC[A1], (A1, B), CC[(A1, B)]], ): NonEmpty[(A1, B), CC[(A1, B)]] = { val v = bf.fromSpecific(coll(value))(value.zipAll(that, thisElem, thatElem)) unsafeApply[(A1, B), CC[(A1, B)]](v) } /** Zips this $coll with its indices. * * @return A new $coll containing pairs consisting of all elements of this $coll paired with their index. * Indices start at `0`. * @note Reuse: $consumesAndProducesIterator * @see [[scala.collection.IterableOps!.zipWithIndex]] */ def zipWithIndex[B >: A, CC[X] <: Iterable[X]](implicit coll: C => CC[B], bf: BuildFrom[CC[B], (B, Int), CC[(B, Int)]], ): NonEmpty[(B, Int), CC[(B, Int)]] = { val v = bf.fromSpecific(coll(value))(value.zipWithIndex) unsafeApply[(B, Int), CC[(B, Int)]](v) } }
tarao/nonempty-scala
src/main/scala/com/github/tarao/nonempty/collection/IterableOps.scala
Scala
mit
16,508
package mesosphere.marathon package storage import akka.actor.{ActorSystem, Scheduler} import akka.stream.Materializer import mesosphere.marathon.core.storage.backup.PersistentStoreBackup import mesosphere.marathon.core.storage.store.PersistenceStore import mesosphere.marathon.core.storage.store.impl.cache.LoadTimeCachingPersistenceStore import mesosphere.marathon.core.storage.store.impl.zk.{RichCuratorFramework, ZkId, ZkSerialized} import mesosphere.marathon.metrics.Metrics import mesosphere.marathon.state.RootGroup import mesosphere.marathon.storage.migration.{Migration, ServiceDefinitionRepository} import mesosphere.marathon.storage.repository._ import scala.collection.immutable.Seq import scala.concurrent.ExecutionContext /** * Provides the repositories for all persistable entities. */ trait StorageModule { val persistenceStore: PersistenceStore[_, _, _] val instanceRepository: InstanceRepository val deploymentRepository: DeploymentRepository val taskFailureRepository: TaskFailureRepository val groupRepository: GroupRepository val frameworkIdRepository: FrameworkIdRepository val runtimeConfigurationRepository: RuntimeConfigurationRepository val migration: Migration val leadershipInitializers: Seq[PrePostDriverCallback] val persistentStoreBackup: PersistentStoreBackup } object StorageModule { def apply(metrics: Metrics, conf: MarathonConf, curatorFramework: RichCuratorFramework)(implicit mat: Materializer, ctx: ExecutionContext, scheduler: Scheduler, actorSystem: ActorSystem ): StorageModule = { val currentConfig = StorageConfig(conf, curatorFramework) apply(metrics, currentConfig, RootGroup.NewGroupStrategy.UsingConfig(conf.newGroupEnforceRole()), conf.mesosRole()) } def apply(metrics: Metrics, config: StorageConfig, newGroupStrategy: RootGroup.NewGroupStrategy, defaultMesosRole: String)(implicit mat: Materializer, ctx: ExecutionContext, scheduler: Scheduler, actorSystem: ActorSystem ): StorageModule = { config match { case zk: CuratorZk => val store: PersistenceStore[ZkId, String, ZkSerialized] = zk.store(metrics) val appRepository = AppRepository.zkRepository(store) val podRepository = PodRepository.zkRepository(store) val groupRepository = GroupRepository.zkRepository(store, appRepository, podRepository, zk.groupVersionsCacheSize, newGroupStrategy) val instanceRepository = InstanceRepository.zkRepository(store) val deploymentRepository = DeploymentRepository.zkRepository( metrics, store, groupRepository, appRepository, podRepository, zk.maxVersions, zk.storageCompactionScanBatchSize, zk.storageCompactionInterval ) val taskFailureRepository = TaskFailureRepository.zkRepository(store) val frameworkIdRepository = FrameworkIdRepository.zkRepository(store) val runtimeConfigurationRepository = RuntimeConfigurationRepository.zkRepository(store) val leadershipInitializers = store match { case s: LoadTimeCachingPersistenceStore[_, _, _] => Seq(s) case _ => Nil } val backup = PersistentStoreBackup(store) val migration = new Migration( zk.availableFeatures, defaultMesosRole, store, appRepository, podRepository, groupRepository, deploymentRepository, instanceRepository, taskFailureRepository, frameworkIdRepository, ServiceDefinitionRepository.zkRepository(store), runtimeConfigurationRepository, backup, config ) StorageModuleImpl( store, instanceRepository, deploymentRepository, taskFailureRepository, groupRepository, frameworkIdRepository, runtimeConfigurationRepository, migration, leadershipInitializers, backup ) case mem: InMem => val store = mem.store(metrics) val appRepository = AppRepository.inMemRepository(store) val podRepository = PodRepository.inMemRepository(store) val instanceRepository = InstanceRepository.inMemRepository(store) val groupRepository = GroupRepository.inMemRepository(store, appRepository, podRepository, mem.groupVersionsCacheSize, newGroupStrategy) val deploymentRepository = DeploymentRepository.inMemRepository( metrics, store, groupRepository, appRepository, podRepository, mem.maxVersions, mem.storageCompactionScanBatchSize ) val taskFailureRepository = TaskFailureRepository.inMemRepository(store) val frameworkIdRepository = FrameworkIdRepository.inMemRepository(store) val runtimeConfigurationRepository = RuntimeConfigurationRepository.inMemRepository(store) val leadershipInitializers = store match { case s: LoadTimeCachingPersistenceStore[_, _, _] => Seq(s) case _ => Nil } val backup = PersistentStoreBackup(store) val migration = new Migration( mem.availableFeatures, defaultMesosRole, ???, appRepository, podRepository, groupRepository, deploymentRepository, instanceRepository, taskFailureRepository, frameworkIdRepository, ServiceDefinitionRepository.inMemRepository(store), runtimeConfigurationRepository, backup, config ) StorageModuleImpl( store, instanceRepository, deploymentRepository, taskFailureRepository, groupRepository, frameworkIdRepository, runtimeConfigurationRepository, migration, leadershipInitializers, backup ) } } } private[storage] case class StorageModuleImpl( persistenceStore: PersistenceStore[_, _, _], instanceRepository: InstanceRepository, deploymentRepository: DeploymentRepository, taskFailureRepository: TaskFailureRepository, groupRepository: GroupRepository, frameworkIdRepository: FrameworkIdRepository, runtimeConfigurationRepository: RuntimeConfigurationRepository, migration: Migration, leadershipInitializers: Seq[PrePostDriverCallback], persistentStoreBackup: PersistentStoreBackup ) extends StorageModule
mesosphere/marathon
src/main/scala/mesosphere/marathon/storage/StorageModule.scala
Scala
apache-2.0
6,621
/* * Copyright 2011-2022 GatlingCorp (https://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.core.check.css import java.nio.charset.StandardCharsets.UTF_8 import scala.util.Using import io.gatling.{ BaseSpec, ValidationValues } import io.gatling.commons.util.Io._ import jodd.lagarto.dom.NodeSelector class CssExtractorSpec extends BaseSpec with ValidationValues { private val cssSelectors = new CssSelectors(Long.MaxValue) private def prepared(file: String): NodeSelector = Using.resource(getClass.getResourceAsStream(file)) { is => val string = is.toString(UTF_8) cssSelectors.parse(string.toCharArray) } "count" should "support browser conditional tests and behave as a non-IE browser" in { val cssExtractor = CssExtractors.count("#helloworld", None, cssSelectors) cssExtractor(prepared("/IeConditionalTests.html")).succeeded shouldBe Some(1) } it should "return expected result with a class selector" in { val cssExtractor = CssExtractors.count(".nav-menu", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(3) } it should "return expected result with an id selector" in { val cssExtractor = CssExtractors.count("#twitter_button", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(1) } it should "return expected result with an :empty selector" in { val cssExtractor = CssExtractors.count(".frise:empty", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(1) } it should "return None when the selector doesn't match anything" in { val cssExtractor = CssExtractors.count("bad_selector", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(0) } "findAll" should "return expected result with a class selector" in { val cssExtractor = CssExtractors.findAll[String]("#social", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("Social")) } it should "return expected result with an id selector" in { val cssExtractor = CssExtractors.findAll[String](".nav", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("Sponsors", "Social")) } it should "return expected result with an attribute containing a given substring" in { val cssExtractor = CssExtractors.findAll[String](".article a[href*=api]", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("API Documentation")) } it should "return expected result with an element being the n-th child of its parent" in { val cssExtractor = CssExtractors.findAll[String](".article a:nth-child(2)", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("JMeter's")) } it should "return expected result with a predecessor selector" in { val cssExtractor = CssExtractors.findAll[String]("img ~ p", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("Efficient Load Testing")) } it should "return None when the selector doesn't match anything" in { val cssExtractor = CssExtractors.findAll[String]("bad_selector", None, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe None } it should "be able to extract a precise node attribute" in { val cssExtractor = CssExtractors.findAll[String]("#sample_requests", Some("href"), cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some(List("http://gatling.io/sample/requests.html")) } "find" should "return expected result with a class selector" in { val cssExtractor = CssExtractors.find[String](".nav", None, 1, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some("Social") } it should "return None when the index is out of the range of returned elements" in { val cssExtractor = CssExtractors.find[String](".nav", None, 3, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe None } it should "return None when the selector doesn't match anything" in { val cssExtractor = CssExtractors.find[String]("bad_selector", None, 1, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe None } it should "be able to extract a precise node attribute" in { val cssExtractor = CssExtractors.find[String](".nav", Some("id"), 1, cssSelectors) cssExtractor(prepared("/GatlingHomePage.html")).succeeded shouldBe Some("social") } it should "support filtered value with dots" in { val cssExtractor = CssExtractors.find[String]("input[name='javax.faces.ViewState']", Some("value"), 0, cssSelectors) cssExtractor( cssSelectors.parse( """<input type="hidden" name="javax.faces.ViewState" value="foo">""".toCharArray ) ).succeeded shouldBe Some("foo") } }
gatling/gatling
gatling-core/src/test/scala/io/gatling/core/check/css/CssExtractorSpec.scala
Scala
apache-2.0
5,566
package com.airbnb.aerosolve.training.pipeline import com.airbnb.aerosolve.core.{FunctionForm, ModelRecord} import org.junit.Assert._ import org.junit.Test import org.slf4j.LoggerFactory class ModelDebugTest { val log = LoggerFactory.getLogger("ModelDebugTest") @Test def modelRecordToString : Unit = { val r: ModelRecord = new ModelRecord() r.setFeatureFamily("f") r.setFeatureName("n") r.setMaxVal(1) r.setMinVal(0) r.setWeightVector(java.util.Arrays.asList(1.0,2.0)) r.setFunctionForm(FunctionForm.Point) assertEquals("f\u0001n\u00010.000000\u00011.000000\u0001[1.0, 2.0]\u0001\u00010.000000\u00010.000000\u00010.000000", ModelDebug.modelRecordToString(r)) } }
airbnb/aerosolve
training/src/test/scala/com/airbnb/aerosolve/training/pipeline/ModelDebugTest.scala
Scala
apache-2.0
724
package com.ponkotuy.proxy import com.netaporter.uri.Uri import com.ponkotuy.intercept.Interceptor import io.netty.buffer.ByteBuf import io.netty.channel.ChannelHandlerContext import io.netty.handler.codec.http.{HttpHeaders, HttpRequest} import org.littleshoot.proxy.{HttpFilters, HttpFiltersAdapter, HttpFiltersSourceAdapter} class KCFiltersSource(hosts: Set[String], interceptor: Interceptor) extends HttpFiltersSourceAdapter { private val noopFilters = new HttpFiltersAdapter(null) override def filterRequest(originalRequest: HttpRequest, ctx: ChannelHandlerContext): HttpFilters = if (hosts(HttpHeaders.getHost(originalRequest))) new AggregateContentFilters(originalRequest, ctx) { def finished(requestContent: ByteBuf, responseContent: ByteBuf): Unit = { val uri = Uri.parse(originalRequest.getUri) interceptor.input(uri, requestContent, responseContent) } } else noopFilters }
kxbmap/MyFleetGirls
client/src/main/scala/com/ponkotuy/proxy/KCFiltersSource.scala
Scala
mit
949
/* * Copyright 2017 Datamountaineer. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datamountaineer.streamreactor.common.sink import com.datamountaineer.streamreactor.common.rowkeys.StringStructFieldsStringKeyBuilder import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct} import org.apache.kafka.connect.sink.SinkRecord import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class StringStructFieldsStringKeyBuilderTest extends AnyWordSpec with Matchers { "StructFieldsStringKeyBuilder" should { "raise an exception if the field is not present in the struct" in { intercept[IllegalArgumentException] { val schema = SchemaBuilder.struct().name("com.example.Person") .field("firstName", Schema.STRING_SCHEMA) .field("age", Schema.INT32_SCHEMA) .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build() val struct = new Struct(schema).put("firstName", "Alex").put("age", 30) val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1) StringStructFieldsStringKeyBuilder(Seq("threshold")).build(sinkRecord) } } "create the row key based on one single field in the struct" in { val schema = SchemaBuilder.struct().name("com.example.Person") .field("firstName", Schema.STRING_SCHEMA) .field("age", Schema.INT32_SCHEMA) .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build() val struct = new Struct(schema).put("firstName", "Alex").put("age", 30) val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1) StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex" } "create the row key based on one single field with doc in the struct" in { val firstNameSchema = SchemaBuilder.`type`(Schema.Type.STRING).doc("first name") val schema = SchemaBuilder.struct().name("com.example.Person") .field("firstName", firstNameSchema) .field("age", Schema.INT32_SCHEMA) .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build() val struct = new Struct(schema).put("firstName", "Alex").put("age", 30) val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1) StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex" } "create the row key based on more thant one field in the struct" in { val schema = SchemaBuilder.struct().name("com.example.Person") .field("firstName", Schema.STRING_SCHEMA) .field("age", Schema.INT32_SCHEMA) .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build() val struct = new Struct(schema).put("firstName", "Alex").put("age", 30) val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1) StringStructFieldsStringKeyBuilder(Seq("firstName", "age")).build(sinkRecord) shouldBe "Alex.30" } } }
datamountaineer/stream-reactor
kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/sink/StringStructFieldsStringKeyBuilderTest.scala
Scala
apache-2.0
3,504
/* * Copyright 2014–2017 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.physical.marklogic.cts import slamdata.Predef._ import quasar.RenderTree import quasar.physical.marklogic.xquery._ import quasar.physical.marklogic.xquery.syntax._ import scalaz.{Enum, Show} import scalaz.syntax.order._ import scalaz.std.anyVal._ sealed abstract class MatchDepth object MatchDepth { final case object Children extends MatchDepth final case object Descendants extends MatchDepth val toXQuery: MatchDepth => XQuery = { case Children => "1".xs case Descendants => "infinity".xs } implicit val enum: Enum[MatchDepth] = new Enum[MatchDepth] { def succ(md: MatchDepth) = md match { case Children => Descendants case Descendants => Children } def pred(md: MatchDepth) = md match { case Children => Descendants case Descendants => Children } def order(a: MatchDepth, b: MatchDepth) = asInt(a) ?|? asInt(b) private def asInt(md: MatchDepth): Int = md match { case Children => 1 case Descendants => 2 } } implicit val show: Show[MatchDepth] = Show.showFromToString implicit val renderTree: RenderTree[MatchDepth] = RenderTree.fromShow("MatchDepth") }
drostron/quasar
marklogic/src/main/scala/quasar/physical/marklogic/cts/MatchDepth.scala
Scala
apache-2.0
1,837
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.cypher.internal.compiler.v2_3.ast.rewriters case object normalizeMatchPredicates extends MatchPredicateNormalization(MatchPredicateNormalizerChain(PropertyPredicateNormalizer, LabelPredicateNormalizer))
HuangLS/neo4j
community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/ast/rewriters/normalizeMatchPredicates.scala
Scala
apache-2.0
1,020
package synereo.client.modules import diode.AnyAction._ import diode.ModelRO import diode.react.ModelProxy import japgolly.scalajs.react import japgolly.scalajs.react._ import japgolly.scalajs.react.extra.router.RouterCtl import japgolly.scalajs.react.vdom.prefix_<^._ import shared.dtos.CloseSessionRequest import shared.models.UserModel import synereo.client.SYNEREOMain._ import synereo.client.components.Bootstrap.CommonStyle import synereo.client.components._ import synereo.client.css.{DashboardCSS, LoginCSS, SynereoCommanStylesCSS} import synereo.client.handlers._ import synereo.client.logger import synereo.client.services.{CoreApi, SYNEREOCircuit} import synereo.client.utils.{ContentUtils, I18N} import scala.concurrent.ExecutionContext.Implicits.global import scala.scalajs.js import scala.scalajs.js.JSON import scala.util.{Failure, Success} import scalacss.ScalaCssReact._ //scalastyle:off object MainMenu { val introductionConnectProxy = SYNEREOCircuit.connect(_.introduction) // val userProxy = SYNEREOMain.userProxy @inline private def bss = GlobalStyles.bootstrapStyles case class Props(ctl: RouterCtl[Loc], currentLoc: Loc, proxy: ModelProxy[UserModel]) case class State(showProfileImageUploadModal: Boolean = false, showNodeSettingModal: Boolean = false, showAboutInfoModal: Boolean = false, lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value) class MainMenuBackend(t: BackendScope[Props, State]) { def mounted() = Callback { SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e)) } def updateLang(reader: ModelRO[js.Dynamic]) = { t.modState(s => s.copy(lang = reader.value)).runNow() } def showImageUploadModal(): react.Callback = Callback { // logger.log.debug("main menu showImageUploadModal") SYNEREOCircuit.dispatch(ToggleImageUploadModal()) } def showAboutInfoModal(): react.Callback = Callback { // logger.log.debug("main menu showAboutInfoModal") SYNEREOCircuit.dispatch(ToggleAboutInfoModal()) } def showNodeSettingModal(): react.Callback = Callback { // logger.log.debug("main menu showNodeSettingModal") SYNEREOCircuit.dispatch(ToggleNodeSettingModal()) } def showNewMessageModal(): react.Callback = Callback { // logger.log.debug("main menu showNewMessageModal") SYNEREOCircuit.dispatch(ToggleNewMessageModal()) } def changeLang(lang: String): react.Callback = Callback { CoreApi.getLang(lang).onComplete { case Success(res) => SYNEREOCircuit.dispatch(ChangeLang(JSON.parse(res))) case Failure(_) => logger.log.error(s"failed to load language for ${lang}") } } } private val MainMenu = ReactComponentB[Props]("MainMenu") .initialState(State()) .backend(new MainMenuBackend(_)) .renderPS((scope, props, state) => { val uri = SYNEREOCircuit.zoom(_.sessionRootModel.sessionUri).value <.div(^.className := "container-fluid")( if (props.proxy.value.isLoggedIn) { val model = props.proxy.value <.div(^.className := "row")( <.div(^.className := "label-selectize-container-main")( if (props.currentLoc == DashboardLoc) { <.div( <.div(^.className := "pull-left")( <.button(^.className := "btn", ^.onClick --> scope.backend.showNewMessageModal(), SynereoCommanStylesCSS.Style.createPostButton, <.img(^.src := "./assets/synereo-images/CreatePost.gif", SynereoCommanStylesCSS.Style.createPostImg) ) ), <.div( SearchComponent(SearchComponent.Props()) ) ) } else { <.span() } ), <.div(^.className := "nav navbar-nav navbar-right", SynereoCommanStylesCSS.Style.mainMenuNavbar)( <.ul(^.className := "nav nav-pills")( <.li(SynereoCommanStylesCSS.Style.userNameNavBar)( <.span(<.img(^.className := "hidden-xss img-responsive", ^.src := "./assets/synereo-images/bubble.png", SynereoCommanStylesCSS.Style.userNameNavBarBubbleImage)), <.span(SynereoCommanStylesCSS.Style.userNameNavBarText, <.div(SynereoCommanStylesCSS.Style.userNameOverflow)(model.name), <.div(^.className := "text-center")( // <.span(model.networkMode.toUpperCase), <.button(^.`type` := "button", ^.className := "btn", SynereoCommanStylesCSS.Style.ampsDropdownToggleBtn)( /*<.img(^.src := "./assets/synereo-images/ampsIcon.PNG")*/ "data-toggle".reactAttr := "tooltip", "title".reactAttr := "AMP Balance", "data-placement".reactAttr := "right", <.img(^.src := "./assets/synereo-images/amptoken.png", DashboardCSS.Style.ampTokenImg), // <.span(Icon.cogs), <.span(DashboardCSS.Style.ampbalancetext)(model.networkMode + " " + model.balanceAmp + " / " + model.balanceBtc) ), <.span(introductionConnectProxy(introProxy => if (introProxy.value.introResponse.length != 0) { // ConfirmIntroReqModal(ConfirmIntroReqModal.Props("", Seq(DashboardCSS.Style.confirmIntroReqBtn), MIcon.sms, "")) <.a(^.href := "/#notifications", DashboardCSS.Style.confirmIntroReqBtn, <.span(<.button(bss.labelOpt(CommonStyle.danger), bss.labelAsBadge, DashboardCSS.Style.inputBtnRadius, introProxy.value.introResponse.length)) ) } else { <.span() } ) ) ) ) ), <.li(SynereoCommanStylesCSS.Style.mainMenuUserActionDropdownLi)( <.div()( <.button(^.className := "btn ", ^.`type` := "button", "data-toggle".reactAttr := "dropdown", SynereoCommanStylesCSS.Style.mainMenuUserActionDropdownBtn)( <.img(^.src := model.imgSrc, SynereoCommanStylesCSS.Style.userAvatar) ), // <.div(^.className := "dropdown-arrow-small"), <.ul(^.className := "dropdown-menu ", SynereoCommanStylesCSS.Style.userActionsMenu)( <.li(<.a(^.onClick --> scope.backend.showAboutInfoModal())(state.lang.selectDynamic("ABOUT").toString)), <.li(<.a(^.onClick --> scope.backend.showImageUploadModal())(state.lang.selectDynamic("CHANGE_PROFILE_PICTURE").toString)), <.li(<.a(^.onClick --> scope.backend.showNodeSettingModal())(state.lang.selectDynamic("NODE_SETTINGS").toString)), <.li(<.a(^.onClick --> Callback(ContentUtils.closeSessionReq(CloseSessionRequest(uri))))(state.lang.selectDynamic("SIGN_OUT").toString)) ) // if (state.showProfileImageUploadModal) // userProxy(userProxy => ProfileImageUploaderForm(ProfileImageUploaderForm.Props($.backend.showImageUploadModal, "Profile Image Uploader", userProxy))) // else if (state.showNodeSettingModal) // NodeSettingModal(NodeSettingModal.Props($.backend.showNodeSettingModal)) // else if (state.showAboutInfoModal) // AboutInfoModal(AboutInfoModal.Props($.backend.showAboutInfoModal)) // else // Seq.empty[ReactElement] //NewImage(NewImage.Props("", Seq(UserProfileViewCSS.Style.newImageBtn), Icon.camera, "", "", <.img(^.src := model.imgSrc, SynereoCommanStylesCSS.Style.userAvatar))) ) ), <.li(SynereoCommanStylesCSS.Style.featureHide)( // NewMessage(NewMessage.Props("Create a post", Seq(SynereoCommanStylesCSS.Style.createPostButton), /*Icon.envelope*/ "", "create-post-button", "create-post-button", (<.span(^.className := "vertical-text-post-btn", "POST")))) ) ) ) ) } else { <.ul(^.className := "nav navbar-right nav-pills", SynereoCommanStylesCSS.Style.nonLoggedInMenu)( <.li( <.a(^.href := "http://www.synereo.com/", LoginCSS.Style.navLiAStyle)( // <.span(LoginCSS.Style.navLiAIcon)(MIcon.helpOutline), // renderLang.asInstanceOf[I18N]. // I18N.En.MainMenu.WATCH_THE_VIDEO state.lang.selectDynamic("WATCH_THE_VIDEO").toString ) ), <.li(^.className := "", LoginCSS.Style.watchVideoBtn)( <.a(^.href := "http://www.synereo.com/", LoginCSS.Style.navLiAStyle)( state.lang.selectDynamic("WHAT_IS_SYNEREO").toString // AppUtils.getFromLang("WHAT_IS_SYNEREO") ) ) ) }, <.div(SynereoCommanStylesCSS.Style.changeLanguageDropdownContainer, SynereoCommanStylesCSS.Style.featureHide)( <.button(^.className := "btn btn-default", ^.`type` := "button", "data-toggle".reactAttr := "dropdown", SynereoCommanStylesCSS.Style.changeLangBtn)( state.lang.selectDynamic("LANG_NAME").toString match { case "undefined" => "EN_US" case _ => state.lang.selectDynamic("LANG_NAME").toString }, Icon.caretDown ), <.ul(^.className := "dropdown-menu", SynereoCommanStylesCSS.Style.langSelectMenu)( <.li(<.a(^.onClick --> scope.backend.changeLang(I18N.Lang.en_us))("En-US")), <.li(<.a(^.onClick --> scope.backend.changeLang(I18N.Lang.ch_man))("Chinese")), <.li(<.a(^.onClick --> scope.backend.changeLang(I18N.Lang.fr))("French")), <.li(<.a(^.onClick --> scope.backend.changeLang(I18N.Lang.hindi))("Hindi")) ) ) ) }) .componentDidMount(scope => scope.backend.mounted()) .build def apply(props: Props) = MainMenu(props) }
LivelyGig/ProductWebUI
sclient/src/main/scala/synereo/client/modules/MainMenu.scala
Scala
apache-2.0
10,609
package com.github.ellchow.scaramouch.collection import scalaz.{ Ordering => _ , _ }, Scalaz._ import scalaz.stream._, Process._ import scalaz.concurrent._ import scala.collection.immutable.TreeMap import com.github.ellchow.scaramouch.collection.ScalazStreamExtra._ object Join { /* cogroup 2 processes - ASSUMES both are sorted by K */ def groupByKey[L, K](lefts: Process[Task,(K,L)])( implicit kord: Ordering[K]): Process[Task, (K, Vector[L])] = { val padded = lefts.map(_.some) ++ emit(none) val grped = (padded |> group[Option[(K, L)]](_.map(_._1) == _.map(_._1))) grped.map{ xs => val ys = xs.collect{ case Some(x) => x } ys.head._1 -> ys.map(_._2) } } /* cogroup 2 processes - ASSUMES both are sorted by K */ def coGroup[L, R, K](lefts: Process[Task,(K,L)], rights: Process[Task,(K,R)])( implicit kord: Ordering[K]): Process[Task,(K, (Vector[L],Vector[R]))] = { val chunkedLeft: Process[Task,Option[(K, Vector[L])]] = groupByKey(lefts).map(_.some) ++ constant(none) val chunkedRight: Process[Task,Option[(K, Vector[R])]] = groupByKey(rights).map(_.some) ++ constant(none) sealed trait Action case object Left extends Action case object Right extends Action case object Both extends Action def go(prevL: Option[(K, Vector[L])], prevR: Option[(K, Vector[R])], action: Option[Action], maxL: Option[K], maxR: Option[K]): Tee[Option[(K,Vector[L])], Option[(K,Vector[R])], (K,(Vector[L],Vector[R]))] = { action match { case Some(Both) => for { l <- awaitL[Option[(K, Vector[L])]] r <- awaitR[Option[(K, Vector[R])]] out <- { val checkL = { (for { max <- maxL ; curr <- l } yield kord.lt(max, curr._1)).getOrElse(true) } val checkR = { (for { max <- maxR ; curr <- r } yield kord.lt(max, curr._1)) .getOrElse(true) } (checkL, checkR) match { case (true, true) => go(l, r, None, l.map(_._1.some).getOrElse(maxL), r.map(_._1.some).getOrElse(maxR)) case (false, _) => def e = new IllegalArgumentException("left is not sorted") Halt(Cause.Error(e)) case (_, false) => def e = new IllegalArgumentException("right is not sorted") Halt(Cause.Error(e)) } } } yield out case Some(Left) => for { l <- awaitL[Option[(K, Vector[L])]] out <- { val checkL = { (for { max <- maxL ; curr <- l } yield kord.lt(max, curr._1)) .getOrElse(true) } checkL match { case true => go(l, prevR, None, l.map(_._1.some).getOrElse(maxL), maxR) case false => def e = new IllegalArgumentException("left is not sorted") Halt(Cause.Error(e)) } } } yield out case Some(Right) => for { r <- awaitR[Option[(K, Vector[R])]] out <- { val checkR = { (for { max <- maxR ; curr <- r } yield kord.lt(max, curr._1)) .getOrElse(true) } checkR match { case true => go(prevL, r, None, maxL, r.map(_._1.some).getOrElse(maxR)) case false => def e = new IllegalArgumentException("right is not sorted") Halt(Cause.Error(e)) } } } yield out case None => (prevL, prevR) match { case (None, None) => halt case (Some((kl, ls)), Some((kr, rs))) => if (kord.lt(kl, kr)) { emit(kl -> (ls, Vector.empty)) ++ go(none, prevR, Left.some, maxL, maxR) } else if (kord.gt(kl,kr)) { emit(kr -> (Vector.empty, rs)) ++ go(prevL, none, Right.some, maxL, maxR) } else { emit(kl -> (ls, rs)) ++ go(none, none, Both.some, maxL, maxR) } case (Some((kl, ls)), None) => emit(kl -> (ls, Vector.empty)) ++ go(none, none, Left.some, maxL, maxR) case (None, Some((kr, rs))) => emit(kr -> (Vector.empty, rs)) ++ go(none, none, Right.some, maxL, maxR) } } } chunkedLeft.tee(chunkedRight)(go(none, none, Both.some, none, none)) } def fullOuterJoin[L, R, K : Ordering](lefts: Process[Task,(K,L)], rights: Process[Task,(K,R)]): Process[Task,(K, (Option[L], Option[R]))] = { for { (k, (ls, rs)) <- coGroup(lefts, rights) ol <- if (ls.nonEmpty) emitAll(ls.map(_.some)) else emit(None) or <- if (rs.nonEmpty) emitAll(rs.map(_.some)) else emit(None) } yield (k, (ol, or)) } def leftOuterJoin[L, R, K : Ordering](lefts: Process[Task,(K,L)], rights: Process[Task,(K,R)]): Process[Task,(K, (L, Option[R]))] = fullOuterJoin(lefts,rights) .collect{ case x@(k, (Some(l), r)) => (k, (l, r)) } def rightOuterJoin[L, R, K : Ordering](lefts: Process[Task,(K,L)], rights: Process[Task,(K,R)]): Process[Task,(K, (Option[L],R))] = fullOuterJoin(lefts,rights) .collect{ case x@(k, (l, Some(r))) => (k, (l, r)) } def innerJoin[L, R, K : Ordering](lefts: Process[Task,(K,L)], rights: Process[Task,(K,R)]): Process[Task,(K, (L,R))] = for { (k, (ls, rs)) <- coGroup(lefts, rights) l <- emitAll(ls) r <- emitAll(rs) } yield (k, (l, r)) def meld[K](lefts: Process[Task,K], rights: Process[Task,K])( implicit kord: Ordering[K]): Process[Task,K] = { coGroup(lefts.map(k => (k, k)), rights.map(k => (k, k))) .flatMap{ case (_, (k1, k2)) => emitAll(k1) ++ emitAll(k2) } } def meldAll[K : Ordering](ps: Vector[Process[Task,K]]): Process[Task,K] = { if (ps.isEmpty) { halt } else if (ps.size == 1) { ps.head } else if (ps.size == 2) { meld(ps(0), ps(1)) } else { meldAll(ps.grouped(2).map(xs => meldAll(xs)).toVector) } } }
ellchow/scaramouch
scaramouch-collection/src/main/scala/com/github/ellchow/scaramouch/collection/Join.scala
Scala
apache-2.0
6,435
package Import import Import.FileImport.FileImporter.{BrokenLines, PlayerList} import Import.FileImport.FileImporter import Import.ImportOptimizer.ImportOptimizer.FinalPlayers import RestConnection.TeamRequest import akka.actor.{ActorRef, Props, Actor} /** * Created by yannick on 13.02.16. */ object ImportManager { val name = "import-manger" def props(master: ActorRef) = Props(new ImportManager(master)) } case class PlayerListWithWeightVector(playerList: PlayerList, weightVector: Vector[Int]) class ImportManager(master: ActorRef) extends Actor{ val importer = context.actorOf(FileImporter.props(self), FileImporter.name) val importOptimizer = context.actorOf(ImportOptimizer.ImportOptimizer.props(self), ImportOptimizer.ImportOptimizer.name) var weightVector = Vector.empty[Int] def receive = importFile def importFile: Receive = { case p @ TeamRequest(_,_,w) => importer ! p weightVector = w case br @ BrokenLines(_) => master ! br case players @ PlayerList(_) => importOptimizer ! PlayerListWithWeightVector(players, weightVector) context become optimizeFile } def optimizeFile: Receive = { case f @ FinalPlayers(players, meanVals) => master ! f } }
yannick-cw/tournament_planer
hatplaner/src/main/scala/Import/ImportManager.scala
Scala
mit
1,220
package org.jetbrains.plugins.scala package lang.refactoring.changeSignature.changeInfo import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScClass import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.ScalaParameterInfo import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil /** * Nikolay.Tropin * 2014-08-29 */ private[changeInfo] trait ParametersChangeInfo { this: ScalaChangeInfo => private val oldParameters = ScalaParameterInfo.allForMethod(function) private val oldParametersArray = oldParameters.flatten.toArray private val oldParameterNames: Array[String] = oldParametersArray.map(_.name) private val oldParameterTypes: Array[String] = oldParametersArray.map(_.getTypeText) val toRemoveParm: Array[Boolean] = oldParametersArray.zipWithIndex.map { case (_, i) => !newParameters.exists(_.oldIndex == i) } val isParameterSetOrOrderChanged: Boolean = { oldParameters.map(_.length) != newParams.map(_.length) || newParameters.zipWithIndex.exists {case (p, i) => p.oldIndex != i} } val isParameterNamesChanged: Boolean = newParameters.zipWithIndex.exists { case (p, i) => p.oldIndex == i && p.getName != getOldParameterNames(i) } val isParameterTypesChanged: Boolean = newParameters.zipWithIndex.exists { case (p, i) => (p.oldIndex == i) && (p.getTypeText != getOldParameterTypes(i) || p.isRepeatedParameter != oldParametersArray(i).isRepeatedParameter || p.isByName != oldParametersArray(i).isByName) } val wasVararg: Boolean = false val isObtainsVarags: Boolean = false val isRetainsVarargs: Boolean = false val isArrayToVarargs: Boolean = false def newParameters: Seq[ScalaParameterInfo] = newParams.flatten override def getOldParameterNames: Array[String] = oldParameterNames override def getOldParameterTypes: Array[String] = oldParameterTypes def defaultParameterForJava(p: ScalaParameterInfo, idx: Int): String = { if (this.isAddDefaultArgs) { if (this.function.isConstructor) { this.function.containingClass match { case c: ScClass => val className = ScalaNamesUtil.toJavaName(c.name) s"$className.$$lessinit$$greater$$default$$${idx + 1}()" case _ => p.defaultValue } } else s"${this.getNewName}$$default$$${idx + 1}()" } else p.defaultValue } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/changeInfo/ParametersChangeInfo.scala
Scala
apache-2.0
2,415
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.webmvc.support.action import org.beangle.data.model.Entity import org.beangle.web.action.support.{ActionSupport, MimeSupport} import org.beangle.web.action.annotation.{mapping, param, response} import org.beangle.web.action.context.Params class RestfulService[T <: Entity[_]] extends ActionSupport with EntityAction[T] with MimeSupport { @response def index(): Any = { getInt("page") match { case Some(_) => entityDao.search(getQueryBuilder) case None => entityDao.search(getQueryBuilder.limit(null)) } } @response @mapping(value = "{id}") def info(@param("id") id: String): T = { Params.converter.convert(id, entityDao.domain.getEntity(entityName).get.id.clazz) match { case None => null.asInstanceOf[T] case Some(entityId) => getModel[T](entityName, entityId) } } }
beangle/webmvc
support/src/main/scala/org/beangle/webmvc/support/action/RestfulService.scala
Scala
lgpl-3.0
1,581
/* * Copyright DataGenerator Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.datagenerator.common.Graph import org.finra.datagenerator.common.NodeData.DisplayableData import scala.beans.BeanProperty /** * Describes the creation of first-added node in a graph * @param dataToAdd Data to add * @tparam T Node data type */ class AddInitialNodeDescription[+T <: DisplayableData](@BeanProperty val dataToAdd: T) extends EdgeCreationDescription[T]
mibrahim/DataGenerator
dg-common/src/main/scala/org/finra/datagenerator/common/Graph/AddInitialNodeDescription.scala
Scala
apache-2.0
997
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\ * @ @ * * # # # # (c) 2017 CAB * * # # # # # # * * # # # # # # # # # # # # * * # # # # # # # # # * * # # # # # # # # # # # # # # # # # # * * # # # # # # # # # # # # # # # # # * * # # # # # # # # # # # # # * * # # # # # # # # # # # # # # * * # # # # # # # # # # # # # # # # # # * * @ @ * \\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */ package mathact.core.sketch.infrastructure.instance import mathact.core.model.messages.Msg import mathact.core.sketch.blocks.WorkbenchLike /** Sketch instance * Created by CAB on 17.10.2016. */ private[core] object SketchInstance { //Local messages case class SketchInstanceBuilt(instance: WorkbenchLike) extends Msg case class SketchInstanceBuiltError(error: Throwable) extends Msg case object SketchInstanceBuildTimeout extends Msg}
AlexCAB/MathAct
mathact_core/src/main/scala/mathact/core/sketch/infrastructure/instance/SketchInstance.scala
Scala
mit
1,559
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.admin import org.scalatest.junit.JUnit3Suite import kafka.zk.ZooKeeperTestHarness import junit.framework.Assert._ import kafka.utils.{ZkUtils, TestUtils} import kafka.server.{KafkaServer, KafkaConfig} import org.junit.Test import kafka.common._ import kafka.producer.{ProducerConfig, Producer} import java.util.Properties import kafka.api._ import kafka.consumer.SimpleConsumer import kafka.producer.KeyedMessage import kafka.common.TopicAndPartition import kafka.api.PartitionOffsetRequestInfo class DeleteTopicTest extends JUnit3Suite with ZooKeeperTestHarness { @Test def testDeleteTopicWithAllAliveReplicas() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) verifyTopicDeletion(topic, servers) servers.foreach(_.shutdown()) } @Test def testResumeDeleteTopicWithRecoveredFollower() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // shut down one follower replica val leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) val follower = servers.filter(s => s.config.brokerId != leaderIdOpt.get).last follower.shutdown() // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // check if all replicas but the one that is shut down has deleted the log assertTrue("Replicas 0,1 have not deleted log in 1000ms", TestUtils.waitUntilTrue(() => servers.filter(s => s.config.brokerId != follower.config.brokerId) .foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isEmpty), 1000)) // ensure topic deletion is halted assertTrue("Admin path /admin/delete_topic/test path deleted in 1000ms even when a follower replica is down", TestUtils.waitUntilTrue(() => ZkUtils.pathExists(zkClient, ZkUtils.getDeleteTopicPath(topic)), 500)) // restart follower replica follower.startup() verifyTopicDeletion(topic, servers) servers.foreach(_.shutdown()) } @Test def testResumeDeleteTopicOnControllerFailover() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // shut down the controller to trigger controller failover during delete topic val controllerId = ZkUtils.getController(zkClient) val controller = servers.filter(s => s.config.brokerId == controllerId).head controller.shutdown() // ensure topic deletion is halted assertTrue("Admin path /admin/delete_topic/test path deleted in 500ms even when a replica is down", TestUtils.waitUntilTrue(() => ZkUtils.pathExists(zkClient, ZkUtils.getDeleteTopicPath(topic)), 500)) // restart follower replica controller.startup() // wait until admin path for delete topic is deleted, signaling completion of topic deletion assertTrue("Admin path /admin/delete_topic/test path not deleted in 4000ms even after a follower replica is restarted", TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getDeleteTopicPath(topic)), 4000)) assertTrue("Topic path /brokers/topics/test not deleted after /admin/delete_topic/test path is deleted", TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic)), 100)) // ensure that logs from all replicas are deleted if delete topic is marked successful in zookeeper assertTrue("Replica logs not deleted after delete topic is complete", servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isEmpty)) servers.foreach(_.shutdown()) } @Test def testRequestHandlingDuringDeleteTopic() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // shut down one follower replica var leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) val follower = servers.filter(s => s.config.brokerId != leaderIdOpt.get).last follower.shutdown() // test if produce requests are failed with UnknownTopicOrPartitionException during delete topic val props1 = new Properties() props1.put("metadata.broker.list", servers.map(s => s.config.hostName + ":" + s.config.port).mkString(",")) props1.put("serializer.class", "kafka.serializer.StringEncoder") props1.put("request.required.acks", "1") val producerConfig1 = new ProducerConfig(props1) val producer1 = new Producer[String, String](producerConfig1) try{ producer1.send(new KeyedMessage[String, String](topic, "test", "test1")) fail("Test should fail because the topic is being deleted") } catch { case e: FailedToSendMessageException => case oe: Throwable => fail("fails with exception", oe) } finally { producer1.close() } // test if fetch requests fail during delete topic servers.filter(s => s.config.brokerId != follower.config.brokerId).foreach { server => val consumer = new SimpleConsumer(server.config.hostName, server.config.port, 1000000, 64*1024, "") val request = new FetchRequestBuilder() .clientId("test-client") .addFetch(topic, 0, 0, 10000) .build() val fetched = consumer.fetch(request) val fetchResponse = fetched.data(topicAndPartition) assertTrue("Fetch should fail with UnknownTopicOrPartitionCode", fetchResponse.error == ErrorMapping.UnknownTopicOrPartitionCode) } // test if offset requests fail during delete topic servers.filter(s => s.config.brokerId != follower.config.brokerId).foreach { server => val consumer = new SimpleConsumer(server.config.hostName, server.config.port, 1000000, 64*1024, "") val offsetRequest = new OffsetRequest(Map(topicAndPartition -> new PartitionOffsetRequestInfo(OffsetRequest.LatestTime, 1))) val offsetResponse = consumer.getOffsetsBefore(offsetRequest) val errorCode = offsetResponse.partitionErrorAndOffsets(topicAndPartition).error assertTrue("Offset request should fail with UnknownTopicOrPartitionCode", errorCode == ErrorMapping.UnknownTopicOrPartitionCode) // test if offset fetch requests fail during delete topic val offsetFetchRequest = new OffsetFetchRequest("test-group", Seq(topicAndPartition)) val offsetFetchResponse = consumer.fetchOffsets(offsetFetchRequest) val offsetFetchErrorCode = offsetFetchResponse.requestInfo(topicAndPartition).error assertTrue("Offset fetch request should fail with UnknownTopicOrPartitionCode", offsetFetchErrorCode == ErrorMapping.UnknownTopicOrPartitionCode) // TODO: test if offset commit requests fail during delete topic } // restart follower replica follower.startup() verifyTopicDeletion(topic, servers) servers.foreach(_.shutdown()) } @Test def testPreferredReplicaElectionDuringDeleteTopic() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) var leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) // shut down the controller to move the leader to a non preferred replica before delete topic val preferredReplicaId = 0 val preferredReplica = servers.filter(s => s.config.brokerId == preferredReplicaId).head preferredReplica.shutdown() preferredReplica.startup() val newLeaderIdOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 3000, leaderIdOpt) assertTrue("New leader should be elected prior to delete topic", newLeaderIdOpt.isDefined) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // test preferred replica election val preferredReplicaElection = new PreferredReplicaLeaderElectionCommand(zkClient, Set(topicAndPartition)) preferredReplicaElection.moveLeaderToPreferredReplica() val leaderAfterPreferredReplicaElectionOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000, newLeaderIdOpt) assertTrue("Preferred replica election should not move leader during delete topic", leaderAfterPreferredReplicaElectionOpt.isEmpty || leaderAfterPreferredReplicaElectionOpt.get == newLeaderIdOpt.get) val newControllerId = ZkUtils.getController(zkClient) val newController = servers.filter(s => s.config.brokerId == newControllerId).head assertFalse("Preferred replica election should fail", newController.kafkaController.controllerContext.partitionsUndergoingPreferredReplicaElection.contains(topicAndPartition)) verifyTopicDeletion(topic, servers) servers.foreach(_.shutdown()) } @Test def testDeleteTopicDuringPreferredReplicaElection() { val topic = "test" val topicAndPartition = TopicAndPartition(topic, 0) val servers = createTestTopicAndCluster(topic) var leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) // shut down the controller to move the leader to a non preferred replica before delete topic val preferredReplicaId = 0 val preferredReplica = servers.filter(s => s.config.brokerId == preferredReplicaId).head preferredReplica.shutdown() preferredReplica.startup() val newLeaderIdOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 3000, leaderIdOpt) assertTrue("New leader should be elected prior to delete topic", newLeaderIdOpt.isDefined) // test preferred replica election val preferredReplicaElection = new PreferredReplicaLeaderElectionCommand(zkClient, Set(topicAndPartition)) preferredReplicaElection.moveLeaderToPreferredReplica() // start topic deletion during preferred replica election. This should halt topic deletion but eventually // complete it successfully AdminUtils.deleteTopic(zkClient, topic) val newControllerId = ZkUtils.getController(zkClient) val newController = servers.filter(s => s.config.brokerId == newControllerId).head assertTrue("Preferred replica election should succeed after 1000ms", TestUtils.waitUntilTrue(() => !newController.kafkaController.controllerContext.partitionsUndergoingPreferredReplicaElection.contains(topicAndPartition), 1000)) verifyTopicDeletion(topic, servers) servers.foreach(_.shutdown()) } @Test def testPartitionReassignmentDuringDeleteTopic() { val expectedReplicaAssignment = Map(0 -> List(0, 1, 2)) val topic = "test" val topicAndPartition = TopicAndPartition(topic, 0) val brokerConfigs = TestUtils.createBrokerConfigs(4) brokerConfigs.foreach(p => p.setProperty("delete.topic.enable", "true")) // create brokers val allServers = brokerConfigs.map(b => TestUtils.createServer(new KafkaConfig(b))) val servers = allServers.filter(s => expectedReplicaAssignment(0).contains(s.config.brokerId)) // create the topic AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment) // wait until replica log is created on every broker assertTrue("Replicas for topic test not created in 1000ms", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isDefined), 1000)) var leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // start partition reassignment at the same time right after delete topic. In this case, reassignment will fail since // the topic is being deleted // reassign partition 0 val oldAssignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, 0) val newReplicas = Seq(1, 2, 3) val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas)) assertTrue("Partition reassignment should fail for [test,0]", reassignPartitionsCommand.reassignPartitions()) // wait until reassignment is completed TestUtils.waitUntilTrue(() => { val partitionsBeingReassigned = ZkUtils.getPartitionsBeingReassigned(zkClient).mapValues(_.newReplicas); ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicAndPartition, newReplicas, Map(topicAndPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentFailed; }, 1000) val controllerId = ZkUtils.getController(zkClient) val controller = servers.filter(s => s.config.brokerId == controllerId).head assertFalse("Partition reassignment should fail", controller.kafkaController.controllerContext.partitionsBeingReassigned.contains(topicAndPartition)) val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, 0) assertEquals("Partition should not be reassigned to 0, 1, 2", oldAssignedReplicas, assignedReplicas) verifyTopicDeletion(topic, servers) allServers.foreach(_.shutdown()) } @Test def testDeleteTopicDuringPartitionReassignment() { val expectedReplicaAssignment = Map(0 -> List(0, 1, 2)) val topic = "test" val topicAndPartition = TopicAndPartition(topic, 0) val brokerConfigs = TestUtils.createBrokerConfigs(4) brokerConfigs.foreach(p => p.setProperty("delete.topic.enable", "true")) // create brokers val allServers = brokerConfigs.map(b => TestUtils.createServer(new KafkaConfig(b))) val servers = allServers.filter(s => expectedReplicaAssignment(0).contains(s.config.brokerId)) // create the topic AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment) // wait until replica log is created on every broker assertTrue("Replicas for topic test not created in 1000ms", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isDefined), 1000)) var leaderIdOpt = ZkUtils.getLeaderForPartition(zkClient, topic, 0) assertTrue("Leader should exist for partition [test,0]", leaderIdOpt.isDefined) // start partition reassignment at the same time right before delete topic. In this case, reassignment will succeed // reassign partition 0 val newReplicas = Seq(1, 2, 3) val reassignPartitionsCommand = new ReassignPartitionsCommand(zkClient, Map(topicAndPartition -> newReplicas)) assertTrue("Partition reassignment failed for test, 0", reassignPartitionsCommand.reassignPartitions()) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // wait until reassignment is completed TestUtils.waitUntilTrue(() => { val partitionsBeingReassigned = ZkUtils.getPartitionsBeingReassigned(zkClient).mapValues(_.newReplicas); ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded(zkClient, topicAndPartition, newReplicas, Map(topicAndPartition -> newReplicas), partitionsBeingReassigned) == ReassignmentCompleted; }, 1000) val controllerId = ZkUtils.getController(zkClient) val controller = servers.filter(s => s.config.brokerId == controllerId).head assertFalse("Partition reassignment should complete", controller.kafkaController.controllerContext.partitionsBeingReassigned.contains(topicAndPartition)) val assignedReplicas = ZkUtils.getReplicasForPartition(zkClient, topic, 0) assertEquals("Partition should be reassigned to 1,2,3", newReplicas, assignedReplicas) verifyTopicDeletion(topic, allServers) allServers.foreach(_.shutdown()) } @Test def testDeleteTopicDuringAddPartition() { val topic = "test" val servers = createTestTopicAndCluster(topic) val newPartition = TopicAndPartition(topic, 1) // add partitions to topic AdminUtils.addPartitions(zkClient, topic, 2, "0:1:2,0:1:2") // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // test if topic deletion is resumed verifyTopicDeletion(topic, servers) // verify that new partition doesn't exist on any broker either assertTrue("Replica logs not for new partition [test,1] not deleted after delete topic is complete", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(newPartition).isEmpty), 1000)) servers.foreach(_.shutdown()) } @Test def testAddPartitionDuringDeleteTopic() { val topic = "test" val topicAndPartition = TopicAndPartition(topic, 0) val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) // add partitions to topic val newPartition = TopicAndPartition(topic, 1) AdminUtils.addPartitions(zkClient, topic, 2, "0:1:2,0:1:2") verifyTopicDeletion(topic, servers) // verify that new partition doesn't exist on any broker either assertTrue("Replica logs not deleted after delete topic is complete", servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(newPartition).isEmpty)) servers.foreach(_.shutdown()) } @Test def testRecreateTopicAfterDeletion() { val expectedReplicaAssignment = Map(0 -> List(0, 1, 2)) val topic = "test" val topicAndPartition = TopicAndPartition(topic, 0) val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) verifyTopicDeletion(topic, servers) // re-create topic on same replicas AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment) // wait until leader is elected val leaderIdOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000) assertTrue("New leader should be elected after re-creating topic test", leaderIdOpt.isDefined) // check if all replica logs are created assertTrue("Replicas for topic test not created in 1000ms", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isDefined), 1000)) servers.foreach(_.shutdown()) } @Test def testTopicConfigChangesDuringDeleteTopic() { val topic = "test" val servers = createTestTopicAndCluster(topic) val topicConfigs = new Properties() topicConfigs.put("segment.ms", "1000000") // start topic deletion AdminUtils.deleteTopic(zkClient, topic) verifyTopicDeletion(topic, servers) // make topic config changes try { AdminUtils.changeTopicConfig(zkClient, topic, topicConfigs) fail("Should fail with AdminOperationException for topic doesn't exist") } catch { case e: AdminOperationException => // expected } servers.foreach(_.shutdown()) } @Test def testAutoCreateAfterDeleteTopic() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, topic) verifyTopicDeletion(topic, servers) // test if first produce request after topic deletion auto creates the topic val props = new Properties() props.put("metadata.broker.list", servers.map(s => s.config.hostName + ":" + s.config.port).mkString(",")) props.put("serializer.class", "kafka.serializer.StringEncoder") props.put("producer.type", "sync") props.put("request.required.acks", "1") props.put("message.send.max.retries", "1") val producerConfig = new ProducerConfig(props) val producer = new Producer[String, String](producerConfig) try{ producer.send(new KeyedMessage[String, String](topic, "test", "test1")) } catch { case e: FailedToSendMessageException => fail("Topic should have been auto created") case oe: Throwable => fail("fails with exception", oe) } // test the topic path exists assertTrue("Topic not auto created", ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic))) // wait until leader is elected val leaderIdOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000) assertTrue("New leader should be elected after re-creating topic test", leaderIdOpt.isDefined) try { producer.send(new KeyedMessage[String, String](topic, "test", "test1")) } catch { case e: FailedToSendMessageException => fail("Topic should have been auto created") case oe: Throwable => fail("fails with exception", oe) } finally { producer.close() } servers.foreach(_.shutdown()) } @Test def testDeleteNonExistingTopic() { val topicAndPartition = TopicAndPartition("test", 0) val topic = topicAndPartition.topic val servers = createTestTopicAndCluster(topic) // start topic deletion AdminUtils.deleteTopic(zkClient, "test2") // verify delete topic path for test2 is removed from zookeeper verifyTopicDeletion("test2", servers) // verify that topic test is untouched assertTrue("Replicas for topic test not created in 1000ms", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isDefined), 1000)) // test the topic path exists assertTrue("Topic test mistakenly deleted", ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic))) // topic test should have a leader val leaderIdOpt = TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0, 1000) assertTrue("Leader should exist for topic test", leaderIdOpt.isDefined) servers.foreach(_.shutdown()) } private def createTestTopicAndCluster(topic: String): Seq[KafkaServer] = { val expectedReplicaAssignment = Map(0 -> List(0, 1, 2)) val topicAndPartition = TopicAndPartition(topic, 0) val brokerConfigs = TestUtils.createBrokerConfigs(3) brokerConfigs.foreach(p => p.setProperty("delete.topic.enable", "true")) // create brokers val servers = brokerConfigs.map(b => TestUtils.createServer(new KafkaConfig(b))) // create the topic AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkClient, topic, expectedReplicaAssignment) // wait until replica log is created on every broker assertTrue("Replicas for topic test not created in 1000ms", TestUtils.waitUntilTrue(() => servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isDefined), 1000)) servers } private def verifyTopicDeletion(topic: String, servers: Seq[KafkaServer]) { val topicAndPartition = TopicAndPartition(topic, 0) // wait until admin path for delete topic is deleted, signaling completion of topic deletion assertTrue("Admin path /admin/delete_topic/test path not deleted in 1000ms even after a replica is restarted", TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getDeleteTopicPath(topic)), 1000)) assertTrue("Topic path /brokers/topics/test not deleted after /admin/delete_topic/test path is deleted", TestUtils.waitUntilTrue(() => !ZkUtils.pathExists(zkClient, ZkUtils.getTopicPath(topic)), 100)) // ensure that logs from all replicas are deleted if delete topic is marked successful in zookeeper assertTrue("Replica logs not deleted after delete topic is complete", servers.foldLeft(true)((res, server) => res && server.getLogManager().getLog(topicAndPartition).isEmpty)) } }
unix1986/universe
tool/kafka-0.8.1.1-src/core/src/test/scala/unit/kafka/admin/DeleteTopicTest.scala
Scala
bsd-2-clause
24,766
package model import play.api.libs.json._ /** * Represents the Swagger definition for FavoriteImpl. * @param additionalProperties Any additional properties this model may have. */ @javax.annotation.Generated(value = Array("org.openapitools.codegen.languages.ScalaPlayFrameworkServerCodegen"), date = "2022-02-13T02:38:35.589632Z[Etc/UTC]") case class FavoriteImpl( `class`: Option[String], links: Option[FavoriteImpllinks], item: Option[PipelineImpl] additionalProperties: ) object FavoriteImpl { implicit lazy val favoriteImplJsonFormat: Format[FavoriteImpl] = { val realJsonFormat = Json.format[FavoriteImpl] val declaredPropNames = Set("`class`", "links", "item") Format( Reads { case JsObject(xs) => val declaredProps = xs.filterKeys(declaredPropNames) val additionalProps = JsObject(xs -- declaredPropNames) val restructuredProps = declaredProps + ("additionalProperties" -> additionalProps) val newObj = JsObject(restructuredProps) realJsonFormat.reads(newObj) case _ => JsError("error.expected.jsobject") }, Writes { favoriteImpl => val jsObj = realJsonFormat.writes(favoriteImpl) val additionalProps = jsObj.value("additionalProperties").as[JsObject] val declaredProps = jsObj - "additionalProperties" val newObj = declaredProps ++ additionalProps newObj } ) } }
cliffano/swaggy-jenkins
clients/scala-play-server/generated/app/model/FavoriteImpl.scala
Scala
mit
1,455
/* Taken from https://github.com/mattdesl/lwjgl-basics/blob/master/test/mdesl/test/FileDrop.java * master - cdf5c33c5365848e7e052036e1ff549dfc6c109f. * Rewritten to Scala for Java 8 by fehu. */ package feh.tec.cvis.gui import java.awt.Container import java.awt.datatransfer.{Transferable, UnsupportedFlavorException, DataFlavor} import java.awt.dnd._ import java.awt.event.{HierarchyEvent, HierarchyListener} import java.io.{BufferedReader, File} import java.net.URI import javax.swing.JComponent import javax.swing.border.Border import scala.collection.convert.decorateAsScala._ import feh.util._ import scala.swing.{Component, Dialog} trait FileDrop { def filesDraggedIn(): Boolean def filesDraggedOut() def filesDropped: List[File] => Unit def component: Either[JComponent, Component] protected lazy val c: JComponent = component.right.map(_.peer).merge protected lazy val dropListener: DropTargetListener = new DropTargetListener{ def dragOver(dtde: DropTargetDragEvent): Unit = {} def dragExit(dte: DropTargetEvent): Unit = filesDraggedOut() def drop(dtde: DropTargetDropEvent): Unit = { debugLog( "FileDrop: drop event." ) val tr = dtde.getTransferable if(tr.isDataFlavorSupported(DataFlavor.javaFileListFlavor)){ dtde.acceptDrop(java.awt.dnd.DnDConstants.ACTION_COPY) debugLog( "FileDrop: file list accepted." ) try filesDropped(tr.getTransferData(DataFlavor.javaFileListFlavor) .asInstanceOf[java.util.List[File]].asScala.toList ) catch { case ex: UnsupportedFlavorException => Dialog.showMessage(title = "File Drop Error", message = "Your system doesn't support file drop", messageType = Dialog.Message.Warning) } } filesDraggedOut() dtde.getDropTargetContext.dropComplete(true) } def dropActionChanged(dtde: DropTargetDragEvent): Unit = { debugLog("FileDrop: dragEnter event." ) acceptOrReject(dtde, dtde.getTransferable) } def dragEnter(dtde: DropTargetDragEvent) = { debugLog("FileDrop: dragEnter event." ) acceptOrReject(dtde, dtde.getTransferable) } } makeDropTarget(c, recursive = true) protected def acceptOrReject(dtde: DropTargetDragEvent, tr: Transferable) = { debugLog("isFileListDrop_? " + isFileListDrop_?(dtde)) val fdi = filesDraggedIn() debugLog("filesDraggedIn" + fdi) debugLog("processRepresentationClassReader(dtde, tr)" + processRepresentationClassReader(dtde, tr)) if (isFileListDrop_?(dtde) && fdi || processRepresentationClassReader(dtde, tr).nonEmpty && filesDraggedIn()) { dtde.acceptDrag(DnDConstants.ACTION_COPY) debugLog("FileDrop: event accepted.") } else { dtde.rejectDrag() debugLog("FileDrop: event rejected.") } } protected def isFileListDrop_?(ev: DropTargetDragEvent) = ev.getCurrentDataFlavors.toList match { // ev.getCurrentDataFlavors case Nil => debugLog("FileDrop: no data flavors."); false case flavors => debugLog("Flavors: " + flavors) flavors.exists { flavor => flavor equals DataFlavor.javaFileListFlavor //|| flavor.isRepresentationClassReader } } protected def processRepresentationClassReader(dtde: DropTargetDragEvent, tr: Transferable) = dtde.getCurrentDataFlavors.collect{ case flavor if flavor.isRepresentationClassReader => val reader = new BufferedReader(flavor.getReaderForText(tr)) // causes some exceptions Y[List[File], List[File]]( rec => acc => if(reader.ready()) new File(new URI(reader.readLine())) :: acc else acc )(Nil) }.flatten protected def makeDropTarget(c: java.awt.Component, recursive: Boolean = true ): Unit = { // Make drop target val dt = new DropTarget() dt.addDropTargetListener( dropListener ) // Listen for hierarchy changes and remove the drop target when the parent gets cleared out. c.addHierarchyListener( new HierarchyListener { def hierarchyChanged(e: HierarchyEvent) = { debugLog( "FileDrop: Hierarchy changed." ) Option(c.getParent).map(_ => new DropTarget(c, dropListener)) getOrElse c.setDropTarget(null) } }) if (c.getParent != null) new DropTarget(c, dropListener) if (recursive) c match { case cc: Container => cc.getComponents foreach (makeDropTarget(_)) } } var DEBUG = false protected def debugLog(msg: Any) = if (DEBUG) println(msg.toString) } object FileDrop{ trait BorderOnDrag{ self: FileDrop => def filesInBorder: Border protected lazy val normalBorder = c.getBorder def filesDraggedIn() = { c.setBorder(filesInBorder); true } def filesDraggedOut() = c.setBorder(normalBorder) normalBorder // init lazy val } object BorderOnDrag{ def apply(comp: Either[JComponent, Component], borderOnDrag: Border, dropped: List[File] => Unit) = new FileDrop with BorderOnDrag{ def filesDropped = dropped def component = comp def filesInBorder = borderOnDrag } } implicit class FileDropCreationWrapper(c: Component){ def onFilesDropped(borderOnDrag: Border, f: List[File] => Unit) = BorderOnDrag(Right(c), borderOnDrag, f) } implicit class FileDropCreationJWrapper(c: JComponent){ def onFilesDropped(borderOnDrag: Border, f: List[File] => Unit) = BorderOnDrag(Left(c), borderOnDrag, f) } }
fehu/comp-vis
gui/src/main/scala/feh/tec/cvis/gui/FileDrop.scala
Scala
mit
5,665
// This class represents an intermediate spread, where "long x" or "short x" has been defined, but the option that // is bought or sold has not been defined yet. A SpreadBuilder must of followed by: of <Option> to do anything with it. class SpreadBuilder { var count : Integer = 0 var spread : Spread = null def of (op : Option): Spread = { require(count != 0, "SpreadBuilder without count provided") if (spread == null) { op.costBasis *= count new Spread and (op contracts count) } else { op.costBasis *= count spread and (op contracts count) } } }
Nic0S/Options-DSL
src/SpreadBuilder.scala
Scala
apache-2.0
603
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.plan.nodes.datastream import org.apache.calcite.plan.{RelOptCluster, RelTraitSet} import org.apache.calcite.rel.`type`.RelDataType import org.apache.calcite.rel.core.AggregateCall import org.apache.calcite.rel.{RelNode, RelWriter, SingleRel} import org.apache.flink.api.java.tuple.Tuple import org.apache.flink.streaming.api.datastream.{AllWindowedStream, DataStream, KeyedStream, WindowedStream} import org.apache.flink.streaming.api.windowing.assigners._ import org.apache.flink.streaming.api.windowing.windows.{Window => DataStreamWindow} import org.apache.flink.table.api.{StreamQueryConfig, StreamTableEnvironment, TableException} import org.apache.flink.table.calcite.FlinkRelBuilder.NamedWindowProperty import org.apache.flink.table.codegen.CodeGenerator import org.apache.flink.table.expressions.ExpressionUtils._ import org.apache.flink.table.plan.logical._ import org.apache.flink.table.plan.nodes.CommonAggregate import org.apache.flink.table.plan.schema.RowSchema import org.apache.flink.table.plan.nodes.datastream.DataStreamGroupWindowAggregate._ import org.apache.flink.table.plan.rules.datastream.DataStreamRetractionRules import org.apache.flink.table.runtime.aggregate.AggregateUtil._ import org.apache.flink.table.runtime.aggregate._ import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo} import org.apache.flink.table.typeutils.TypeCheckUtils.isTimeInterval import org.apache.flink.table.typeutils.{RowIntervalTypeInfo, TimeIntervalTypeInfo} class DataStreamGroupWindowAggregate( window: LogicalWindow, namedProperties: Seq[NamedWindowProperty], cluster: RelOptCluster, traitSet: RelTraitSet, inputNode: RelNode, namedAggregates: Seq[CalcitePair[AggregateCall, String]], schema: RowSchema, inputSchema: RowSchema, grouping: Array[Int]) extends SingleRel(cluster, traitSet, inputNode) with CommonAggregate with DataStreamRel { override def deriveRowType(): RelDataType = schema.logicalType override def needsUpdatesAsRetraction = true override def consumesRetractions = true def getGroupings: Array[Int] = grouping def getWindowProperties: Seq[NamedWindowProperty] = namedProperties override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = { new DataStreamGroupWindowAggregate( window, namedProperties, cluster, traitSet, inputs.get(0), namedAggregates, schema, inputSchema, grouping) } override def toString: String = { s"Aggregate(${ if (!grouping.isEmpty) { s"groupBy: (${groupingToString(inputSchema.logicalType, grouping)}), " } else { "" } }window: ($window), " + s"select: (${ aggregationToString( inputSchema.logicalType, grouping, getRowType, namedAggregates, namedProperties) }))" } override def explainTerms(pw: RelWriter): RelWriter = { super.explainTerms(pw) .itemIf("groupBy", groupingToString(inputSchema.logicalType, grouping), !grouping.isEmpty) .item("window", window) .item( "select", aggregationToString( inputSchema.logicalType, grouping, schema.logicalType, namedAggregates, namedProperties)) } override def translateToPlan( tableEnv: StreamTableEnvironment, queryConfig: StreamQueryConfig): DataStream[CRow] = { val inputDS = input.asInstanceOf[DataStreamRel].translateToPlan(tableEnv, queryConfig) val physicalNamedAggregates = namedAggregates.map { namedAggregate => new CalcitePair[AggregateCall, String]( inputSchema.mapAggregateCall(namedAggregate.left), namedAggregate.right) } val consumeRetraction = DataStreamRetractionRules.isAccRetract(input) if (consumeRetraction) { throw new TableException( "Retraction on windowed GroupBy aggregation is not supported yet. " + "Note: Windowed GroupBy aggregation should not follow a " + "non-windowed GroupBy aggregation.") } val outRowType = CRowTypeInfo(schema.physicalTypeInfo) val aggString = aggregationToString( inputSchema.logicalType, grouping, schema.logicalType, namedAggregates, namedProperties) val keyedAggOpName = s"groupBy: (${groupingToString(inputSchema.logicalType, grouping)}), " + s"window: ($window), " + s"select: ($aggString)" val nonKeyedAggOpName = s"window: ($window), select: ($aggString)" val generator = new CodeGenerator( tableEnv.getConfig, false, inputSchema.physicalTypeInfo) val needMerge = window match { case SessionGroupWindow(_, _, _) => true case _ => false } val physicalGrouping = grouping.map(inputSchema.mapIndex) // grouped / keyed aggregation if (physicalGrouping.length > 0) { val windowFunction = AggregateUtil.createAggregationGroupWindowFunction( window, physicalGrouping.length, physicalNamedAggregates.size, schema.physicalArity, namedProperties) val keyedStream = inputDS.keyBy(physicalGrouping: _*) val windowedStream = createKeyedWindowedStream(window, keyedStream) .asInstanceOf[WindowedStream[CRow, Tuple, DataStreamWindow]] val (aggFunction, accumulatorRowType, aggResultRowType) = AggregateUtil.createDataStreamAggregateFunction( generator, physicalNamedAggregates, inputSchema.physicalType, inputSchema.physicalFieldTypeInfo, schema.physicalType, physicalGrouping, needMerge) windowedStream .aggregate(aggFunction, windowFunction, accumulatorRowType, aggResultRowType, outRowType) .name(keyedAggOpName) } // global / non-keyed aggregation else { val windowFunction = AggregateUtil.createAggregationAllWindowFunction( window, schema.physicalArity, namedProperties) val windowedStream = createNonKeyedWindowedStream(window, inputDS) .asInstanceOf[AllWindowedStream[CRow, DataStreamWindow]] val (aggFunction, accumulatorRowType, aggResultRowType) = AggregateUtil.createDataStreamAggregateFunction( generator, physicalNamedAggregates, inputSchema.physicalType, inputSchema.physicalFieldTypeInfo, schema.physicalType, Array[Int](), needMerge) windowedStream .aggregate(aggFunction, windowFunction, accumulatorRowType, aggResultRowType, outRowType) .name(nonKeyedAggOpName) } } } object DataStreamGroupWindowAggregate { private def createKeyedWindowedStream( groupWindow: LogicalWindow, stream: KeyedStream[CRow, Tuple]): WindowedStream[CRow, Tuple, _ <: DataStreamWindow] = groupWindow match { case TumblingGroupWindow(_, timeField, size) if isProctimeAttribute(timeField) && isTimeIntervalLiteral(size)=> stream.window(TumblingProcessingTimeWindows.of(toTime(size))) case TumblingGroupWindow(_, timeField, size) if isProctimeAttribute(timeField) && isRowCountLiteral(size)=> stream.countWindow(toLong(size)) case TumblingGroupWindow(_, timeField, size) if isRowtimeAttribute(timeField) && isTimeIntervalLiteral(size) => stream.window(TumblingEventTimeWindows.of(toTime(size))) case TumblingGroupWindow(_, _, size) => // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported.") case SlidingGroupWindow(_, timeField, size, slide) if isProctimeAttribute(timeField) && isTimeIntervalLiteral(slide) => stream.window(SlidingProcessingTimeWindows.of(toTime(size), toTime(slide))) case SlidingGroupWindow(_, timeField, size, slide) if isProctimeAttribute(timeField) && isRowCountLiteral(size) => stream.countWindow(toLong(size), toLong(slide)) case SlidingGroupWindow(_, timeField, size, slide) if isRowtimeAttribute(timeField) && isTimeIntervalLiteral(size)=> stream.window(SlidingEventTimeWindows.of(toTime(size), toTime(slide))) case SlidingGroupWindow(_, _, size, slide) => // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported.") case SessionGroupWindow(_, timeField, gap) if isProctimeAttribute(timeField) => stream.window(ProcessingTimeSessionWindows.withGap(toTime(gap))) case SessionGroupWindow(_, timeField, gap) if isRowtimeAttribute(timeField) => stream.window(EventTimeSessionWindows.withGap(toTime(gap))) } private def createNonKeyedWindowedStream( groupWindow: LogicalWindow, stream: DataStream[CRow]): AllWindowedStream[CRow, _ <: DataStreamWindow] = groupWindow match { case TumblingGroupWindow(_, timeField, size) if isProctimeAttribute(timeField) && isTimeIntervalLiteral(size) => stream.windowAll(TumblingProcessingTimeWindows.of(toTime(size))) case TumblingGroupWindow(_, timeField, size) if isProctimeAttribute(timeField) && isRowCountLiteral(size)=> stream.countWindowAll(toLong(size)) case TumblingGroupWindow(_, _, size) if isTimeInterval(size.resultType) => stream.windowAll(TumblingEventTimeWindows.of(toTime(size))) case TumblingGroupWindow(_, _, size) => // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported.") case SlidingGroupWindow(_, timeField, size, slide) if isProctimeAttribute(timeField) && isTimeIntervalLiteral(size) => stream.windowAll(SlidingProcessingTimeWindows.of(toTime(size), toTime(slide))) case SlidingGroupWindow(_, timeField, size, slide) if isProctimeAttribute(timeField) && isRowCountLiteral(size)=> stream.countWindowAll(toLong(size), toLong(slide)) case SlidingGroupWindow(_, timeField, size, slide) if isRowtimeAttribute(timeField) && isTimeIntervalLiteral(size)=> stream.windowAll(SlidingEventTimeWindows.of(toTime(size), toTime(slide))) case SlidingGroupWindow(_, _, size, slide) => // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported.") case SessionGroupWindow(_, timeField, gap) if isProctimeAttribute(timeField) && isTimeIntervalLiteral(gap) => stream.windowAll(ProcessingTimeSessionWindows.withGap(toTime(gap))) case SessionGroupWindow(_, timeField, gap) if isRowtimeAttribute(timeField) && isTimeIntervalLiteral(gap) => stream.windowAll(EventTimeSessionWindows.withGap(toTime(gap))) } }
fanyon/flink
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/DataStreamGroupWindowAggregate.scala
Scala
apache-2.0
12,466
import org.apache.spark.{SparkConf, SparkContext} object TestSpark { def main(args: Array[String]) { val sc = new SparkContext(new SparkConf() .setAppName("TestSpark") .set("spark.executor.memory", "512M") .set("spark.mesos.mesosExecutor.cores", "1") .set("spark.speculation", "true") .set("spark.mesos.coarse", "false") ) val sqlContext = new org.apache.spark.sql.SQLContext(sc) sqlContext.sql("SELECT 1") } }
saagie/platform-test
job-spark/src/main/scala/TestSpark.scala
Scala
gpl-2.0
465
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.spark import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext} import org.apache.spark.sql.sources.{CreatableRelationProvider, BaseRelation, RelationProvider} import org.apache.phoenix.spark._ class DefaultSource extends RelationProvider with CreatableRelationProvider { // Override 'RelationProvider.createRelation', this enables DataFrame.load() override def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = { verifyParameters(parameters) new PhoenixRelation( parameters("table"), parameters("zkUrl"), parameters.contains("dateAsTimestamp") )(sqlContext) } // Override 'CreatableRelationProvider.createRelation', this enables DataFrame.save() override def createRelation(sqlContext: SQLContext, mode: SaveMode, parameters: Map[String, String], data: DataFrame): BaseRelation = { if (!mode.equals(SaveMode.Overwrite)) { throw new Exception("SaveMode other than SaveMode.OverWrite is not supported") } verifyParameters(parameters) // Save the DataFrame to Phoenix data.saveToPhoenix(parameters("table"), zkUrl = parameters.get("zkUrl")) // Return a relation of the saved data createRelation(sqlContext, parameters) } // Ensure the required parameters are present def verifyParameters(parameters: Map[String, String]): Unit = { if (parameters.get("table").isEmpty) throw new RuntimeException("No Phoenix 'table' option defined") if (parameters.get("zkUrl").isEmpty) throw new RuntimeException("No Phoenix 'zkUrl' option defined") } }
RCheungIT/phoenix
phoenix-spark/src/main/scala/org/apache/phoenix/spark/DefaultSource.scala
Scala
apache-2.0
2,436
package tu.coreservice.action.way2think /** * Test class for Simulation. * @author max talanov * date 2012-05-28 * time: 11:38 PM */ import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.FunSuite import simulation.Simulation import tu.model.knowledge.domain.{Concept, ConceptNetwork} import tu.model.knowledge.KnowledgeURI import tu.coreservice.utilities.TestDataGenerator @RunWith(classOf[JUnitRunner]) class SimulationTest extends FunSuite { test("test Ok") { assert(condition = true) } test("Simulation exact match should work") { val sim = new Simulation() val res: Option[ConceptNetwork] = sim(TestDataGenerator.generateProblemDescriptionAnnotatedNarrative, TestDataGenerator.generateDomainModelConceptNetwork) // check concepts val concepts: List[Concept] = TestDataGenerator.generateProblemDescriptionAnnotatedNarrative.concepts res match { case Some(instanceNetwork: ConceptNetwork) => { val checkedNodes = instanceNetwork.nodes.filter( (c: Concept) => { c.generalisations.frames.exists { uriConceptPair: Pair[KnowledgeURI, Concept] => { concepts.contains(uriConceptPair._2) } } } ) assert(checkedNodes.size > 0) } case None => assert(false) } } test("Simulation ambiguious references should work") { val sim = new Simulation() val res: Option[ConceptNetwork] = sim(TestDataGenerator.generateProblemDescriptionAnnotatedNarrativeAmbiguous, TestDataGenerator.generateDomainModelConceptNetwork) // check concepts val concepts: List[Concept] = TestDataGenerator.generateProblemDescriptionAnnotatedNarrative.concepts res match { case Some(instanceNetwork: ConceptNetwork) => { val checkedNodes = instanceNetwork.nodes.filter( (c: Concept) => { c.generalisations.frames.exists { uriConceptPair: Pair[KnowledgeURI, Concept] => { concepts.contains(uriConceptPair._2) } } } ) assert(checkedNodes.size > 0) } case None => assert(false) } } }
keskival/2
coreservice.action.way2think/src/test/scala/tu/coreservice/action/way2think/SimulationTest.scala
Scala
gpl-3.0
2,245
package org.scalaide.core package quickassist import org.eclipse.jdt.ui.text.java.IJavaCompletionProposal import org.junit.AfterClass import org.junit.Assert import org.junit.BeforeClass import org.junit.Test import testsetup.SDTTestUtils import scala.util.control.Exception import org.scalaide.core.internal.quickassist.abstractimpl.ImplAbstractMembers import org.scalaide.core.internal.quickassist.abstractimpl.AbstractMemberProposal object ImplAbstractMemberTest extends QuickAssistTest { @BeforeClass def createProject() = create("assist") @AfterClass def deleteProject() = delete() } /** This test suite requires the UI. */ class ImplAbstractMemberTest extends QuickAssistTestHelper { import ImplAbstractMemberTest._ val quickAssist = new ImplAbstractMembers def createSource(packageName: String, unitName: String)(contents: String) = createSourceFile(packageName, unitName)(contents) def assistsFor(contents: String, expected: String): Unit = runQuickAssistWith(contents) { p => Assert.assertTrue("Abstract member not found", p.nonEmpty) val displayString = p.head.getDisplayString() Assert.assertEquals("Changes unexpected", expected, displayString) } def assistsNumFor(contents: String, expected: Int) = { val unit = createSource("test", "Test.scala")(contents.filterNot(_ == '^')) try { val Seq(pos) = SDTTestUtils.positionsOf(contents.toCharArray(), "^") val proposals = quickAssist.compute(InvocationContext(unit, pos, 0, Nil)) Assert.assertTrue("Abstract member not found", proposals.nonEmpty) val abstractNum = proposals.filter(_.isInstanceOf[AbstractMemberProposal]).size Assert.assertEquals("Incorrect num", expected, abstractNum) } finally unit.delete(true, null) } @Test def assistAbstrDef(): Unit = { assistsFor(""" trait TestTrait { def foo: Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(): Int'") } @Test def assistAbstrVal(): Unit = { assistsFor(""" trait TestTrait { val foo: Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement val 'foo(): Int'") } @Test def assistAbstrVar(): Unit = { assistsFor(""" trait TestTrait { var foo: Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement var 'foo(): Int'") } @Test def assistAbstrDefWithParams1(): Unit = { assistsFor(""" trait TestTrait { def foo(x: Double): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(Double): Int'") } @Test def assistAbstrDefWithParams2(): Unit = { assistsFor(""" trait TestTrait { def foo(x: Double, y: Map[Int, Float]): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(Double, Map[Int,Float]): Int'") } @Test def assistAbstrDefWithParams3(): Unit = { assistsFor(""" trait TestTrait { def foo(x: Double, y: Int)(z: Int): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(Double, Int)(Int): Int'") } @Test def assistAbstrDefWithParams4(): Unit = { assistsFor(""" trait TestTrait { def foo(x: Double, y: Int)(w: Int)(z: String): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(Double, Int)(Int)(String): Int'") } @Test def assistAbstrDefWithParams5(): Unit = { assistsFor(""" trait TestTrait { def foo(x: Double, y: Int)()(z: String): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo(Double, Int)()(String): Int'") } @Test def assistAbstrDefWithTypeParams1(): Unit = { assistsFor(""" trait TestTrait { def foo[T](y: Int): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo[T](Int): Int'") } @Test def assistAbstrDefWithTypeParams2(): Unit = { assistsFor(""" trait TestTrait { def foo[X, Y, Z](y: Int): Int } class Test extends TestTrait { ^ } """.stripMargin, "Implement def 'foo[X,Y,Z](Int): Int'") } @Test def assistNoAbstrDef(): Unit = { noAssistsFor(""" class Test extends TestTrait { ^ } """.stripMargin) } def testEnv(keyw: String, method: String = "") = s""" abstract class AbstrClass { val foo1: Int var foo2: Double def foo3: Double } trait TestTrait1 { def test1[X](y: Int): Int def test2(x: Int): Double def test3: Float } trait TestTrait2 extends TestTrait1 { def test3 = 1.0F val bar1: List[Int] var bar2: Int def bar3: Double } $keyw Test extends AbstrClass with TestTrait2 { $method ^ } """.stripMargin @Test def assistAbstrDefNumInClass(): Unit = { assistsNumFor(testEnv("class"), 8) } @Test def assistAbstrDefNumInAbstrClass(): Unit = { assistsNumFor(testEnv("abstract class"), 8) } @Test def assistAbstrDefNumInTrait(): Unit = { assistsNumFor(testEnv("trait"), 8) } @Test def assistAbstrDefNumInClassWithMethod(): Unit = { assistsNumFor(testEnv("class", "def foo3 = 42.0"), 7) } @Test def assistAbstrDefNumInAbstrClassWithMethod(): Unit = { assistsNumFor(testEnv("abstract class", "def foo3 = 42.0"), 7) } @Test def assistAbstrDefNumInTraitWithMethod(): Unit = { assistsNumFor(testEnv("trait", "def foo3 = 42.0"), 7) } }
Kwestor/scala-ide
org.scala-ide.sdt.core.tests/src/org/scalaide/core/quickassist/ImplAbstractMemberTest.scala
Scala
bsd-3-clause
5,901
package com.mpakhomov.actors import java.net.InetSocketAddress import java.text.DecimalFormat import java.time.format.DateTimeFormatter import akka.actor.{Actor, ActorLogging, ActorRef, Props} import akka.io.{IO, Tcp} import akka.util.{ByteString, Timeout} import com.mpakhomov.actors.CandlestickAggregatorActor.{GetDataForLastMinute, GetDataForLastNMinutes} import com.mpakhomov.model.Candlestick import scala.collection.mutable import scala.concurrent.Await import scala.concurrent.duration._ class ServerActor(val addr: InetSocketAddress, val candlestickAggregatorActor: ActorRef) extends Actor with ActorLogging { import ServerActor._ import akka.io.Tcp._ import akka.pattern.ask import context.system // a set of connected clients val clients = mutable.Set[ActorRef]() // for ask `?` syntax and futures implicit val timeout = Timeout(1.minutes) IO(Tcp) ! Bind(self, addr) def receive = { case CommandFailed(b: Bind) => log.error("CommandFailed") context stop self case c @ Connected(remote, local) => handleNewConnection(sender()) case SendDataForLastMinute => sendDataForLastMinute() case c: ConnectionClosed => handleClientDisconnected(sender()) } def handleNewConnection(connection: ActorRef): Unit = { val connection = sender() connection ! Register(self) clients += connection log.info("New client connected. Sending data for the last 10 minutes") val future = (candlestickAggregatorActor ? GetDataForLastNMinutes).mapTo[Seq[Candlestick]] val data = Await.result(future, timeout.duration) val jsonStr = buildJsonStr(data) log.info(s"Sending to the client:\n$jsonStr") connection ! Write(ByteString(jsonStr)) } def sendDataForLastMinute(): Unit = { val future = (candlestickAggregatorActor ? GetDataForLastMinute).mapTo[Seq[Candlestick]] val data = Await.result(future, timeout.duration) val jsonStr = buildJsonStr(data) log.info(s"Sending to the clients:\n$jsonStr") for (m <- clients) m ! Write(ByteString(jsonStr)) } def handleClientDisconnected(connection: ActorRef): Unit = { log.info("Client disconnected") clients.remove(connection) } } object ServerActor { // messages case object SendDataForLastMinute def props(addr: InetSocketAddress, candlestickAggregator: ActorRef) = Props(classOf[ServerActor], addr, candlestickAggregator) def candleStick2Json(c: Candlestick): String = { // datetime format looks similar to ISO_INSTANT, but not exactly the same // to https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html val dt = c.timestamp.toLocalDateTime val dtf1 = DateTimeFormatter.ofPattern("yyyy-MM-dd") val dtf2 = DateTimeFormatter.ofPattern("HH:mm") val timestamp = dt.format(dtf1) + "T" + dt.format(dtf2) + ":00Z" // just in case: format doubles exactly at it is in the spec val formatter = new DecimalFormat("#.#") val open = formatter.format(c.open) val low = formatter.format(c.low) val high = formatter.format(c.high) val close = formatter.format(c.close) s"""{ "ticker": "${c.ticker}", "timestamp": "${timestamp}", "open": ${open}, "high": ${high},""" + s""" "low": ${low}, "close": ${close}, "volume": ${c.volume} }""" } // I extracted it to a function, so that it's easier to verify in my integration testing def buildJsonStr(data: Seq[Candlestick]): String = data.map(candleStick2Json(_)).mkString("\n") }
mpakhomov/akka-io-demo
src/main/scala/com/mpakhomov/actors/ServerActor.scala
Scala
apache-2.0
3,480
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package config import akka.actor.ActorSystem import com.google.inject.ImplementedBy import com.google.inject.name.Named import com.typesafe.config.Config import javax.inject.{ Inject, Singleton } import play.api.Configuration import play.api.libs.ws.WSClient import uk.gov.hmrc.http._ import uk.gov.hmrc.play.audit.http.HttpAuditing import uk.gov.hmrc.play.audit.http.connector.AuditConnector import uk.gov.hmrc.play.http.ws._ @ImplementedBy(classOf[HttpVerbs]) trait WSHttpT extends HttpGet with HttpPut with HttpPost with HttpDelete with HttpPatch with WSHttp @Singleton class HttpVerbs @Inject() (@Named("appName") val appName: String, val auditConnector: AuditConnector, val wsClient: WSClient, val actorSystem: ActorSystem, config: Configuration) extends WSHttpT with HttpAuditing { override val hooks = Seq(AuditingHook) override def configuration: Config = config.underlying }
hmrc/fset-faststream
app/config/microserviceWiring.scala
Scala
apache-2.0
1,525
package com.gx.mediator import java.io.ByteArrayOutputStream import org.scalatest.{FlatSpec, Matchers} /** * Copyright 2018 josephguan * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ class MediatorSpec extends FlatSpec with Matchers { it should "receive message from other countries" in { val china = new China val usa = new USA val canada = new Canada // united nations organization val united = new UnitedNations united.addMember(china) united.addMember(usa) united.addMember(canada) val buffer = new ByteArrayOutputStream() Console.withOut(buffer) { china.declare("Hello World") } val msg = buffer.toString.split("\r\n") msg(1) should be("USA received: 'Hello World'") msg(2) should be("Canada received: 'Hello World'") } }
josephguan/scala-design-patterns
behavioral/mediator/src/test/scala/com/gx/mediator/MediatorSpec.scala
Scala
apache-2.0
1,333
package scalabpe.plugin.cache import java.net.InetSocketAddress import java.util.LinkedList import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.Executors import java.util.concurrent.ThreadPoolExecutor import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.locks.ReentrantLock import scala.collection.mutable.ArrayBuffer import org.jboss.netty.bootstrap.ClientBootstrap import org.jboss.netty.buffer.ChannelBuffer import org.jboss.netty.channel.Channel import org.jboss.netty.channel.ChannelFuture import org.jboss.netty.channel.ChannelFutureListener import org.jboss.netty.channel.ChannelHandlerContext import org.jboss.netty.channel.ChannelPipeline import org.jboss.netty.channel.ChannelPipelineFactory import org.jboss.netty.channel.ChannelStateEvent import org.jboss.netty.channel.Channels import org.jboss.netty.channel.ExceptionEvent import org.jboss.netty.channel.MessageEvent import org.jboss.netty.channel.group.DefaultChannelGroup import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory import org.jboss.netty.handler.timeout.IdleStateAwareChannelHandler import org.jboss.netty.handler.timeout.IdleStateEvent import org.jboss.netty.handler.timeout.IdleStateHandler import org.jboss.netty.util.HashedWheelTimer import org.jboss.netty.util.ThreadNameDeterminer import org.jboss.netty.util.ThreadRenamingRunnable import org.jboss.netty.util.Timeout import org.jboss.netty.util.TimerTask import scalabpe.core.ArrayBufferInt import scalabpe.core.Dumpable import scalabpe.core.Logging import scalabpe.core.NamedThreadFactory import scalabpe.core.QuickTimer import scalabpe.core.QuickTimerEngine import scalabpe.core.SelfCheckResult object RedisNettyClient4Cluster { val count = new AtomicInteger(1) } class RedisNettyClient4Cluster( val soc: RedisSoc4Cluster, val addrstr: String, val connectTimeout: Int = 15000, val pingInterval: Int = 60000, val timerInterval: Int = 100, val reconnectInterval: Int = 1, val reuseAddress: Boolean = false) extends Logging with Dumpable { var bossThreadFactory: NamedThreadFactory = _ var workThreadFactory: NamedThreadFactory = _ var timerThreadFactory: NamedThreadFactory = _ var factory: NioClientSocketChannelFactory = _ var bootstrap: ClientBootstrap = _ var channelHandler: ChannelHandler = _ var bossExecutor: ThreadPoolExecutor = _ var workerExecutor: ThreadPoolExecutor = _ var timer: HashedWheelTimer = _ var qte: QuickTimerEngine = _ val slots = new Array[SlotInfo](16384) // 每个slot对应的地址 val slotsLock = new ReentrantLock(false) val conns = new ConcurrentHashMap[String, ConnInfo] // 每个地址对应的连接 val dataMap = new ConcurrentHashMap[Int, TimeoutInfo]() val connected = new AtomicBoolean() val shutdown = new AtomicBoolean() init def dump() { log.info("--- addrstr=" + addrstr) val buff = new StringBuilder buff.append("timer.threads=").append(1).append(",") buff.append("bossExecutor.getPoolSize=").append(bossExecutor.getPoolSize).append(",") buff.append("bossExecutor.getQueue.size=").append(bossExecutor.getQueue.size).append(",") buff.append("workerExecutor.getPoolSize=").append(workerExecutor.getPoolSize).append(",") buff.append("workerExecutor.getQueue.size=").append(workerExecutor.getQueue.size).append(",") buff.append("channels.size=").append(conns.size).append(",") buff.append("connectedCount=").append(connectedCount()).append(",") buff.append("dataMap.size=").append(dataMap.size).append(",") log.info(buff.toString) qte.dump() } def init(): Unit = { channelHandler = new ChannelHandler(this) // without this line, the thread name of netty will not be changed ThreadRenamingRunnable.setThreadNameDeterminer(ThreadNameDeterminer.CURRENT); // or PROPOSED bossThreadFactory = new NamedThreadFactory("redisboss" + RedisNettyClient4Cluster.count.getAndIncrement()) bossExecutor = Executors.newCachedThreadPool(bossThreadFactory).asInstanceOf[ThreadPoolExecutor] workThreadFactory = new NamedThreadFactory("rediswork" + RedisNettyClient4Cluster.count.getAndIncrement()) workerExecutor = Executors.newCachedThreadPool(workThreadFactory).asInstanceOf[ThreadPoolExecutor] timerThreadFactory = new NamedThreadFactory("redistimer" + RedisNettyClient4Cluster.count.getAndIncrement()) timer = new HashedWheelTimer(timerThreadFactory, 1, TimeUnit.SECONDS) qte = new QuickTimerEngine(onTimeout, timerInterval) factory = new NioClientSocketChannelFactory(bossExecutor, workerExecutor) bootstrap = new ClientBootstrap(factory); bootstrap.setPipelineFactory(new PipelineFactory()); bootstrap.setOption("tcpNoDelay", true); bootstrap.setOption("keepAlive", true); bootstrap.setOption("connectTimeoutMillis", connectTimeout); if (reuseAddress) bootstrap.setOption("reuseAddress", true); else bootstrap.setOption("reuseAddress", false); val addrs = addrstr.split(",") for (i <- 0 until addrs.size) { val connInfo = new ConnInfo(addrs(i)) conns.put(connInfo.addr, connInfo) var ss = connInfo.addr.split(":") var host = ss(0) var port = ss(1).toInt val future = bootstrap.connect(new InetSocketAddress(host, port)) future.addListener(new ChannelFutureListener() { def operationComplete(future: ChannelFuture) { onConnectCompleted(future, connInfo) } }) } val maxWait = connectTimeout.min(5000) val now = System.currentTimeMillis var t = 0L while (!connected.get() && (t - now) < maxWait) { Thread.sleep(50) t = System.currentTimeMillis } val first = getFirstConn() if (first != null) { slots(0) = new SlotInfo(first) // 设一个默认地址,避免无法提供服务 } log.info("nettyredisclient4cluster started, {}, connected={}", addrstr, connected.get()) } def getFirstConn(): String = { val addrs = addrstr.split(",") for (i <- 0 until addrs.size) { val connInfo = conns.get(addrs(i)) if (connInfo != null) { connInfo.lock.lock() try { if (connInfo.ch != null) { return connInfo.addr } } finally { connInfo.lock.unlock() } } } null } def close(): Unit = { shutdown.set(true) if (factory != null) { log.info("stopping nettyredisclient4cluster {}", addrstr) timer.stop() timer = null val allChannels = new DefaultChannelGroup("netty-client-redis-scala") val it = conns.values().iterator() while (it.hasNext()) { val connInfo = it.next() connInfo.lock.lock() try { if (connInfo.ch != null && connInfo.ch.isOpen) { allChannels.add(connInfo.ch) } } finally { connInfo.lock.unlock() } } val future = allChannels.close() future.awaitUninterruptibly() factory.releaseExternalResources() factory = null } qte.close() log.info("nettyredisclient4cluster stopped {}", addrstr) } def selfcheck(): ArrayBuffer[SelfCheckResult] = { val buff = new ArrayBuffer[SelfCheckResult]() var errorId = 65301001 val it = conns.values().iterator() while (it.hasNext()) { val connInfo = it.next() connInfo.lock.lock() try { if (connInfo.ch == null) { val msg = "sos [" + connInfo.addr + "] has error" buff += new SelfCheckResult("SCALABPE.REDIS", errorId, true, msg) } } finally { connInfo.lock.unlock() } } if (buff.size == 0) { buff += new SelfCheckResult("SCALABPE.REDIS", errorId) } buff } def reconnect(connInfo: ConnInfo) { // 超过5分钟并且不在slot的地址里的,删除连接不再重试 connInfo.lock.lock() try { val now = System.currentTimeMillis if (now - connInfo.disconnected >= 5 * 60 * 1000) { if (!checkAddrValid(connInfo.addr)) { log.warn("addr is not valid after 5 minutes, remove it, addr={}", connInfo.addr) conns.remove(connInfo.addr) return } } } finally { connInfo.lock.unlock() } var ss = connInfo.addr.split(":") var host = ss(0) var port = ss(1).toInt log.info("reconnect called, addr={}", connInfo.addr) val future = bootstrap.connect(new InetSocketAddress(host, port)) future.addListener(new ChannelFutureListener() { def operationComplete(future: ChannelFuture) { onConnectCompleted(future, connInfo) } }) } def onConnectCompleted(f: ChannelFuture, connInfo: ConnInfo): Unit = { if (f.isCancelled()) { log.error("connect cancelled, addr=%s".format(connInfo.addr)) if (timer != null) { // while shutdowning timer.newTimeout(new TimerTask() { def run(timeout: Timeout) { reconnect(connInfo) } }, reconnectInterval, TimeUnit.SECONDS) } } else if (!f.isSuccess()) { log.error("connect failed, addr=%s,e=%s".format(connInfo.addr, f.getCause.getMessage)) if (timer != null) { // while shutdowning timer.newTimeout(new TimerTask() { def run(timeout: Timeout) { reconnect(connInfo) } }, reconnectInterval, TimeUnit.SECONDS) } } else { val ch = f.getChannel log.info("connect ok, addr=%s,channelId=%s,clientAddr=%s".format(connInfo.addr, ch.getId, ch.getLocalAddress.toString)) connInfo.lock.lock() try { if (connInfo.ch == null) { val theConnId = parseIpPort(ch.getRemoteAddress.toString) + ":" + ch.getId connInfo.ch = ch connInfo.connId = theConnId connInfo.seqs.clear() connInfo.disconnected = 0 } } finally { connInfo.lock.unlock() } if (connectedCount() >= addrstr.split(",").size) { connected.set(true) } } } def connectedCount(): Int = { var cnt = 0 val it = conns.values().iterator() while (it.hasNext()) { val connInfo = it.next() connInfo.lock.lock() try { if (connInfo.ch != null) { cnt += 1 } } finally { connInfo.lock.unlock() } } cnt } def checkAddrValid(addr: String): Boolean = { slotsLock.lock() try { var i = 0 while (i < slots.length) { val v = slots(i) if (v.master == addr) return true i += 1 } return false } finally { slotsLock.unlock() } } def getAddr(slot: Int): String = { if (slot < 0 || slot >= slots.length) return null slotsLock.lock() try { val v = slots(slot) if (v != null) return v.master var i = 0 while (i < slots.length) { // 找第一个有地址的slot用来发送请求 val v2 = slots(i) if (v2 != null) return v2.master i += 1 } return null } finally { slotsLock.unlock() } } def changeSlotAddr(min: Int, max: Int, addr: String) { for (i <- min to max) { changeSlotAddr(i, addr, wait = false) } } def changeSlotAddr(slot: Int, addr: String, wait: Boolean = true) { if (slot < 0 || slot >= slots.length) return slotsLock.lock() try { val v = slots(slot) if (v != null && v.master == addr) return slots(slot) = new SlotInfo(addr) val connInfo = conns.get(addr) if (connInfo != null) return } finally { slotsLock.unlock() } val connInfo = new ConnInfo(addr) conns.put(addr, connInfo) val ss = addr.split(":") val host = ss(0) val port = ss(1).toInt val future = bootstrap.connect(new InetSocketAddress(host, port)) val finished = new AtomicBoolean(false) future.addListener(new ChannelFutureListener() { def operationComplete(future: ChannelFuture) { onConnectCompleted(future, connInfo) finished.set(true) } }) // 最多等2秒, 不能用future.await, future await返回了不表示onConnectCompleted执行完了 val now = System.currentTimeMillis var t = 0L while (!finished.get() && (t - now) < 2000) { Thread.sleep(50) t = System.currentTimeMillis } } def sendByAddr(sequence: Int, buff: ChannelBuffer, timeout: Int, addr: String, asking: Boolean = false, hasReply: Boolean = true): Boolean = { val connInfo = conns.get(addr) if (connInfo == null) return false connInfo.lock.lock() try { if (connInfo.ch == null) return false if (!connInfo.ch.isOpen) { connInfo.ch = null connInfo.connId = null connInfo.seqs.clear() connInfo.disconnected = System.currentTimeMillis log.error("channel not opened, connId={}", connInfo.connId) return false } if (asking) { val (asking_sequence, asking_buff) = soc.generateAsking() val t = qte.newTimer(timeout, asking_sequence) val ti = new TimeoutInfo(asking_sequence, connInfo.connId, t) dataMap.put(asking_sequence, ti) if (hasReply) connInfo.seqs.offer(asking_sequence) connInfo.ch.write(asking_buff); } val t = qte.newTimer(timeout, sequence) val ti = new TimeoutInfo(sequence, connInfo.connId, t) dataMap.put(sequence, ti) if (hasReply) connInfo.seqs.offer(sequence) connInfo.ch.write(buff); return true } finally { connInfo.lock.unlock() } } def sendBySlot(sequence: Int, buff: ChannelBuffer, timeout: Int, slot: Int, hasReply: Boolean = true): Boolean = { val addr = getAddr(slot) if (addr == null) return false sendByAddr(sequence, buff, timeout, addr, asking = false, hasReply = hasReply) } def removeChannel(connId: String): Unit = { if (shutdown.get()) { return } val addr = parseAddrFromConnId(connId) val connInfo = conns.get(addr) if (connInfo == null) return connInfo.lock.lock() try { connInfo.ch = null connInfo.connId = null connInfo.seqs.clear() connInfo.disconnected = System.currentTimeMillis } finally { connInfo.lock.unlock() } timer.newTimeout(new TimerTask() { def run(timeout: Timeout) { reconnect(connInfo) } }, reconnectInterval, TimeUnit.SECONDS) } def onTimeout(data: Any): Unit = { val sequence = data.asInstanceOf[Int] val ti = dataMap.remove(sequence) if (ti != null) { soc.timeoutError(sequence, ti.connId) } else { //log.error("timeout but sequence not found, seq={}",sequence) } } def getSequenceFromQueue(connId: String): Tuple2[Boolean, Int] = { val addr = parseAddrFromConnId(connId) val connInfo = conns.get(addr) if (connInfo == null) return (false, 0) connInfo.lock.lock() try { val seq = connInfo.seqs.poll() return new Tuple2(true, seq) } catch { case e: Throwable => return new Tuple2(false, 0) } finally { connInfo.lock.unlock() } } def onReceive(buff: ChannelBuffer, connId: String): Unit = { val (ok, sequence) = getSequenceFromQueue(connId) if (ok) { val ti = dataMap.remove(sequence) if (ti != null) { ti.timer.cancel() } else { //log.warn("receive but sequence not found, seq={}",sequence) } try { soc.receive(sequence, buff, connId) } catch { case e: Throwable => log.error("exception in receive, e=" + e.getMessage, e) throw e } } } def onNetworkError(connId: String): Unit = { removeChannel(connId) val seqs = new ArrayBufferInt() val i = dataMap.values().iterator while (i.hasNext()) { val info = i.next() if (info.connId == connId) { seqs += info.sequence } } for (sequence <- seqs) { val ti = dataMap.remove(sequence) if (ti != null) { ti.timer.cancel() soc.networkError(sequence, connId) } else { //log.error("network error but sequence not found, seq={}",sequence) } } } def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent): Unit = { val ch = e.getChannel val connId = ctx.getAttachment().asInstanceOf[String] val buf = e.getMessage().asInstanceOf[ChannelBuffer] onReceive(buf, connId) } def channelConnected(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = { val ch = e.getChannel val connId = parseIpPort(ch.getRemoteAddress.toString) + ":" + ch.getId ctx.setAttachment(connId); } def channelDisconnected(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = { val ch = e.getChannel val connId = ctx.getAttachment().asInstanceOf[String] onNetworkError(connId) log.info("channelDisconnected id={}", connId) } def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent): Unit = { val ch = e.getChannel val connId = ctx.getAttachment().asInstanceOf[String]; log.error("exceptionCaught connId={},e={}", connId, e) if (ch.isOpen) ch.close() } def channelIdle(ctx: ChannelHandlerContext, e: IdleStateEvent): Unit = { val ch = e.getChannel val connId = ctx.getAttachment().asInstanceOf[String]; val addr = parseAddrFromConnId(connId) val (sequence, buff) = soc.generatePing() val connInfo = conns.get(addr) if (connInfo == null) return connInfo.lock.lock() try { if (connInfo.ch == null) return connInfo.seqs.offer(sequence) connInfo.ch.write(buff); } finally { connInfo.lock.unlock() } } def parseIpPort(s: String): String = { val p = s.indexOf("/") if (p >= 0) s.substring(p + 1) else s } def parseAddrFromConnId(s: String): String = { val p = s.lastIndexOf(":") if (p >= 0) s.substring(0, p) else s } class PipelineFactory extends Object with ChannelPipelineFactory { def getPipeline(): ChannelPipeline = { val pipeline = Channels.pipeline(); pipeline.addLast("timeout", new IdleStateHandler(timer, 0, 0, pingInterval / 1000)); pipeline.addLast("decoder", new RedisFrameDecoder()); pipeline.addLast("handler", channelHandler); pipeline; } } class ChannelHandler(val client: RedisNettyClient4Cluster) extends IdleStateAwareChannelHandler with Logging { override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent): Unit = { client.messageReceived(ctx, e) } override def channelIdle(ctx: ChannelHandlerContext, e: IdleStateEvent): Unit = { client.channelIdle(ctx, e) } override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent): Unit = { client.exceptionCaught(ctx, e) } override def channelConnected(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = { client.channelConnected(ctx, e) } override def channelDisconnected(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = { client.channelDisconnected(ctx, e) } } class TimeoutInfo(val sequence: Int, val connId: String, val timer: QuickTimer) class SlotInfo(val master: String, var slave: String = null) class ConnInfo(val addr: String) { val lock = new ReentrantLock(false) val seqs = new LinkedList[Int]() var ch: Channel = null var connId: String = null var disconnected = System.currentTimeMillis() } }
bruceran/scalabpe
src/scalabpe/plugin/cache/redisnettyclient_cluster.scala
Scala
apache-2.0
22,336
package kornell.server.ep import kornell.server.jdbc.repository.EnrollmentRepo import java.math.BigDecimal /** * Simple Event Processing */ object EnrollmentSEP { def onProgress(enrollmentUUID: String): Unit = { EnrollmentRepo(enrollmentUUID).updateProgress() } def onAssessment(enrollmentUUID: String): Unit = EnrollmentRepo(enrollmentUUID).updateAssessment() def onPreAssessmentScore(enrollmentUUID: String, score: BigDecimal): Unit = EnrollmentRepo(enrollmentUUID).updatePreAssessmentScore(score) def onPostAssessmentScore(enrollmentUUID: String, score: BigDecimal): Unit = EnrollmentRepo(enrollmentUUID).updatePostAssessmentScore(score) }
Craftware/Kornell
kornell-api/src/main/scala/kornell/server/ep/EnrollmentSEP.scala
Scala
apache-2.0
678
package edu.tum.cs.isabelle.impl import java.nio.file.Path import scala.concurrent.ExecutionContext import edu.tum.cs.isabelle.api @api.Implementation(identifier = "2014") final class Environment protected(home: Path) extends api.Environment(home) { isabelle.Isabelle_System.init( isabelle_home = home.toAbsolutePath.toString, cygwin_root = home.resolve("contrib/cygwin").toAbsolutePath.toString ) api.Environment.patchSettings(isabelle.Isabelle_System, variables) private def destMarkup(markup: isabelle.Markup) = (markup.name, markup.properties) protected[isabelle] val exitTag = isabelle.Markup.EXIT protected[isabelle] val functionTag = isabelle.Markup.FUNCTION protected[isabelle] val initTag = isabelle.Markup.INIT protected[isabelle] val protocolTag = isabelle.Markup.PROTOCOL lazy val executionContext = isabelle.Future.execution_context protected[isabelle] type Session = isabelle.Session private lazy val options = isabelle.Options.init() private def mkPaths(path: Option[Path]) = path.map(p => isabelle.Path.explode(isabelle.Isabelle_System.posix_path(p.toAbsolutePath.toString))).toList private def progress(config: api.Configuration) = new isabelle.Build.Progress { logger.info(s"Building $config ...") override def echo(msg: String) = logger.info(s"${config.session}: $msg") override def theory(session: String, theory: String) = logger.info(s"${config.session}: theory $theory ($session)") } protected[isabelle] def build(config: api.Configuration) = isabelle.Build.build( options = options, progress = progress(config), build_heap = true, dirs = mkPaths(config.path), verbose = true, sessions = List(config.session) ) protected[isabelle] def create(config: api.Configuration, consumer: (api.Markup, api.XML.Body) => Unit) = { val content = isabelle.Build.session_content(options, false, mkPaths(config.path), config.session) val resources = new isabelle.Resources(content.loaded_theories, content.known_theories, content.syntax) val session = new isabelle.Session(resources) def convertXML(tree: isabelle.XML.Tree): api.XML.Tree = tree match { case isabelle.XML.Text(content) => api.XML.text(content) case isabelle.XML.Elem(markup, body) => api.XML.elem(destMarkup(markup), body.map(convertXML)) } session.all_messages += isabelle.Session.Consumer[isabelle.Prover.Message]("firehose") { case msg: isabelle.Prover.Protocol_Output => consumer(destMarkup(msg.message.markup), api.XML.bodyFromYXML(msg.text)) case msg: isabelle.Prover.Output => consumer(destMarkup(msg.message.markup), msg.message.body.map(convertXML)) case _ => } session.start("Isabelle" /* name is ignored anyway */, List("-r", "-q", config.session)) session } protected[isabelle] def sendCommand(session: Session, name: String, args: List[String]) = session.protocol_command(name, args: _*) protected[isabelle] def sendOptions(session: Session) = session.protocol_command("Prover.options", isabelle.YXML.string_of_body(options.encode)) protected[isabelle] def dispose(session: Session) = session.stop() }
wneuper/libisabelle
pide/2014/src/main/scala/impl/Environment.scala
Scala
mit
3,216
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.jdbc import java.sql.Types import org.apache.spark.sql.types._ private object DerbyDialect extends JdbcDialect { override def canHandle(url: String): Boolean = url.startsWith("jdbc:derby") override def getCatalystType( sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = { if (sqlType == Types.REAL) Option(FloatType) else None } override def getJDBCType(dt: DataType): Option[JdbcType] = dt match { case StringType => Option(JdbcType("CLOB", java.sql.Types.CLOB)) case ByteType => Option(JdbcType("SMALLINT", java.sql.Types.SMALLINT)) case ShortType => Option(JdbcType("SMALLINT", java.sql.Types.SMALLINT)) case BooleanType => Option(JdbcType("BOOLEAN", java.sql.Types.BOOLEAN)) // 31 is the maximum precision and 5 is the default scale for a Derby DECIMAL case t: DecimalType if t.precision > 31 => Option(JdbcType("DECIMAL(31,5)", java.sql.Types.DECIMAL)) case _ => None } override def isCascadingTruncateTable(): Option[Boolean] = Some(false) }
pgandhi999/spark
sql/core/src/main/scala/org/apache/spark/sql/jdbc/DerbyDialect.scala
Scala
apache-2.0
1,876
/* * Copyright (c) 2015-2017 EpiData, Inc. */ package cassandra import com.chrisomeara.pillar.Migration import com.chrisomeara.pillar.Migrator import com.chrisomeara.pillar.Registry import com.chrisomeara.pillar.Reporter import com.chrisomeara.pillar.ReplicationOptions import com.datastax.driver.core._ import java.io.File import java.io.PrintStream import java.util.Date import org.joda.time.Instant import play.api.Logger import play.api.Play import util.EpidataMetrics /** * Singleton object for managing the server's connection to a Cassandra * database and executing queries. */ object DB { private var connection: Option[Connection] = None /** * Connect to cassandra. On connect, the keyspace is created and migrated if * necessary. */ def connect(nodeNames: String, keyspace: String, username: String, password: String) = { connection = Some(new Connection(nodeNames, keyspace, username, password)) } /** Generate a prepared statement. */ def prepare(statementSpec: String) = connection.get.prepare(statementSpec) /** Execute a previously prepared statement. */ def execute(statement: Statement): ResultSet = { val rs = connection.get.execute(statement) rs } /** Execute a previously prepared statement. */ def batchExecute(statements: List[Statement]): ResultSet = { val t0 = EpidataMetrics.getCurrentTime val batch = new BatchStatement() statements.foreach(s => batch.add(s)) // execute the batch val rs = connection.get.execute(batch) EpidataMetrics.increment("DB.batchExecute", t0) rs } /** Execute a CQL statement by binding ordered attributes. */ def cql(statement: String, args: AnyRef*): ResultSet = { connection.get.execute(new SimpleStatement(statement, args: _*)) } /** Execute a CQL statement by binding named attributes. */ def cql(statement: String, args: Map[String, Any]): ResultSet = { val boundStatement = new BoundStatement(connection.get.prepare(statement)) args.foreach { case (key, value: String) => boundStatement.setString(key, value) case (key, value: Int) => boundStatement.setInt(key, value) case (key, value: Double) => boundStatement.setDouble(key, value) case (key, value: Date) => boundStatement.setDate(key, LocalDate.fromMillisSinceEpoch(value.getTime)) case _ => throw new IllegalArgumentException("Unexpected args.") } connection.get.execute(boundStatement) } def close { connection.get.close } def session = connection.get.session } private class TerseMigrationReporter(stream: PrintStream) extends Reporter { override def initializing( session: Session, keyspace: String, replicationOptions: ReplicationOptions ) { } override def migrating(session: Session, dateRestriction: Option[Date]) { } override def applying(migration: Migration) { stream.println( // scalastyle:ignore s"Applying migration ${migration.authoredAt.getTime}: ${migration.description}" ) } override def reversing(migration: Migration) { stream.println( // scalastyle:ignore s"Reversing migration ${migration.authoredAt.getTime}: ${migration.description}" ) } override def destroying(session: Session, keyspace: String) { } } private class Connection(nodeNames: String, keyspace: String, username: String, password: String) { val cluster = nodeNames.split(',').foldLeft(Cluster.builder())({ (builder, nodeName) => try { builder.addContactPoint(nodeName).withCredentials(username, password) } catch { case e: IllegalArgumentException => Logger.warn(e.getMessage); builder } }).build() val session = cluster.connect() val reporter = new TerseMigrationReporter(System.out) val registry = { import play.api.Play.current Registry.fromDirectory( Play.application.getFile("conf/pillar/migrations/epidata"), reporter ) } // Create keyspace if necessary. Migrator(registry, reporter).initialize(session, keyspace) // Use the specified keyspace. session.execute(s"USE ${keyspace}") // Perform migrations if necessary. Migrator(registry, reporter).migrate(session) def prepare(statementSpec: String) = session.prepare(statementSpec) def execute(statement: Statement) = session.execute(statement) def close = { session.close() cluster.close() } }
epidataio/epidata-community
play/app/cassandra/DB.scala
Scala
apache-2.0
4,368
import org.apache.hadoop.conf.Configuration import org.apache.spark.{SparkContext, SparkConf} import org.apache.spark.rdd.RDD import org.bson.BSONObject import com.mongodb.hadoop.{ MongoInputFormat, MongoOutputFormat, BSONFileInputFormat, BSONFileOutputFormat} import com.mongodb.hadoop.io.MongoUpdateWritable import com.mongodb.BasicDBList import java.io._ import com.cloudera.datascience.lsa._ import com.cloudera.datascience.lsa.ParseWikipedia._ import com.cloudera.datascience.lsa.RunLSA._ import org.apache.spark.rdd.EmptyRDD import scala.collection.mutable.ListBuffer import org.apache.spark.mllib.linalg._ import org.apache.spark.mllib.linalg.distributed.RowMatrix import breeze.linalg.{DenseMatrix => BDenseMatrix, DenseVector => BDenseVector, SparseVector => BSparseVector} import org.apache.spark.mllib.regression._ import org.apache.spark.rdd._ import org.apache.spark.mllib.tree.RandomForest import org.apache.spark.mllib.tree.model.RandomForestModel import org.apache.spark.mllib.util.MLUtils @transient val mongoConfig = new Configuration() mongoConfig.set("mongo.input.uri", "mongodb://localhost:27017/cordir.project") val documents = sc.newAPIHadoopRDD( mongoConfig, // Configuration classOf[MongoInputFormat], // InputFormat classOf[Object], // Key type classOf[BSONObject]) // Value type mongoConfig.set("mongo.input.uri", "mongodb://localhost:27017/cordir.projetDocConcept") val documentsDocConcept = sc.newAPIHadoopRDD( mongoConfig, // Configuration classOf[MongoInputFormat], // InputFormat classOf[Object], // Key type classOf[BSONObject]) // Value type def mergeBSON( a:BSONObject, b:BSONObject ) : BSONObject = { a.putAll(b) return a } def generateArray( a:BasicDBList ) : Array[Double] = { var ree:Array[Double] =Array.fill[Double](a.size())(0) for(i <- 0 to a.size()-1){ ree(i)=a.get(i).asInstanceOf[Double] } return ree } var joinedDocuments=documents.map(a=>(a._1.toString,a._2)).join(documentsDocConcept.map(a=>(a._1.toString,a._2))).map(a => (a._1,mergeBSON(a._2._1,a._2._2))) //joinedDocuments.map(a => (a._1,mergeBSON(a._2._1,a._2._2))).take(1).foreach(println) val data=joinedDocuments.map(a => LabeledPoint(a._2.get("totalCost").asInstanceOf[Double],new DenseVector(generateArray(a._2.get("value").asInstanceOf[BasicDBList])))) val splits = data.randomSplit(Array(0.7, 0.3)) val (trainingData, testData) = (splits(0), splits(1)) // Train a RandomForest model. // Empty categoricalFeaturesInfo indicates all features are continuous. val numClasses = 2 val categoricalFeaturesInfo = Map[Int, Int]() val numTrees = 1000// Use more in practice. val featureSubsetStrategy = "auto" // Let the algorithm choose. val impurity = "variance" val maxDepth = 10 val maxBins = 64 val model = RandomForest.trainRegressor(trainingData, categoricalFeaturesInfo, numTrees, featureSubsetStrategy, impurity, maxDepth, maxBins) // Evaluate model on test instances and compute test error val labelsAndPredictions = testData.map { point => val prediction = model.predict(point.features) (point.label, prediction) } val testMSE = labelsAndPredictions.map{ case(v, p) => math.pow((v - p), 2)}.mean() println("Test Mean Squared Error = " + testMSE) //println("Learned regression forest model:\n" + model.toDebugString) // Save and load model model.save(sc, "target/tmp/myRandomForestRegressionModel") exit
StatisticalProject/CORDIR
sparkForet.scala
Scala
apache-2.0
3,488
package controllers import com.madgag.playgithub.auth.{AuthController, Client} import lib.GithubAppConfig object Auth extends AuthController { override val authClient: Client = GithubAppConfig.authClient }
guardian/gu-who
app/controllers/Auth.scala
Scala
apache-2.0
210
package sampler.distribution import sampler.distribution.Distribution.from import sampler.samplable.SamplableSyntax import scala.annotation.tailrec trait CommonDistributions extends DistributionImplicits with SamplableSyntax { def always[T](value: T): Distribution[T] = Pure(value) /** Uniform distribution of doubles * * @param lower the lower bound of the distribution (inclusive) * @param upper the upper bound of the distribution (exclusive) */ def uniform(lower: Double, upper: Double) = from{r => (upper - lower) * r.nextDouble() + lower } /** Uniform distribution of integers * * @param lower the lower bound of the distribution (inclusive) * @param upper the upper bound of the distribution (exclusive) */ def uniform(lower: Int, upper: Int) = from{r => r.nextInt(upper - lower) + lower } /** Samples from a an indexed sequence of values with uniform weighting. * * @param items the sequence of values to be sampled from */ def uniform[T](items: IndexedSeq[T]) = { val size = items.size from{r => items(r.nextInt(size))} } /** Samples from multiple values without replacement using uniform weighting. * * @param items the sequence of values to be sampled from * @param sampleSize the number of items to be selected from the set * * @example * {{{ * implicit val r = Random * val model = Distribution.withoutReplacement(IndexedSeq("red", "blue", "green", "yellow"), 2) * * model.sample * res1: List[String] = List(blue, green) * }}} * */ //TODO reverse list? def withoutReplacement[T](items: IndexedSeq[T], sampleSize: Int) = from{r => def sample() = { @tailrec def takeAnother(acc: List[T], bag: IndexedSeq[T]): List[T] = { if(acc.size == sampleSize) acc else{ val item = bag(r.nextInt(bag.size)) takeAnother(item +: acc, bag diff List(item)) } } takeAnother(Nil, items) } } //TODO test, urgently! //TODO reverse list? def withoutReplacement( populationSize: Int, populationTrue: Int, stopWhen: IndexedSeq[Boolean] => Boolean = _ => false ) = { assert(populationSize >= populationTrue) from{r => @tailrec def take(acc: IndexedSeq[Boolean], nTrue: Int, size: Int): IndexedSeq[Boolean] = { if(size ==0 || stopWhen(acc)) acc else { val item = r.nextInt(size) <= nTrue take(acc :+ item, if(item) nTrue - 1 else nTrue, size - 1) } } take(IndexedSeq.empty[Boolean], populationSize, populationTrue) } } /** Coins toss * * @param probSuccess the probability of success */ def bernoulli(probSuccess: Double): Distribution[Boolean] = from{r => r.nextBoolean(probSuccess) } //TODO test def binomial(probSuccess: Double, trials: Double) = bernoulli(probSuccess) .until(_.size == trials) .map(_.count(identity)) //TODO test def negativeBinomial(numFailures: Int, probSuccess: Double) = { bernoulli(probSuccess) .until(_.count(!_) == numFailures) .map(_.size) } //TODO test def geometric(probSuccess: Double) = bernoulli(probSuccess) .until(_.last) .map(_.size) //TODO test def hypergeometric(trials: Int, populationSize: Int, populationSuccesses: Int) = withoutReplacement(populationSize, populationSuccesses,_.size == trials) .map(_.count(identity)) //TODO test def exponential(rate: Double) = uniform(0.0,1.0).map(x => - math.log(x) / rate) //TODO test def poisson(rate: Double) = exponential(rate).until(_.sum >= 1).map(_.size - 1) def piecewiseLinear(points: (Double, Double)*): Distribution[Double] ={ case class Segment(xPositionOffset: Double, width: Double, gradient: Double, intercept: Double) { assume(intercept >= 0) assume(width > 0) val mSgn = if(gradient < 0) -1 else 1 val mAbs = math.abs(gradient) val c = intercept val integral = (mSgn * mAbs * width * 0.5 + c) * width def inverse(y: Double) = if(mAbs == 0) y / c else (mSgn * math.sqrt(c * c + mSgn * 2 * mAbs * y) - mSgn * c ) / mAbs val dist = Distribution.uniform(0.0, integral).map{y => inverse(y) + xPositionOffset } } val segments = points.sliding(2) .map{ case Seq((x1, y1), (x2, y2)) => assume(x1 < x2) val width = x2 - x1 val gradient = (y2 - y1) / width val intercept = y1 Segment(x1, width, gradient, intercept) } .toIndexedSeq val segmentDist = Distribution.fromWeightsTable( segments.map(s => s -> s.integral).toMap ) //Choose the segment then sample a value within it segmentDist.flatMap(_.dist) } }
tearne/Sampler
sampler-core/src/main/scala/sampler/distribution/CommonDistributions.scala
Scala
apache-2.0
4,631
package io.youi import scala.scalajs.js @js.native trait ElementFeatures extends js.Object { def scrollIntoView(options: ScrollIntoViewOptions): Unit }
outr/youi
dom/src/main/scala/io/youi/ElementFeatures.scala
Scala
mit
156
/* * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com> */ package scalaguide.sql // #named-database // ###insert: package controllers import javax.inject.Inject import play.api.mvc.{BaseController, ControllerComponents} import play.api.db.{Database, NamedDatabase} // inject "orders" database instead of "default" class ScalaInjectNamed @Inject()( @NamedDatabase("orders") db: Database, val controllerComponents: ControllerComponents ) extends BaseController { // do whatever you need with the db } // #named-database
Shruti9520/playframework
documentation/manual/working/scalaGuide/main/sql/code/ScalaInjectNamed.scala
Scala
apache-2.0
546
package models.actor import models.binding._ import models.enums._ import models.provider._ import play.libs._ import play.api.libs.concurrent.Execution.Implicits._ import scala.concurrent._ import scala.concurrent.duration._ import scala.xml._ import akka.actor._ import akka.pattern._ import akka.util.Timeout import akka.actor.SupervisorStrategy._ import scalaxb.DataRecord import java.net.URI class RegistryService(val databases: Seq[Database]) extends Actor { import models.actor.RegistryService._ import models.actor.DataProvider._ implicit val timeout = Timeout(10.seconds) override val supervisorStrategy = OneForOneStrategy() { case _: ActorInitializationException => Stop case _: ActorKilledException => Stop case _: Exception => Restart } override def preStart = initChildActors def receive = { case reg: RegisterProvider => sender ! register(reg.props, reg.id) } private def register(props: Props, id: String) = { val provider: ActorRef = context.actorOf(props, id) Akka.system.scheduler.schedule(30.minutes, 24.hours, provider, UpdateData) } private def initChildActors: Unit = { databases map { database => val id: String = UrlProvider.encodeURI(database.id) database.typeValue match { case Simulation => register( Props(new SimDataProvider(database)), id) case Observation => register( Props(new ObsDataProvider(database)), id) } } } } object RegistryService { import models.actor.ConfigService._ import models.actor.DataProvider._ import models.binding.Simulation implicit val timeout = Timeout(1.minute) // message formats trait RegistryMessage case class RegisterProvider(val props: Props, val id: String) extends RegistryMessage private val registry: ActorSelection = Akka.system.actorSelection("user/registry") private def getChilds(databases: Seq[Database]): Seq[ActorSelection] = { databases map { d => getChild(d.id) } } // @TODO check if child exists and is alive private def getChild(id: URI): ActorSelection = { Akka.system.actorSelection("user/registry/" + UrlProvider.encodeURI(id)) } // Hack method for checking the id of the request against the database id private def validateId(msg: GetElement, dbId: URI): Boolean = { val msgId = msg.id.get .replace(ESimulationModel.toString, ERepository.toString) .replace(ESimulationRun.toString, ERepository.toString) .replace(ENumericalOutput.toString, ERepository.toString) .replace(EGranule.toString, ERepository.toString) .replace(EObservatory.toString, ERepository.toString) .replace(EInstrument.toString, ERepository.toString) .replace(ENumericalData.toString, ERepository.toString) //println("Computed search Id="+msgId) msgId.contains(dbId.toString) } private def getElement(msg: GetElement): Future[Either[Spase, RequestError]] = { implicit val timeout = Timeout(1.minute) //println("ResourceID="+msg.id) for { databases <- ConfigService.request(GetRegistryDatabases).mapTo[Seq[Database]] provider <- msg.id match { case Some(id) if(databases.exists(d => validateId(msg, d.id))) => { val db: Database = databases.find(d => validateId(msg, d.id)).get val provider: ActorSelection = getChild(db.id) (msg.dType, db.typeValue) match { case (e: SimElement, Simulation) => (provider ? msg).mapTo[Spase] map { s => { s.ResourceEntity match { case Seq() => Right(RequestError(ERequestError.UNKNOWN_ENTITY)) case _ => Left(s) }} } case (e: ObsElement, Observation) => (provider ? msg).mapTo[Spase] map { s => { s.ResourceEntity match { case Seq() => Right(RequestError(ERequestError.UNKNOWN_ENTITY)) case _ => Left(s) }} } // generic element must always exist case (e: GenElement, _) => (provider ? msg).mapTo[Spase] map { Left(_) } case _ => future { Right(RequestError(ERequestError.UNKNOWN_ENTITY)) } } } case None => { val result = msg.dType match { case e: SimElement => Future.sequence(getChilds(databases.filter(_.typeValue == Simulation)) map { provider => (provider ? msg).mapTo[Spase] map { _.ResourceEntity } }) case e: ObsElement => Future.sequence(getChilds(databases.filter(_.typeValue == Observation)) map { provider => (provider ? msg).mapTo[Spase] map { _.ResourceEntity } }) case e: GenElement => Future.sequence(getChilds(databases) map { provider => (provider ? msg).mapTo[Spase] map { _.ResourceEntity } }) } result.map(records => Left(Spase(Number2u462u462, records.flatten, "en"))) } case _ => future { Right(RequestError(ERequestError.UNKNOWN_PROVIDER)) } } } yield provider } def registerChild(props: Props, id: String) = { (registry ? RegisterProvider(props, id)) } // general methods def getStatus(): Future[Seq[DataProvider.Status]] = { implicit val timeout = Timeout(1.minute) for { databases <- ConfigService.request(GetRegistryDatabases).mapTo[Seq[Database]] provider <- { Future.sequence(getChilds(databases) map { provider => (provider ? GetStatus).mapTo[DataProvider.Status] }) } } yield provider } def getTree(id: Option[String] = None): Future[Either[Spase, RequestError]] = { implicit val timeout = Timeout(1.minute) for { databases <- ConfigService.request(GetRegistryDatabases).mapTo[Seq[Database]] provider <- { id match { case Some(id) if databases.exists(d => id.contains(d.id.toString)) => { val provider: ActorSelection = getChild(databases.find(d => id.contains(d.id.toString)).get.id) (provider ? GetTree).mapTo[Spase] map { Left(_) } } // give correct error in the interface case Some(id) if databases.exists(d => id.contains(d.id.toString) && d.typeValue == Observation) => { future { Right(RequestError(ERequestError.NOT_IMPLEMENTED)) } } case None => { val result = Future.sequence(getChilds(databases) map { provider => (provider ? GetTree).mapTo[Spase] map { _.ResourceEntity } }) result.map(records => Left(Spase(Number2u462u462, records.flatten, "en"))) } case _ => future { Right(RequestError(ERequestError.UNKNOWN_PROVIDER)) } } } } yield provider } def getRepository(id: Option[String] = None): Future[Either[Spase, RequestError]] = getElement(GetElement(ERepository, id)) def getRepositoryType(dbType: Databasetype): Future[Spase] = { implicit val timeout = Timeout(10.seconds) for { databases <- ConfigService.request(GetDatabaseType(dbType)).mapTo[Seq[Database]] providers <- { val result = Future.sequence(getChilds(databases) map { provider => (provider ? GetElement(ERepository, None)).mapTo[Spase] map { _.ResourceEntity } }) result.map(records => Spase(Number2u462u462, records.flatten, "en")) } } yield providers } // simulations methods def getSimulationModel(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(ESimulationModel, Some(id.replace(ERepository.toString, ESimulationModel.toString)), r)) } case _ => getElement(GetElement(ESimulationModel, id, r)) } } def getSimulationRun(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(ESimulationRun, Some(id.replace(ERepository.toString, ESimulationRun.toString)), r)) } case _ => getElement(GetElement(ESimulationRun, id, r)) } } def getNumericalOutput(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(ENumericalOutput, Some(id.replace(ERepository.toString, ENumericalOutput.toString)), r)) } case _ => getElement(GetElement(ENumericalOutput, id, r)) } } def getGranule(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(EGranule, Some(id.replace(ERepository.toString, EGranule.toString)), r)) } case _ => getElement(GetElement(EGranule, id, r)) } } // observations methods def getObservatory(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(EObservatory, Some(id.replace(ERepository.toString, EObservatory.toString)), r)) } case _ => getElement(GetElement(EObservatory, id)) } } def getInstrument(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(EInstrument, Some(id.replace(ERepository.toString, EInstrument.toString)), r)) } case _ => getElement(GetElement(EInstrument, id)) } } def getNumericalData(id: Option[String], r: Boolean): Future[Either[Spase, RequestError]] = { // check if a repository id is given as parameter id match { case Some(id) if(id.contains(ERepository.toString)) => { getElement(GetElement(ENumericalData, Some(id.replace(ERepository.toString, ENumericalData.toString)), r)) } case _ => getElement(GetElement(ENumericalData, id)) } } }
FlorianTopf/impex-portal
app/models/actor/RegistryService.scala
Scala
gpl-2.0
10,415
package archiver import java.io.File class FileMapping private(val mappings: Map[String, File], val permissions: Map[String, FilePermissions]) { def size = mappings.size def isEmpty = mappings.isEmpty def append(fm: FileMapping) = new FileMapping(mappings ++ fm.mappings, permissions ++ fm.permissions) def foreach(f: ((String, File)) => Unit): Unit = mappings.foreach(f) def map[B](f: ((String, File)) => B): Iterable[B] = mappings.map(f) override def toString = { s"""mappings: $mappings |permissions: $permissions """.stripMargin } } object FileMapping { def apply(roots: List[File], glob: Option[String] = None, base: Option[String] = None, permissions: Map[String, FilePermissions] = Map.empty)(implicit resolver: PermissionResolver): FileMapping = { def name(f: File, root: File) = base.map(_ + "/").getOrElse("") + f.getAbsolutePath.substring(root.getAbsolutePath.length).drop(1) def entries(f: File): Seq[File] = f :: IO.listFiles(f, glob).flatMap(entries) def tuple(root: File) = (f: File) => name(f, root) -> f val mappings = roots.flatMap(root => entries(root).tail.map(tuple(root))).toMap new FileMapping(mappings, resolver.resolve(mappings, permissions)) } def apply(mappings: Map[String, File], permissions: Map[String, FilePermissions])(implicit resolver: PermissionResolver) = { new FileMapping(mappings, resolver.resolve(mappings, permissions)) } }
hamnis/scala-archiver
src/main/scala/archiver/FileMapping.scala
Scala
apache-2.0
1,433
package org.example class ProvidedTest
xeno-by/old-scalameta-sbt
sbt/src/sbt-test/project/provided/sub/src/test/scala/ProvidedTest.scala
Scala
bsd-3-clause
39
package akka.s3 import java.net.URLEncoder import akka.http.scaladsl.model.headers.{HttpOrigin, `Access-Control-Allow-Origin`, `Access-Control-Allow-Credentials`, ETag} import akka.http.scaladsl.model.{StatusCodes, HttpEntity, Multipart, HttpRequest} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._ import scala.concurrent.{Future, Await} import scala.concurrent.duration.Duration import scala.concurrent.ExecutionContext.Implicits.global trait PostObject { self: PreAuthContext => def doPostObject() = { post { extractBucket { bucketName => formField("key") { keyName => internal(bucketName, keyName) } } } } def internal(bucketName: String, keyName: String) = { entity(as[Multipart.FormData]) { mfd => // Post Object uses fields to send header data // TODO signature v4 val fdl = HeaderList.FromMultipart(mfd) val hl = req.listFromHeaders val origin = hl.get("Origin") val allHl = HeaderList.Aggregate(Seq(fdl, hl)) val policy = fdl.get("Policy") val successActionStatus = fdl.get("success_action_status").map(_.toInt) // "acl"?, // "Cache-Control"?, // "Content-Type"?, // "Content-Disposition"?, // "Content-Encoding"?, // "Expires"?, val callerId: Option[String] = if (policy.isDefined) { val getSecretKey = (accessKey: String) => users.getId(accessKey).flatMap(users.getUser(_)).map(_.secretKey).get val authKey = Stream(PostAuthV2(fdl, getSecretKey)).map(_.run).find(_.isDefined).flatten authKey.isDefined.orFailWith(Error.SignatureDoesNotMatch()) users.getId(authKey.get) } else { None } val k = tree .findBucket(bucketName).get .key(keyName) k.mk val v = k.acquireNewVersion Acl.File(callerId, Seq()).write(v.acl) // val writeFut: Future[Unit] = mfd.parts.runForeach { part => // println(part.name) // if (part.name == "file") { // v.data.writeBytes(part.entity.dataBytes) // } // } // Await.ready(writeFut, Duration.Inf) println(fdl.bytes) v.data.writeBytes(fdl.bytes.toArray) val newETag = v.data.computeMD5 // TODO check // val usrMeta = fd.fields.filter { case (k,_) => k.startsWith("x-amz-meta-") } Meta.File( isVersioned = false, isDeleteMarker = false, eTag = newETag, attrs = KVList.builder .append(CONTENT_TYPE, allHl.get(CONTENT_TYPE)) .append(CONTENT_DISPOSITION, allHl.get(CONTENT_DISPOSITION)) .build, xattrs = KVList.builder.build ).write(v.meta) v.commit val headers = ETag(newETag) +: // not sure required `Access-Control-Allow-Credentials`(true) +: RawHeaderList( (X_AMZ_REQUEST_ID, requestId) ) .applySome(origin) { a => b => `Access-Control-Allow-Origin`(HttpOrigin(b)) +: a } // Location is URL-encoded unlike Key isn't. // From out analysis of the actual http response, Location is found encoded. val xml = <PostResponse> <Bucket>{bucketName}</Bucket> <Key>{keyName}</Key> <ETag>{newETag}</ETag> <Location>{s"http://${server.config.ip}:${server.config.port}/${bucketName}/${URLEncoder.encode(keyName)}"}</Location> </PostResponse> // If the value is set to 200 or 204, Amazon S3 returns an empty document with a 200 or 204 status code. // If the value is set to 201, Amazon S3 returns an XML document with a 201 status code. // If the value is not set or if it is set to an invalid value, Amazon S3 returns an empty document with a 204 status code. val code = successActionStatus.getOrElse(204) code match { case 200 | 204 => complete(code, headers, HttpEntity.Empty) case 201 => complete(code, headers, xml) case _ => complete(StatusCodes.Forbidden) // FIXME } } } }
akiradeveloper/akka-s3
src/main/scala/akka/s3/api/PostObject.scala
Scala
apache-2.0
4,076
package com.github.swwjf.ws import com.github.springtestdbunit.DbUnitTestExecutionListener import com.github.springtestdbunit.annotation._ import com.github.springtestdbunit.assertion.DatabaseAssertionMode import com.github.swwjf.WebServicesApplication import com.github.swwjf.config.WSDBTestConfig import org.junit.Test import org.junit.runner.RunWith import org.springframework.beans.factory.annotation.Autowired import org.springframework.boot.test.SpringApplicationConfiguration import org.springframework.http.MediaType import org.springframework.test.context.support.DependencyInjectionTestExecutionListener import org.springframework.test.context.transaction.TransactionalTestExecutionListener import org.springframework.test.context.{TestExecutionListeners, TestContextManager} import org.springframework.test.context.junit4.SpringJUnit4ClassRunner import org.springframework.test.context.web.WebAppConfiguration import org.springframework.test.web.servlet.MockMvc import org.springframework.test.web.servlet.request.MockMvcRequestBuilders._ import org.springframework.test.web.servlet.result.MockMvcResultMatchers._ import org.springframework.test.web.servlet.setup.MockMvcBuilders import org.springframework.web.context.WebApplicationContext @RunWith(classOf[SpringJUnit4ClassRunner]) @TestExecutionListeners(Array( classOf[DbUnitTestExecutionListener], classOf[DependencyInjectionTestExecutionListener], classOf[TransactionalTestExecutionListener] )) @WebAppConfiguration @SpringApplicationConfiguration(Array(classOf[WebServicesApplication])) @DbUnitConfiguration(databaseConnection = Array(WSDBTestConfig.ConnectionName)) class InfoEndpointTest { @Autowired private val webApplicationContext: WebApplicationContext = null new TestContextManager(getClass).prepareTestInstance(this) private val mockMvc: MockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build() @Test @DatabaseTearDown( connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml"), `type` = DatabaseOperation.DELETE_ALL) @ExpectedDatabase( connection = WSDBTestConfig.ConnectionName, value = "classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml", assertionMode = DatabaseAssertionMode.NON_STRICT_UNORDERED) def testShouldSaveInfo(): Unit = { mockMvc.perform( post("/info") .contentType(MediaType.APPLICATION_JSON) .content(saveInfoRequest)) .andExpect(status().isOk) } @Test @DatabaseSetup(connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml")) @DatabaseTearDown( connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml"), `type` = DatabaseOperation.DELETE_ALL) @ExpectedDatabase( connection = WSDBTestConfig.ConnectionName, value = "classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml", assertionMode = DatabaseAssertionMode.NON_STRICT_UNORDERED) def testShouldFailOnSaveInfoDueToDuplicateLabel(): Unit = { mockMvc.perform( post("/info") .contentType(MediaType.APPLICATION_JSON) .content(saveInfoRequest)) .andExpect(status().isBadRequest) .andExpect(content().json( """ { "error_message" : "Invalid/duplicate label" } """ )) } @Test @DatabaseSetup(connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointSaveInfoExpectedResultTestDataSet.xml")) @DatabaseTearDown( connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointUpdateInfoExpectedResultTestDataSet.xml"), `type` = DatabaseOperation.DELETE_ALL) @ExpectedDatabase( connection = WSDBTestConfig.ConnectionName, value = "classpath:/InfoEndpointUpdateInfoExpectedResultTestDataSet.xml", assertionMode = DatabaseAssertionMode.NON_STRICT_UNORDERED) def testShouldUpdateInfo(): Unit = { mockMvc.perform( put("/info") .contentType(MediaType.APPLICATION_JSON) .content(updateInfoRequest)) .andExpect(status().isOk) } @Test @DatabaseSetup(connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointGetAllSavedInfoExpectedResultTestDataSet.xml")) @DatabaseTearDown( connection = WSDBTestConfig.ConnectionName, value = Array("classpath:/InfoEndpointGetAllSavedInfoExpectedResultTestDataSet.xml"), `type` = DatabaseOperation.DELETE_ALL) @ExpectedDatabase( connection = WSDBTestConfig.ConnectionName, value = "classpath:/InfoEndpointGetAllSavedInfoExpectedResultTestDataSet.xml", assertionMode = DatabaseAssertionMode.NON_STRICT_UNORDERED) def testShouldGetAllSavedInfo(): Unit = { mockMvc.perform( get("/info")) .andExpect(status().isOk) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andExpect(content().json(getAllSavedInfoResponse)) } private val saveInfoRequest = """ { "label": "sample", "main_details": "pin code: ****", "comments": "this is super important info" } """ private val updateInfoRequest = """ { "label": "sample", "main_details": "pin code: ****", "comments": "pin code has changed!" } """ private val getAllSavedInfoResponse = """ [ { "updatedDate":"2016-02-14T18:07:04", "createdDate":"2016-01-14T18:07:04", "label":"sample", "comments":"this is super important info", "main_details":"pin code: ****" } ] """ }
andrei-l/scala-webapp-with-java-frameworks
webservices/src/test/scala/com/github/swwjf/ws/InfoEndpointTest.scala
Scala
mit
5,669
package maker.project import maker.task.tasks.UpdateTask import maker.utils.FileUtils._ import java.io.BufferedWriter import scala.collection.immutable.VectorBuilder import org.eclipse.aether.artifact.Artifact import maker.ScalaVersion trait Bootstrapper{ self : Module => def updateMakerScript(marker : String, replacement : Seq[String]){ val builder = new VectorBuilder[String]() val lines = file("maker.py").readLines val start = lines.indexWhere(_.startsWith(marker)) val end = lines.indexWhere(_.startsWith(marker), start + 1) builder ++= lines.take(start + 1) builder ++= replacement builder ++= lines.drop(end) withFileWriter(file("maker.py")){ writer : BufferedWriter => val text = builder.result.mkString("\n") writer.println(text) } } def writeBoostrapFile(){ // TODO - exclusions val artifacts = new UpdateTask(self).binaryArtifacts.filterNot(_.getArtifactId == "compiler-interface") val bldr = new VectorBuilder[String]() def makeLine(artifact : Artifact) = { val group = artifact.getGroupId.replace('.', '/') val id = artifact.getArtifactId val version = artifact.getVersion s"""\t(MAVEN, "$group", "$id", "$version")""" } bldr += "MAKER_DEPENDENCIES = [" artifacts.dropRight(1).foreach{ artifact => bldr += (makeLine(artifact) + ",") } bldr += makeLine(artifacts.last) bldr += "]" updateMakerScript("# GENERATED MAKER DEPENDENCIES", bldr.result) } }
cage433/maker
maker/src/maker/project/Bootstrapper.scala
Scala
bsd-2-clause
1,589
import scala.reflect.macros.whitebox.Context import scala.language.experimental.macros import scala.language.postfixOps import scala.annotation.StaticAnnotation object kaseMacro { // ======= DIFFERENCES WITH VANILLA CASE ======= // (this list is probably not exhaustive, since I didn't have time to study all details of the existing implementation // 1) No CASE and SYNTHETIC flags (those have special treatment in the compiler, which I didn't want to interact with) // 2) No warnings specially tailored for case classes, because the compiler knows how methods like equal will behave // 3) No error messages specially tailored for case classes in case of synthesis conflicts (e.g. implicit case class) // 4) No special treatment for case class pattern matching (constructor patterns, refutability checks) // 5) No special treatment in RefChecks.relativeVariance (I don't even know why that one's necessary) // 6) No specialized hashcode implementation for primitive fields (I didn't have time to go into all the details of codegen) // 7) No referential transparency for methods like apply or copy that are injected into the companion, but should use kase class definition scope abstract class kaseHelper[C <: Context](val c: C) { import c.universe._ import c.universe.{Flag => PublicFlags} import scala.reflect.internal.{Flags => InternalFlags} def isTrait(mods: Modifiers) = (mods.flags.asInstanceOf[Long] & InternalFlags.TRAIT) != 0 def isFinal(mods: Modifiers) = (mods.flags.asInstanceOf[Long] & InternalFlags.FINAL) != 0 def isByName(mods: Modifiers) = (mods.flags.asInstanceOf[Long] & InternalFlags.BYNAMEPARAM) != 0 def isAbstract(mods: Modifiers) = (mods.flags.asInstanceOf[Long] & InternalFlags.ABSTRACT) != 0 val ParamMods = Modifiers(InternalFlags.PARAM.asInstanceOf[Long].asInstanceOf[FlagSet]) val SyntheticMods = Modifiers((0 /* | InternalFlags.SYNTHETIC */).asInstanceOf[Long].asInstanceOf[FlagSet]) val SyntheticCaseMods = Modifiers((0 /* | InternalFlags.SYNTHETIC | InternalFlags.CASE */).asInstanceOf[Long].asInstanceOf[FlagSet]) val OverrideSyntheticMods = Modifiers(PublicFlags.OVERRIDE /* | InternalFlags.SYNTHETIC.asInstanceOf[Long].asInstanceOf[FlagSet] */) val FinalOverrideSyntheticMods = Modifiers(PublicFlags.FINAL | PublicFlags.OVERRIDE /* | InternalFlags.SYNTHETIC.asInstanceOf[Long].asInstanceOf[FlagSet] */) def makeCase(mods: Modifiers) = { val flags1 = mods.flags.asInstanceOf[Long] /* | InternalFlags.CASEACCESSOR */ Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def makeCaseAccessor(mods: Modifiers) = { if (isByName(mods)) c.abort(c.enclosingPosition, "`kase` parameters may not be call-by-name") val flags1 = mods.flags.asInstanceOf[Long] /* | InternalFlags.CASEACCESSOR */ & ~InternalFlags.PRIVATE & ~InternalFlags.LOCAL Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def unmakeDefault(mods: Modifiers) = { var flags1 = mods.flags.asInstanceOf[Long] & ~InternalFlags.DEFAULTPARAM Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def unmakeCaseAccessor(mods: Modifiers) = { var flags1 = mods.flags.asInstanceOf[Long] & ~InternalFlags.PARAMACCESSOR & ~InternalFlags.CASEACCESSOR Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def makeDefault(mods: Modifiers) = Modifiers(mods.flags | PublicFlags.DEFAULTPARAM, mods.privateWithin, mods.annotations) def unmakeParam(mods: Modifiers) = { var flags1 = mods.flags.asInstanceOf[Long] & ~InternalFlags.PARAM Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def makeDeferredSynthetic(mods: Modifiers) = { var flags1 = mods.flags.asInstanceOf[Long] | InternalFlags.DEFERRED /* | InternalFlags.SYNTHETIC */ Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def unmakeVariant(mods: Modifiers) = { var flags1 = mods.flags.asInstanceOf[Long] & ~InternalFlags.COVARIANT & ~InternalFlags.CONTRAVARIANT Modifiers(flags1.asInstanceOf[FlagSet], mods.privateWithin, mods.annotations) } def expand(annottees: List[c.Tree]): List[c.Tree] } class kaseClassHelper[C <: Context](override val c: C) extends kaseHelper(c) { import c.universe._ import definitions._ def expand(annottees: List[c.Tree]): List[c.Tree] = { val cdef @ ClassDef(_, name, tparams, Template(_, _, cbody)) = annottees.head val primaryCtor = cbody.collect{ case ddef @ DefDef(_, termNames.CONSTRUCTOR, _, _, _, _) => ddef }.head if (primaryCtor.vparamss.isEmpty) c.abort(c.enclosingPosition, "`kase` is not applicable to classes without a parameter list") val primaryParamss = primaryCtor.vparamss val primaryParams = primaryParamss.head val secondaryParamss = primaryParamss.tail val ourPolyType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(tparam => Ident(tparam.name))) else Ident(name) val ourWildType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(_ => Bind(typeNames.WILDCARD, EmptyTree))) else Ident(name) val tparamUnderscores = tparams.zipWithIndex.map{ case (tdef, i) => TypeDef(makeDeferredSynthetic(unmakeParam(tdef.mods)), TypeName("x$" + (i+1)), tdef.tparams, tdef.rhs) } val ourExistentialType = ExistentialTypeTree(AppliedTypeTree(Ident(name), tparamUnderscores.map(tdef => Ident(tdef.name))), tparamUnderscores) val kaseClass = { val ClassDef(cmods, _, _, Template(cparents, cself, _)) = cdef // step 1: make it a case class val cmods1 = makeCase(cmods) // step 2: turn param accessors into case accessors val cbody2 = cbody.map { case ValDef(mods, name, tpt, rhs) if primaryParams.exists(_.name == name) => ValDef(makeCaseAccessor(mods), name, tpt, rhs) case stat => stat } // step 3: inject copy if not defined val cbody3 = { if (cbody2.collect{ case ddef @ DefDef(_, name, _, _, _, _) if name == TermName("copy") => ddef }.nonEmpty) cbody2 else { val copyTparams = tparams val primaryCopyParamss = primaryParams.map(p => ValDef(makeDefault(unmakeCaseAccessor(p.mods)), p.name, p.tpt, Ident(p.name))) val secondaryCopyParamss = secondaryParamss.map(_.map(p => ValDef(unmakeDefault(unmakeCaseAccessor(p.mods)), p.name, p.tpt, EmptyTree))) val copyParamss = primaryCopyParamss :: secondaryCopyParamss val copyArgss = copyParamss.map(_.map(p => Ident(p.name))) val copyBody = copyArgss.foldLeft(Select(New(Ident(cdef.name)), termNames.CONSTRUCTOR): Tree)((callee, args) => Apply(callee, args)) val copyMethod = DefDef(SyntheticMods, TermName("copy"), copyTparams, copyParamss, TypeTree(), copyBody) cbody2 :+ copyMethod } } // step 4: implement product val cparents4 = cparents :+ Select(Ident(TermName("scala")), TypeName("Product")) :+ Select(Ident(TermName("scala")), TypeName("Serializable")) val cbody4 = { val productPrefixMethod = DefDef(OverrideSyntheticMods, TermName("productPrefix"), Nil, Nil, TypeTree(), Literal(Constant(name.toString))) val productArityMethod = DefDef(SyntheticMods, TermName("productArity"), Nil, Nil, TypeTree(), Literal(Constant(primaryParams.length))) val productElementParam = ValDef(ParamMods, TermName("x$1"), Select(Ident(TermName("scala")), TypeName("Int")), EmptyTree) def productElementByIndex(i: Int) = CaseDef(Literal(Constant(i)), EmptyTree, Select(This(name), primaryParams(i).name)) val productElementFallback = CaseDef(Ident(termNames.WILDCARD), EmptyTree, Throw(Apply(Select(New(TypeTree(c.mirror.staticClass("java.lang.IndexOutOfBoundsException").toType)), termNames.CONSTRUCTOR), List(Apply(Select(Ident(productElementParam.name), TermName("toString")), List()))))) val productElementBody = Match(Ident(productElementParam.name), (0 until primaryParams.length map productElementByIndex toList) :+ productElementFallback) val productElementMethod = DefDef(SyntheticMods, TermName("productElement"), Nil, List(List(productElementParam )), TypeTree(), productElementBody) val scalaRunTime = Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) val productIteratorBody = Apply(TypeApply(Select(scalaRunTime, TermName("typedProductIterator")), List(Ident(TypeName("Any")))), List(This(name))) val productIteratorMethod = DefDef(OverrideSyntheticMods, TermName("productIterator"), Nil, Nil, TypeTree(), productIteratorBody) val canEqualParam = ValDef(ParamMods, TermName("x$1"), Select(Ident(TermName("scala")), TypeName("Any")), EmptyTree) val canEqualBody = TypeApply(Select(Ident(canEqualParam.name), TermName("isInstanceOf")), List(ourExistentialType)) val canEqualMethod = DefDef(SyntheticMods, TermName("canEqual"), Nil, List(List(canEqualParam)), TypeTree(), canEqualBody) cbody3 ++ List(productPrefixMethod, productArityMethod, productElementMethod, productIteratorMethod, canEqualMethod) } // step 5: inject hashcode val cbody5 = { val scalaRunTime = Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) val hashcodeMethod = DefDef(OverrideSyntheticMods, TermName("hashCode"), Nil, Nil, TypeTree(), Apply(Select(scalaRunTime, TermName("_hashCode")), List(This(name)))) cbody4 :+ hashcodeMethod } // step 6: inject meaningful toString if not defined val cbody6 = { if (cbody5.collect{ case ddef @ DefDef(_, name, _, _, _, _) if name == TermName("toString") => ddef }.nonEmpty) cbody5 else { val scalaRunTime = Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) val toStringBody = Apply(Select(scalaRunTime, TermName("_toString")), List(This(name))) val toStringMethod = DefDef(OverrideSyntheticMods, TermName("toString"), Nil, Nil, TypeTree(), toStringBody) cbody5 :+ toStringMethod } } // step 7: inject equals val cbody7 = { val equalsParam = ValDef(ParamMods, TermName("x$1"), Select(Ident(TermName("scala")), TypeName("Any")), EmptyTree) val equalsBody = { def thisEqThat = { val thatAnyRef = TypeApply(Select(Ident(equalsParam.name), TermName("asInstanceOf")), List(Ident(TypeName("Object")))) Apply(Select(This(name), TermName("eq")), List(thatAnyRef)) } def thatCanEqualThis = { val thatC = TypeApply(Select(Ident(equalsParam.name), TermName("asInstanceOf")), List(ourPolyType)) Apply(Select(thatC, TermName("canEqual")), List(This(name))) } def sameTypeCheck = { val ifSameType = CaseDef(Typed(Ident(termNames.WILDCARD), ourWildType), EmptyTree, Literal(Constant(true))) val otherwise = CaseDef(Ident(termNames.WILDCARD), EmptyTree, Literal(Constant(false))) Match(Ident(equalsParam.name), List(ifSameType, otherwise)) } def sameFieldsCheck = { val thatC = ValDef(SyntheticMods, TermName(name.toString + "$1"), ourPolyType, TypeApply(Select(Ident(equalsParam.name), TermName("asInstanceOf")), List(ourPolyType))) val sameFieldsChecks = primaryParams.map(p => Apply(Select(Select(This(name), p.name), TermName("==").encodedName), List(Select(Ident(thatC.name), p.name)))) val thatCanEqualThis = Apply(Select(Ident(thatC.name), TermName("canEqual")), List(This(name))) val sameFieldCheck = (sameFieldsChecks :+ thatCanEqualThis).reduceLeft((acc, check) => Apply(Select(acc, TermName("&&").encodedName), List(check))) Block(List(thatC), sameFieldCheck) } if (primaryParamss.isEmpty) { if (isFinal(cmods)) sameTypeCheck else Apply(Select(sameTypeCheck, TermName("&&").encodedName), List(thatCanEqualThis)) } else { val thisEqualsThat = Apply(Select(sameTypeCheck, TermName("&&").encodedName), List(sameFieldsCheck)) Apply(Select(thisEqThat, TermName("||").encodedName), List(thisEqualsThat)) } } val equalsMethod = DefDef(OverrideSyntheticMods, TermName("equals"), Nil, List(List(equalsParam)), TypeTree(), equalsBody) cbody6 :+ equalsMethod } ClassDef(cmods1, name, tparams, Template(cparents4, cself, cbody7)) } val kaseModule = { val mdef @ ModuleDef(mmods, mname, Template(mparents, mself, mbody)) = annottees.tail.headOption getOrElse { val shouldInheritFromFun = !isAbstract(cdef.mods) && tparams.isEmpty && primaryParamss.length == 1 val funClass = Select(Select(Ident(TermName("scala")), TermName("runtime")), TypeName("AbstractFunction" + primaryParams.length)) val funParent = AppliedTypeTree(funClass, primaryParams.map(_.tpt) :+ Ident(name)) val parents = if (shouldInheritFromFun) List(funParent) else List(Ident(AnyRefClass)) val emptyCtor = DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(typeNames.EMPTY), typeNames.EMPTY), termNames.CONSTRUCTOR), List())), Literal(Constant(())))) ModuleDef(SyntheticMods, name.toTermName, Template(parents, noSelfType, List(emptyCtor))) } // step 1: inject toString if not defined val mbody1 = { if (mbody.collect{ case ddef @ DefDef(_, name, _, _, _, _) if name == TermName("toString") => ddef }.nonEmpty) mbody else { val toStringBody = Literal(Constant(name.toString)) val toStringMethod = DefDef(FinalOverrideSyntheticMods, TermName("toString"), Nil, Nil, TypeTree(), toStringBody) mbody :+ toStringMethod } } // step 2: inject apply val mbody2 = { val applyTparams = tparams.map(p => TypeDef(unmakeVariant(p.mods), p.name, p.tparams, p.rhs)) val applyParamss = primaryParamss.map(_.map(p => ValDef(unmakeCaseAccessor(p.mods), p.name, p.tpt, p.rhs))) val applyArgss = applyParamss.map(_.map(p => Ident(p.name))) val applyBody = applyArgss.foldLeft(Select(New(ourPolyType), termNames.CONSTRUCTOR): Tree)((callee, args) => Apply(callee, args)) val applyMethod = DefDef(SyntheticCaseMods, TermName("apply"), applyTparams, applyParamss, TypeTree(), applyBody) mbody1 :+ applyMethod } // step 3: inject unapply val mbody3 = { val unapplyTparams = tparams.map(p => TypeDef(unmakeVariant(p.mods), p.name, p.tparams, p.rhs)) val unapplyParam = ValDef(ParamMods, TermName("x$0"), ourPolyType, EmptyTree) val unapplyName = primaryParams match { case _ :+ AppliedTypeTree(tpt: RefTree, _) if tpt.name == TypeName("<repeated>") => TermName("unapplySeq") case _ => TermName("unapply") } val unapplyBody = { val none = Select(Ident(TermName("scala")), TermName("None")) def some(xs: Tree*) = Apply(Select(Ident(TermName("scala")), TermName("Some")), xs.toList) val thisEqNull = Apply(Select(Ident(unapplyParam.name), TermName("==").encodedName), List(Literal(Constant(null)))) val failure = primaryParams match { case Nil => Literal(Constant(false)) case _ => none } val success = primaryParams match { case Nil => Literal(Constant(true)) case ps => val fs = ps.map(p => Select(Ident(unapplyParam.name), p.name)) val tuple = if (ps.length == 1) fs.head else Apply(Select(Ident(TermName("scala")), TermName("Tuple" + ps.length)), fs) some(tuple) } If(thisEqNull, failure, success) } val unapplyMethod = DefDef(SyntheticCaseMods, unapplyName, unapplyTparams, List(List(unapplyParam)), TypeTree(), unapplyBody) mbody2 :+ unapplyMethod } ModuleDef(mmods, mname, Template(mparents, mself, mbody3)) } List(kaseClass, kaseModule) } } class kaseObjectHelper[C <: Context](override val c: C) extends kaseHelper(c) { import c.universe._ def expand(annottees: List[c.Tree]): List[c.Tree] = { val mdef @ ModuleDef(mods, name, Template(parents, self, body)) :: Nil = annottees // step 1: make it a case object val mods1 = makeCase(mods) // step 2: implement product val parents2 = parents :+ Select(Ident(TermName("scala")), TypeName("Product")) :+ Select(Ident(TermName("scala")), TypeName("Serializable")) val body2 = { val productPrefixMethod = DefDef(OverrideSyntheticMods, TermName("productPrefix"), Nil, Nil, TypeTree(), Literal(Constant(name.toString))) val productArityMethod = DefDef(SyntheticMods, TermName("productArity"), Nil, Nil, TypeTree(), Literal(Constant(0))) val productElementParam = ValDef(ParamMods, TermName("x$1"), Select(Ident(TermName("scala")), TypeName("Int")), EmptyTree) val productElementBody = Match(Ident(productElementParam.name), List(CaseDef(Ident(termNames.WILDCARD), EmptyTree, Throw(Apply(Select(New(TypeTree(c.mirror.staticClass("java.lang.IndexOutOfBoundsException").toType)), termNames.CONSTRUCTOR), List(Apply(Select(Ident(productElementParam.name), TermName("toString")), List()))))))) val productElementMethod = DefDef(SyntheticMods, TermName("productElement"), Nil, List(List(productElementParam )), TypeTree(), productElementBody) val scalaRunTime = Select(Select(Ident(TermName("scala")), TermName("runtime")), TermName("ScalaRunTime")) val productIteratorBody = Apply(TypeApply(Select(scalaRunTime, TermName("typedProductIterator")), List(Ident(TypeName("Any")))), List(This(name.toTypeName))) val productIteratorMethod = DefDef(OverrideSyntheticMods, TermName("productIterator"), Nil, Nil, TypeTree(), productIteratorBody) val canEqualParam = ValDef(ParamMods, TermName("x$1"), Select(Ident(TermName("scala")), TypeName("Any")), EmptyTree) val canEqualBody = TypeApply(Select(Ident(canEqualParam.name), TermName("isInstanceOf")), List(SingletonTypeTree(Ident(name)))) val canEqualMethod = DefDef(SyntheticMods, TermName("canEqual"), Nil, List(List(canEqualParam)), TypeTree(), canEqualBody) body ++ List(productPrefixMethod, productArityMethod, productElementMethod, productIteratorMethod, canEqualMethod) } // step 3: inject hashcode val body3 = { val hashcodeMethod = DefDef(OverrideSyntheticMods, TermName("hashCode"), Nil, Nil, TypeTree(), Literal(Constant((name.decodedName.toString.hashCode)))) body2 :+ hashcodeMethod } // step 4: inject meaningful toString if not defined val body4 = { if (body3.collect{ case ddef @ DefDef(_, name, _, _, _, _) if name == TermName("toString") => ddef }.nonEmpty) body3 else { val toStringMethod = DefDef(OverrideSyntheticMods, TermName("toString"), Nil, Nil, TypeTree(), Literal(Constant(name.toString))) body3 :+ toStringMethod } } List(ModuleDef(mods1, name, Template(parents2, self, body4))) } } def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { import c.universe._ val helper = { annottees.head.tree match { case ClassDef(_, _, _, _) => new kaseClassHelper[c.type](c) case ModuleDef(_, _, _) => new kaseObjectHelper[c.type](c) case _ => c.abort(c.enclosingPosition, "`kase` is only applicable to classes and objects") } } c.Expr[Any](Block(helper.expand(annottees.map(_.tree).toList), Literal(Constant(())))) } } class kase extends StaticAnnotation { def macroTransform(annottees: Any*): Any = macro kaseMacro.impl } package pkg { class kase extends StaticAnnotation { def macroTransform(annottees: Any*): Any = macro kaseMacro.impl } }
lrytz/scala
test/macro-annot/run/kase/macro_kase_1.scala
Scala
apache-2.0
20,437
package optimizers import breeze.linalg.{DenseVector, Vector} import models.{Model, Regularizer, RealFunction, Loss} import org.apache.spark.rdd.RDD import vectors.LabelledPoint trait Optimizer[-LossType<:Loss[_,_], DataType, AlphasType, VType] extends Serializable { def optimize(model: Model[LossType], data: DataType, alpha: AlphasType, v: VType): (AlphasType,VType) } trait SingleCoordinateOptimizer[-LossType<:Loss[_,_]] extends Optimizer[LossType, LabelledPoint, Double, Vector[Double]] trait LocalOptimizer[-LossType<:Loss[_,_]] extends Optimizer[LossType, Array[LabelledPoint], DenseVector[Double], DenseVector[Double]] trait DistributedOptimizer[-LossType<:Loss[_,_]] extends Optimizer[LossType, RDD[LabelledPoint], RDD[Double], DenseVector[Double]]
sforte/cacao
src/main/scala/optimizers/Optimizer.scala
Scala
apache-2.0
771
import play.api._ import play.api.mvc.Results.InternalServerError import play.api.Logger import play.api.mvc.RequestHeader import play.api.mvc.SimpleResult import play.api.mvc.Result import play.api.mvc.Results._ import play.api.mvc.Handler object Global extends GlobalSettings { override def onStart(app: Application) { Logger.info("Application has started") } override def onStop(app: Application) { Logger.info("Application shutdown...") } override def onRouteRequest(request: RequestHeader): Option[Handler] = { if (!request.toString.contains("assets") && !request.toString.contains("javascriptRoutes")) { Logger.info("Request:" + request.toString) } super.onRouteRequest(request) } }
knoldus/Play-Starter-Template
app/Global.scala
Scala
apache-2.0
733
object HelloWorld { def main(args: Array[String]) { assert(org.eclipse.core.runtime.adaptor.EclipseStarter.PROP_BUNDLES != null) println("Hello, world!") } }
digimead/sbt-osgi-manager
src/sbt-test/osgi-manager/workWithPluginAOP/src/main/scala/Hello.scala
Scala
apache-2.0
172
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.predictionio.workflow import akka.actor.Actor import akka.event.Logging import org.apache.predictionio.data.storage.EngineInstance import org.json4s.JValue class PluginsActor(engineVariant: String) extends Actor { implicit val system = context.system val log = Logging(system, this) val pluginContext = EngineServerPluginContext(log, engineVariant) def receive: PartialFunction[Any, Unit] = { case (ei: EngineInstance, q: JValue, p: JValue) => pluginContext.outputSniffers.values.foreach(_.process(ei, q, p, pluginContext)) case h: PluginsActor.HandleREST => try { sender() ! pluginContext.outputSniffers(h.pluginName).handleREST(h.pluginArgs) } catch { case e: Exception => sender() ! s"""{"message":"${e.getMessage}"}""" } case _ => log.error("Unknown message sent to the Engine Server output sniffer plugin host.") } } object PluginsActor { case class HandleREST(pluginName: String, pluginArgs: Seq[String]) }
himanshudhami/PredictionIO
core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginsActor.scala
Scala
apache-2.0
1,820
package de.dominicscheurer.fsautils import Types._ import Conversions._ object RegularExpressions { sealed abstract class RE extends FSA_DSL { type MutableMap[A,B] = scala.collection.mutable.Map[A,B] type Map[A,B] = scala.collection.immutable.Map[A,B] def MutableMap[A,B]() : MutableMap[A,B] = collection.mutable.Map[A,B]() def *(): RE = Star(this) def +(rhs: RE): RE = Or(this, rhs) def &(rhs: RE): RE = Concat(this, rhs) def alphabet: Set[Letter] def toNFA: NFA = toNFAInt(alphabet, MutableMap()) def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]): NFA override def hashCode: Int = toString hashCode override def equals(other: Any): Boolean = other.isInstanceOf[RE] && (this.hashCode == other.hashCode) /** * cleanString does some post processing on the * toString method in order to make the output better * readable. However, you naturally achieve better * correctness guarantees without this method (since * this is just string manipulation with regular expressions). */ def cleanString = recClean(toString) private def recClean(s: String): String = { val cleanRes = clean(s) if (s equals cleanRes) { s } else { recClean(cleanRes) } } private def clean(s: String): String = s .replace("{} + ", "") .replace("({})*", "\u025B") // epsilon .replace("{}", "\u00D8") // emptyset .replace("**", "*") .replaceAll("""'([a-z])""", "$1") .replaceAll("""\(([a-z])\)""", "$1") .replaceAll("""\(\(([^\(\)]+)\)\)\*""", "($1)*") .replaceAll("""\(\u025B \+ ([^\(\)]+)\)\*""", "($1)*") .replaceAll(""" [&\+] \u00D8""", "") .replaceAll("""\u00D8 [&\+] """, "") .replaceAll("""\(([a-z\u025B])\)([\*]?)""", "$1$2") .replaceAll("""\(\(([^\(\)]+)\)\)""", "($1)") .replaceAll("""\(([a-z])\*\)""", "$1*") def clean: RE = this match { case Star(inner) => inner match { // ((...)*)* => (...)* case Star(inner2) => Star(inner2 clean) // ({}* + XXX)* => (XXX)* case Or(Star(Empty()), rhs) => rhs clean // (XXX + {}*)* => (XXX)* case Or(lhs, Star(Empty())) => lhs clean case _ => Star(inner clean) } case Or(lhs, rhs) => lhs match { // {} + (...) => (...) case Empty() => rhs clean case Star(Empty()) => rhs match { // {}* + (...)* => (...)* case Star(rhsInner) => Star(rhsInner clean) case _ => Or(lhs clean, rhs clean) } case _ => rhs match { // (...) + {} => (...) case Empty() => lhs clean case Star(Empty()) => lhs match { // (...)* + {}* => (...)* case Star(lhsInner) => Star(lhsInner clean) case _ => Or(lhs clean, rhs clean) } case _ => if (lhs equals rhs) // XXX + XXX => XXX lhs else Or(lhs clean, rhs clean) } } case Concat(lhs, rhs) => lhs match { // {} & (...) => (...) case Empty() => rhs clean case Or(Star(Empty()), lhsInner) => rhs match { case Star(rhsInner) => { val lhsInnerClean = lhsInner clean val rhsInnerClean = rhsInner clean if (lhsInnerClean equals rhsInnerClean) // (eps + XXX) & (XXX)* => (XXX)* Star(rhsInnerClean) else Concat(lhs clean, rhs clean) } case _ => Concat(lhs clean, rhs clean) } case Or(lhsInner, Star(Empty())) => rhs match { case Star(rhsInner) => { val lhsInnerClean = lhsInner clean val rhsInnerClean = rhsInner clean if (lhsInnerClean equals rhsInnerClean) // (XXX + eps) & (XXX)* => (XXX)* Star(rhsInnerClean) else Concat(lhs clean, rhs clean) } case _ => Concat(lhs clean, rhs clean) } case _ => rhs match { // (...) + {} => (...) case Empty() => lhs clean case Or(rhsInner, Star(Empty())) => lhs match { case Star(lhsInner) => { val lhsInnerClean = lhsInner clean val rhsInnerClean = rhsInner clean if (lhsInnerClean equals rhsInnerClean) // (XXX)* & (XXX + eps) => (XXX)* Star(lhsInnerClean) else Concat(lhs clean, rhs clean) } case _ => Concat(lhs clean, rhs clean) } case Or(Star(Empty()), rhsInner) => lhs match { case Star(lhsInner) => { val lhsInnerClean = lhsInner clean val rhsInnerClean = rhsInner clean if (lhsInnerClean equals rhsInnerClean) // (XXX)* & (eps + XXX) => (XXX)* Star(lhsInnerClean) else Concat(lhs clean, rhs clean) } case _ => Concat(lhs clean, rhs clean) } case _ => Concat(lhs clean, rhs clean) } } case _ => this } } case class L(l: Letter) extends RE { override def toString = l toString override def alphabet = Set(l) override def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]) = { val genNFA = nfa('Z, 'S, 'q0, 'd, 'A) where 'Z ==> alph and 'S ==> Set(0, 1) and 'q0 ==> 0 and 'A ==> Set(1) and 'd ==> Delta( (0, l) -> Set(1)) || cache += (this -> genNFA) genNFA } } case class Empty() extends RE { override def toString = "{}" override def alphabet = Set() override def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]) = { val emptyAcc: Set[Int] = Set() val genNFA = nfa('Z, 'S, 'q0, 'd, 'A) where 'Z ==> alph and 'S ==> Set(0) and 'q0 ==> 0 and 'A ==> emptyAcc and 'd ==> DeltaRel(Map()) || cache += (this -> genNFA) genNFA } } case class Star(re: RE) extends RE { override def toString = "(" + re.toString + ")*" override def alphabet = re.alphabet override def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]) = if (re equals Empty()) nfa('Z, 'S, 'q0, 'd, 'A) where 'Z ==> alph and 'S ==> Set(0) and 'q0 ==> 0 and 'A ==> Set(0) and 'd ==> DeltaRel(Map()) || else { cache get this match { case None => { val genNFA = (re toNFAInt (alph, cache))* cache += (this -> genNFA) genNFA } case Some(nfa) => nfa } } } case class Or(lhs: RE, rhs: RE) extends RE { override def toString = "(" + lhs.toString + " + " + rhs.toString + ")" override def alphabet = lhs.alphabet ++ rhs.alphabet override def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]) = cache get this match { case None => { val genNFA = ((lhs toNFAInt (alph, cache)) | (rhs toNFAInt (alph, cache))): NFA cache += (this -> genNFA) genNFA } case Some(nfa) => nfa } } case class Concat(lhs: RE, rhs: RE) extends RE { override def toString = "(" + lhs.toString + " & " + rhs.toString + ")" override def alphabet = lhs.alphabet ++ rhs.alphabet override def toNFAInt(alph: Set[Letter], cache: MutableMap[RE, NFA]) = cache get this match { case None => { val genNFA = ((lhs toNFAInt (alph, cache)) ++ (rhs toNFAInt (alph, cache))): NFA cache += (this -> genNFA) genNFA } case Some(nfa) => nfa } } }
rindPHI/FSAUtils
src/de/dominicscheurer/fsautils/RegularExpressions.scala
Scala
mit
9,930
import java.io.{File, FilenameFilter} import flaky._ import flaky.history.{Git, History, HistoryReport} import org.apache.commons.vfs2.VFS object RenderHtml extends App with Unzip { println("Creating report") private val reportsDir = new File("target/flakyreports") private val dirWithReports = new File("src/test/resources/history8") val log = new DummySbtLogger() private val zipFile: File = dirWithReports .listFiles(new FilenameFilter { override def accept(dir: File, name: String): Boolean = name.endsWith("zip") }).minBy(_.getName) private val projectZip = new File("src/test/resources/gitrepo.zip") private val unzipDir = new File("target/unzipped/") println(s"Unzipping ${zipFile.getPath}") unzip(projectZip, unzipDir) private val projectDir = new File(unzipDir,"gitrepo") private val report = Flaky.createReportFromHistory(VFS.getManager.resolveFile(zipFile.toURI.toString.replace("file:/", "zip:/"))) private val historyReport: HistoryReport = new History("My App", dirWithReports, new File(""), projectDir).createHistoryReport() FlakyCommand.createHtmlReports("My App", report, Some(historyReport), reportsDir, Git(projectDir), log) println(s"Reports created in ${reportsDir.getAbsolutePath}") }
otrebski/sbt-flaky
src/test/scala/RenderHtml.scala
Scala
apache-2.0
1,256
object Bug { def foo(): Unit = { val v = { lazy val s = 0 s } } }
yusuke2255/dotty
tests/untried/pos/t5796.scala
Scala
bsd-3-clause
90
//: ---------------------------------------------------------------------------- //: Copyright (C) 2014 Verizon. All Rights Reserved. //: //: Licensed under the Apache License, Version 2.0 (the "License"); //: you may not use this file except in compliance with the License. //: You may obtain a copy of the License at //: //: http://www.apache.org/licenses/LICENSE-2.0 //: //: Unless required by applicable law or agreed to in writing, software //: distributed under the License is distributed on an "AS IS" BASIS, //: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //: See the License for the specific language governing permissions and //: limitations under the License. //: //: ---------------------------------------------------------------------------- package remotely package transport.netty import java.net.InetSocketAddress import java.io.File import io.netty.buffer.{ByteBuf,Unpooled} import io.netty.channel._ import io.netty.channel.socket.SocketChannel import io.netty.channel.socket.nio.NioServerSocketChannel import io.netty.handler.ssl.util.InsecureTrustManagerFactory import org.apache.commons.pool2.ObjectPool import io.netty.handler.codec.ByteToMessageDecoder import javax.net.ssl.{TrustManagerFactory,CertPathTrustManagerParameters} import java.security.KeyStore import java.io.FileInputStream import scalaz.{-\\/,\\/,\\/-,Monoid} import scalaz.concurrent.Task import scalaz.stream.{Process,async} import scodec.Err import scodec.bits.BitVector /////////////////////////////////////////////////////////////////////////// // A Netty based transport for remotely. // // Netty connections consist of two pipelines on each side of a // network connection, an outbound pipeline and an inbound pipeline // // Client Outbound Pipeline // // +---------+ // [ Network ] ← | Enframe | ← Client request // +---------+ // // Server Inbound Pipeline // // +---------+ +-----------------------+ // [ Network ] → | Deframe | → | ServerDeframedHandler | // +---------+ +-----------------------+ // // Deframe - decodes the framing in order to find message boundaries // // ServerDeframedHandler - accepts full messages from Deframe, for // each message, opens a queue/processs pair, calls the handler which // returns a result Process which is copied back out to the network // // Server Outbound Pipeline // // +---------+ +-----------------------+ // [ Network ] ← | Enframe | ← | ServerDeframedHandler | // +---------+ +-----------------------+ // // Enfrome - prepends each ByteVector emitted from the Process with a // int indicating how many bytes are in this ByteVector, when the // Process halts, a zero is written indicating the end of the stream // // +-----------------------+ // [ Network ] ← | ServerDeframedHandler | // +-----------------------+ // // // Client Intbound Pipeline // // +---------+ +-----------------------+ // [ Network ] → | Deframe | → | ClientDeframedHandler | // +---------+ +-----------------------+ // // Deframe - The same as in the Server pipeline // // ClientDeframedHandler - This is added to the pipeline when a // connection is borrowed from the connection pool. It holds onto a // queue which it feeds with frames passed up from Deframe. This queue // feeds the Process which represents the output of a remote call. // /** * set of messages passed in and out of the FrameEncoder/FrameDecoder * probably unnecessary, I'm probably just trying to sweep an * isInstanceOf test under the compiler */ sealed trait Framed case class Bits(bv: BitVector) extends Framed case object EOS extends Framed /** * handler which is at the lowest level of the stack, it decodes the * frames as described (where STU? where are they described?) it * emits Deframed things to the next level up which can then treat * the streams between each EOS we emit as a separate request */ class Deframe extends ByteToMessageDecoder { // stew loves mutable state // this will be None in between frames. // this will be Some(x) when we have seen all but x bytes of the // current frame. var remaining: Option[Int] = None override protected def decode(ctx: ChannelHandlerContext, // this is our network connection in: ByteBuf, // this is our input out: java.util.List[Object]): Unit = { remaining match { case None => // we are expecting a frame header of a single byte which is // the number of bytes in the upcoming frame if (in.readableBytes() >= 4) { val rem = in.readInt() if(rem == 0) { val _ = out.add(EOS) } else { remaining = Some(rem) } } case Some(rem) => // we are waiting for at least rem more bytes, as that is what // is outstanding in the current frame if(in.readableBytes() >= rem) { val bytes = new Array[Byte](rem) in.readBytes(bytes) remaining = None val bits = BitVector.view(bytes) val _ = out.add(Bits(bits)) } } } } class ClientDeframedHandler(queue: async.mutable.Queue[BitVector]) extends SimpleChannelInboundHandler[Framed] { // there has been an error private def fail(message: String, ctx: ChannelHandlerContext): Unit = { queue.fail(new Throwable(message)).runAsync(Function.const(())) val _ = ctx.channel.close() // should this be disconnect? is there a difference } // we've seen the end of the input, close the queue writing to the input stream private def close(): Unit = { queue.close.runAsync(Function.const(())) } override def exceptionCaught(ctx: ChannelHandlerContext, ee: Throwable): Unit = { ee.printStackTrace() fail(ee.getMessage, ctx) } override def channelRead0(ctx: ChannelHandlerContext, f: Framed): Unit = f match { case Bits(bv) => queue.enqueueOne(bv).runAsync(Function.const(())) case EOS => close() } } /** * output handler which gets a stream of BitVectors and enframes them */ @ChannelHandler.Sharable object Enframe extends ChannelOutboundHandlerAdapter { override def write(ctx: ChannelHandlerContext, obj: Object, cp: ChannelPromise): Unit = { obj match { case Bits(bv) => val byv = bv.toByteVector val _ = ctx.writeAndFlush(Unpooled.wrappedBuffer((codecs.int32.encodeValid(byv.size) ++ bv).toByteBuffer), cp) case EOS => val _ = ctx.writeAndFlush(Unpooled.wrappedBuffer(codecs.int32.encodeValid(0).toByteBuffer), cp) case x => throw new IllegalArgumentException("was expecting Framed, got: " + x) } } }
jedesah/remotely
core/src/main/scala/transport/netty/Transport.scala
Scala
apache-2.0
6,838
/** * Copyright 2015 Thomson Reuters * * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package cmwell.it import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers} import play.api.libs.json.Json import cmwell.util.http.{SimpleResponse, StringPath} import com.typesafe.scalalogging.LazyLogging import scala.concurrent.{Await, Future} import scala.concurrent.duration.FiniteDuration import scala.concurrent.duration._ import scala.io.Source /** * Created by yaakov on 7/10/16. */ class SparqlTests extends FunSpec with Matchers with Helpers with BeforeAndAfterAll with LazyLogging { val _sparql = cmw / "_sparql" // todo uncomment once we have data file, also un-ignore ignored tests below override def beforeAll() = { // val fileNTriple = Source.fromURL(this.getClass.getResource("/data-for-sparql.nt")).mkString // val resFut = Http.post(_in, fileNTriple, textPlain, List("format" -> "ntriples"), tokenHeader) // Await.result(resFut, requestTimeout) // // waitForData(cmw / "sparql.org" / "data.thomsonreuters.com", 316) } // todo move to Helpers def waitAndExtractBody(req: Future[SimpleResponse[Array[Byte]]]) = new String(Await.result(req, requestTimeout).payload, "UTF-8") // todo generalize and move to Helpers def waitForData(path: StringPath, expectedDataLength: Int, maxRetries: Int = 32, sleepTime: FiniteDuration = 1.second) = { import scala.concurrent.ExecutionContext.Implicits.global var (length, retry) = (0, 0) do { val res = new String(Await.result(Http.get(path, Seq("op"->"search", "length"->"1", "format"->"json")), requestTimeout).payload, "UTF-8") length = (Json.parse(res) \\ "results" \\ "total").asOpt[Int].getOrElse(0) retry+=1 sleepTime.fromNow.block } while(length<expectedDataLength && retry<maxRetries) } protected def postSparqlAndWaitForResults(query: String, queryParameters: Seq[(String,String)] = Seq()): String = waitAndExtractBody(postSparqlRequest(query, queryParameters)).trim protected def postSparqlRequest(query: String, queryParameters: Seq[(String,String)] = Seq()) = Http.post(_sparql, query, textPlain, queryParameters) protected def removeSysFieldsAndSort(ntriples: String) = ntriples.lines.toSeq.filterNot(_.contains("meta/sys#")).sorted.mkString("\\n") describe("SPARQL Tests") { ignore("should run a CONSTRUCT sparql on whole graph and return one record") { val query = """""".stripMargin val expectedResults = "" postSparqlAndWaitForResults(query) should be(expectedResults) } ignore("should run a sparql query for a given subject without any other information (i.e. get infoton)") { val query = "" val expected = "" val actual = removeSysFieldsAndSort(postSparqlAndWaitForResults(query)) actual should be(expected) } it("should explain to user that one does not simply stream all cm-well content using SPARQL") { postSparqlAndWaitForResults("SELECT * WHERE { ?s ?p ?o }") should be("[Error] Each triple-matching must have binding of a subject, a predicate or an object. If you'd like to download entire CM-Well's content, please use the Stream API\\n\\n-------------\\n| s | p | o |\\n=============\\n-------------") } it("should fail for a non-existing namespace") { val notSuchPred = "<http://no.such.predicate/in/the/world>" val ns = "http://no.such.predicate/in/the/" postSparqlAndWaitForResults(s"CONSTRUCT { ?s $notSuchPred ?o . } WHERE { ?s $notSuchPred ?o }") should be(s"[Error] Namespace $ns does not exist") } ignore("should warn the user when some searches were exahusted") { val query = "" postSparqlAndWaitForResults(query, Seq("intermediate-limit"->"1")) should startWith("[Warning] a query search was exhausted; results below may be partial! Please narrow your query to have complete results.") } it("should tell the user what is wrong with syntax of the query") { val query = "this string is not a valid sparql" postSparqlAndWaitForResults(query) should startWith("{\\"success\\":false,\\"error\\":\\"Lexical error at line 1") } // TODO: PLEASE READ THIS CAREFULLY WHEN ADDING TESTS HERE. May the force be with you. // // SPARQL TESTS CONVENTIONS: // ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ // Please only add data in NTriples to the resource file `data-for-sparql.nt` // Each test should use a different "subfolder" under /sparql.org (i.e. do not add more ntriples which their subject starts with /sparql.org/data.thomsonreuters.com) (NOT MANDATORY) // Each time you add data, kindly add an invocation to `waitForData` with the path and the amount of Infotons - at the end of `beforeAll` method (~ at line 30) (MANDATORY) } }
nruppin/CM-Well
server/cmwell-it/src/it/scala/cmwell/it/SparqlTests.scala
Scala
apache-2.0
5,332
package techex.domain import org.joda.time.Instant import techex.data.{EnterObservation, ExitObservation} import scalaz.Scalaz._ import scalaz.{Tree, _} object areas { val somewhere = Area("somewhere") val foyer = Area("foyer") val toiletAtSamf = Area("toilet @ Samfundet") val toiletAtSeminar = Area("toilet @ Seminar") val stage = Area("stage") val auditorium = Area("auditorium") val bar = Area("bar") val technoportStand = Area("Technoport stand") val seminarArea = Area("Technoport seminarområde") val samfundet = Area("Samfundet") val samfStorsal = Area("Storsalen") val samfKlubben = Area("Klubben") val meetingRoom = Area("Meetingroom") val kjelleren = Area("Kjelleren") val technoport2015 = Area("Technoport 2015") val auditoriumExit = Area("Auditorium exit") val standsTechEx = Area("Stands @ Technoport") val coffeeStand = Area("Coffee stand") val standIntention = Area("Intention") val standContext = Area("Context") val standUserInsight = Area("UserInsight") val standProduction = Area("Production") val standUse = Area("Use") val standEntrepenerurShip = Area("Entrepeneurship") val standInfo = Area("Infodesk") val standKantega = Area("Kantega stand") val storhubben = Area("Storhubben") val meeting = Area("MR") val mrtTuring = Area("Turing") val mrtTesla = Area("Tesla") val mrtEngelbart = Area("Engelbart") val mrtAda = Area("Ada") val mrtHopper = Area("Hopper") val team = Area("Team") val mrtCurie = Area("Curie") val desk1 = Area("desk1") val desk2 = Area("desk2") val desk3 = Area("desk3") val coffeeMachines = Area("coffeeMachines") val kantegaCoffeeUp = Area("kantegaCoffeeUpstairs") val kantegaCoffeeDn = Area("kantegaCoffeeDownstairs") val kantegaFelles = Area("felles") val kantegaKantine = Area("kantegaKantine") val kantegaOffice = Area("KantegaOffice") val meetingPoint = Area("Meetingpoint") def beaconPlacementFor(r: Area, minor: Int, prox: Proximity): (BeaconId, (Proximity, Area)) = BeaconId(minor) ->(prox, r) def beaconsAt(r: Area) = beaconPlacement.toList.filter(_._2._2 === r).map(_._1) val beaconPlacement: Map[BeaconId, (Proximity, Area)] = Map( beaconPlacementFor(desk1, 1, Near), beaconPlacementFor(desk2, 2, Near), beaconPlacementFor(desk3, 3, Near), beaconPlacementFor(kantegaCoffeeDn, 4, Near), beaconPlacementFor(kantegaCoffeeUp, 5, Near), beaconPlacementFor(mrtTuring, 6, Near), beaconPlacementFor(mrtTesla, 7, Near), beaconPlacementFor(mrtEngelbart, 8, Near), beaconPlacementFor(mrtAda, 9, Near), beaconPlacementFor(mrtHopper, 10, Near), beaconPlacementFor(mrtCurie, 11, Near), beaconPlacementFor(kantegaKantine, 12, Far), beaconPlacementFor(kantegaKantine, 13, Far), beaconPlacementFor(kantegaKantine, 14, Far), beaconPlacementFor(kantegaKantine, 15, Far), beaconPlacementFor(auditorium, 101, Far), beaconPlacementFor(auditorium, 102, Far), beaconPlacementFor(auditorium, 103, Far), beaconPlacementFor(auditorium, 104, Far), beaconPlacementFor(stage, 105, Near), beaconPlacementFor(coffeeStand, 106, Near), beaconPlacementFor(coffeeStand, 107, Near), beaconPlacementFor(coffeeStand, 108, Near), beaconPlacementFor(standIntention, 109, Near), beaconPlacementFor(standContext, 110, Near), beaconPlacementFor(standUserInsight, 111, Near), beaconPlacementFor(standProduction, 112, Near), beaconPlacementFor(standUse, 113, Near), beaconPlacementFor(standEntrepenerurShip, 114, Near), beaconPlacementFor(standKantega, 115, Near), beaconPlacementFor(samfStorsal, 116, Far), beaconPlacementFor(samfStorsal, 117, Far), beaconPlacementFor(samfStorsal, 118, Far), beaconPlacementFor(samfStorsal, 119, Far), beaconPlacementFor(samfKlubben, 120, Far), beaconPlacementFor(samfKlubben, 121, Far), beaconPlacementFor(meetingRoom, 122, Far), beaconPlacementFor(standInfo, 123, Far) ) val technoportLocationTree: Tree[Area] = technoport2015.node( samfundet.node( samfStorsal.leaf, samfKlubben.leaf), seminarArea.node( auditorium.leaf, stage.leaf), standsTechEx.node( standKantega.leaf, standContext.leaf, standEntrepenerurShip.leaf, standIntention.leaf, standProduction.leaf, standUse.leaf, standInfo.leaf, coffeeStand.leaf), meeting.node( meetingRoom.leaf )) val kantegaLocationTree: Tree[Area] = kantegaOffice.node( meeting.node( mrtTuring.leaf, mrtTesla.leaf, mrtAda.leaf, mrtHopper.leaf, mrtCurie.leaf), team.node( desk1.leaf, desk2.leaf, desk3.leaf), coffeeMachines.node( kantegaCoffeeUp.leaf, kantegaCoffeeDn.leaf), kantegaFelles.node( kantegaKantine.leaf) ) val locationHierarcy: Tree[Area] = somewhere.node(technoportLocationTree, kantegaLocationTree) def contains(parent: Area, other: Area): Boolean = { if (parent === other) true else areas .locationHierarcy .loc .find(loc => loc.getLabel === parent) .get .find(loc => loc.getLabel === other) .isDefined } def withParentAreas(area: Area): List[Area] = { def getParentMaybe(child: Option[Area]): List[Area] = { child match { case None => nil[Area] case Some(a) => a :: getParentMaybe(getParentArea(a)) } } getParentMaybe(Some(area)) } def getParentArea(area: Area): Option[Area] = { areas .locationHierarcy .loc .find(loc => loc.getLabel === area) .get.parent.map(_.getLabel) } } case class LocationId(value: String) case class Area(name: String) { def contains(other: Area) = areas.contains(this, other) def withParents: List[Area] = areas.withParentAreas(this) } object Area { implicit val areaEqual: Equal[Area] = Equal[String].contramap(_.name) } case class BeaconId(minor: Int) sealed trait Proximity { def isSameOrCloserThan(other: Proximity) = (this, other) match { case (Immediate, _) => true case (Near, Far | Near) => true case (Far, Far) => true case _ => false } def asString = this match { case Immediate => "immediate" case Near => "near" case Far => "far" } } object Proximity { def apply(value: String) = value.toLowerCase match { case "immediate" | "1" => Immediate case "near" | "2" => Near case _ => Far } def unapply(p: Proximity) = Option(p.asString) } case object Near extends Proximity case object Far extends Proximity case object Immediate extends Proximity sealed trait Direction { def asString = this match { case Exit => "exit" case _ => "enter" } } object Direction { def apply(value: String) = value.toLowerCase match { case "exit" => Exit case _ => Enter } def unapply(d: Direction) = Option(d.asString) } case object Enter extends Direction case object Exit extends Direction case class ObservationData(major: Option[Int], minor: Option[Int], proximity: Option[Proximity], activity: String) { def toObservation(playerId: PlayerId, instant: Instant): EnterObservation \\/ ExitObservation = Direction(activity) match { case Enter => -\\/(EnterObservation(BeaconId(minor.get), playerId, instant, proximity.get)) case Exit => \\/-(ExitObservation(playerId, instant)) } } case class Timed[A](timestamp: Instant, value: A) case class LocationUpdate(playerId: PlayerId, area: Area, instant: Instant) case class UpdateMeta(playerId: PlayerId, instant: Instant)
kantega/tech-ex-2015
backend/src/main/scala/techex/domain/areas.scala
Scala
mit
8,111
/** * Copyright (C) 2012-2013 Kaj Magnus Lindberg (born 1979) * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.debiki.core import com.debiki.core.{PostActionPayload => PAP} import java.{util => ju} import org.scalatest.FreeSpec import org.scalatest.MustMatchers import RawPostAction.copyCreatePost import Prelude._ /** Tests deletion of comments and comment trees. * * Constructs these comments: * * gp -> p -> c * gp -> d * gp -> e * * Deletes the whole `p` tree, and the single comment `d`. */ class PagePartsDeletionTest extends FreeSpec with MustMatchers { var postId = 1001 def nextId() = { postId += 1 ; postId } private def time(when: Int) = new ju.Date(when) val gp = RawPostAction.forNewPost(1001, creationDati = time(100), userIdData = SystemUser.UserIdData, parentPostId = None, text = "gp-text", approval = Some(Approval.AuthoritativeUser)) val p = copyCreatePost(gp, id = nextId(), parentPostId = Some(gp.id), text = "p-text") val c = copyCreatePost(gp, id = nextId(), parentPostId = Some(p.id), text = "c-text") val d = copyCreatePost(gp, id = nextId(), parentPostId = Some(gp.id), text = "d-text") val e = copyCreatePost(gp, id = nextId(), parentPostId = Some(gp.id), text = "e-text") val delete_p_tree = RawPostAction( nextId(), creationDati = time(101), postId = p.id, userIdData = SystemUser.UserIdData, payload = PAP.DeleteTree) val delete_d = delete_p_tree.copy(id = nextId(), postId = d.id, payload = PAP.DeletePost(clearFlags = false)) val pageNoDeletes = PageParts("pnd", rawActions = gp::p::c::d::e::Nil) val pageWithDeletes = PageParts("pwd", rawActions = delete_p_tree::delete_d::pageNoDeletes.rawActions) "PageParts' comments can be deleted:" - { "when nothing has been deleted, nothing is deleted" in { for (postId <- List(gp.id, p.id, c.id, d.id, e.id)) pageNoDeletes.getPost_!(postId).isDeletedSomehow must be === false } "things that have not been deleted are not deleted" in { for (postId <- List(gp.id, c.id, e.id)) pageWithDeletes.getPost_!(postId).isDeletedSomehow must be === false } "a tree can be deleted" in { val page = pageWithDeletes page.getPost_!(p.id).isDeletedSomehow must be === true page.getPost_!(p.id).isTreeDeleted must be === true page.getPost_!(p.id).isPostDeleted must be === false } "a single comment can be deleted" in { val page = pageWithDeletes page.getPost_!(d.id).isDeletedSomehow must be === true page.getPost_!(d.id).isTreeDeleted must be === false page.getPost_!(d.id).isPostDeleted must be === true } } } // vim: fdm=marker et ts=2 sw=2 fo=tcqwn list
debiki/debiki-server-old
modules/debiki-core/src/test/scala/com/debiki/core/PagePartsDeletionTest.scala
Scala
agpl-3.0
3,366
package com.twitter.finagle.factory import com.twitter.finagle._ import com.twitter.finagle.client.LatencyCompensation import com.twitter.util.{Duration, Future, Timer} object TimeoutFactory { /** * A class eligible for configuring a [[com.twitter.finagle.Stackable]] * [[com.twitter.finagle.factory.TimeoutFactory]]. */ case class Param(timeout: Duration) { def mk(): (Param, Stack.Param[Param]) = (this, Param.param) } object Param { implicit val param = Stack.Param(Param(Duration.Top)) } /** * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.factory.TimeoutFactory]]. * * @param role The stack role used to identify the TimeoutFactory when inserted * into a stack. */ def module[Req, Rep](role: Stack.Role): Stackable[ServiceFactory[Req, Rep]] = { val _role = role new Stack.Module4[ Param, param.Timer, param.Label, LatencyCompensation.Compensation, ServiceFactory[Req, Rep] ] { val role: Stack.Role = _role val description: String = "Timeout service acquisition after a given period" def make( _timeout: Param, _timer: param.Timer, _label: param.Label, compensation: LatencyCompensation.Compensation, next: ServiceFactory[Req, Rep] ): TimeoutFactory[Req, Rep] = { val Param(timeout) = _timeout val param.Label(label) = _label val param.Timer(timer) = _timer val howLong = compensation.howlong val totalTimeout = howLong + timeout val exc = new ServiceTimeoutException(totalTimeout) exc.serviceName = label new TimeoutFactory(next, totalTimeout, exc, timer) } } } } /** * A factory wrapper that times out the service acquisition after the * given time. * * @see The [[https://twitter.github.io/finagle/guide/Servers.html#request-timeout user guide]] * for more details. */ class TimeoutFactory[Req, Rep]( self: ServiceFactory[Req, Rep], timeout: Duration, exception: ServiceTimeoutException, timer: Timer) extends ServiceFactoryProxy[Req, Rep](self) { private[this] val failure = Future.exception(Failure.adapt(exception, FailureFlags.Retryable)) override def apply(conn: ClientConnection): Future[Service[Req, Rep]] = { val res = super.apply(conn) res.within(timer, timeout).rescue { case exc: java.util.concurrent.TimeoutException => res.raise(exc) res.onSuccess { _.close() } failure } } }
twitter/finagle
finagle-core/src/main/scala/com/twitter/finagle/factory/TimeoutFactory.scala
Scala
apache-2.0
2,530
package org.sofi.deadman.test.view import org.sofi.deadman.component.view._ import org.sofi.deadman.messages.command._ import org.sofi.deadman.messages.query._ import org.sofi.deadman.test.TestSystem import scala.concurrent.duration._ final class AggregateViewTest extends TestSystem { // View private val viewActor = system.actorOf(AggregateView.props(aggregate, eventLog)) "An aggregate view" must { "Successfully receive a Task event" in { taskActor ! ScheduleTask("test", aggregate, "0", 1.second.toMillis) expectMsg(CommandResponse(ResponseType.SUCCESS)) viewActor ! GetTasks(QueryType.AGGREGATE, aggregate = Some(aggregate)) expectMsgPF() { case result: Tasks ⇒ result.tasks.size must be(1) result.tasks.foreach(_.aggregate must be(aggregate)) } } "Successfully clear state on a TaskExpiration event" in { // Wait for task to expire Thread.sleep(2.seconds.toMillis) // Query view state viewActor ! GetTasks(QueryType.AGGREGATE, aggregate = Some(aggregate)) expectMsgPF() { case result: Tasks ⇒ result.tasks.isEmpty must be(true) } } } }
SocialFinance/deadman-switch
core/src/test/scala/org/sofi/deadman/test/view/AggregateViewTest.scala
Scala
bsd-3-clause
1,187
package org.locationtech.geomesa.jobs.mapreduce import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat import org.apache.accumulo.core.client.security.tokens.{AuthenticationToken, PasswordToken} import org.apache.accumulo.core.security.Authorizations import org.apache.hadoop.mapreduce.Job import org.locationtech.geomesa.accumulo.AccumuloVersion._ object InputFormatBaseAdapter { def setConnectorInfo(job: Job, user: String, token: PasswordToken) = accumuloVersion match { case V15 => setConnectorInfo15(job, user, token) case V16 => setConnectorInfo16(job, user, token) case _ => setConnectorInfo16(job, user, token) } def setConnectorInfo15(job: Job, user: String, token: PasswordToken) = { val method = Class.forName("org.apache.accumulo.core.client.mapreduce.InputFormatBase") .getMethod("setConnectorInfo", classOf[Job], classOf[String], classOf[AuthenticationToken]) method.invoke(null, job, user, token) } def setConnectorInfo16(job: Job, user: String, token: PasswordToken) = { val method = classOf[AccumuloInputFormat] .getMethod("setConnectorInfo", classOf[Job], classOf[String], classOf[AuthenticationToken]) method.invoke(null, job, user, token) } def setZooKeeperInstance(job: Job, instance: String, zookeepers: String) = accumuloVersion match { case V15 => setZooKeeperInstance15(job, instance, zookeepers) case V16 => setZooKeeperInstance16(job, instance, zookeepers) case _ => setZooKeeperInstance16(job, instance, zookeepers) } def setZooKeeperInstance15(job: Job, instance: String, zookeepers: String) = { val method = Class.forName("org.apache.accumulo.core.client.mapreduce.InputFormatBase") .getMethod("setZooKeeperInstance", classOf[Job], classOf[String], classOf[String]) method.invoke(null, job, instance, zookeepers) } def setZooKeeperInstance16(job: Job, instance: String, zookeepers: String) = { val method = classOf[AccumuloInputFormat] .getMethod("setZooKeeperInstance", classOf[Job], classOf[String], classOf[String]) method.invoke(null, job, instance, zookeepers) } def setScanAuthorizations(job: Job, authorizations: Authorizations): Unit = accumuloVersion match { case V15 => setScanAuthorizations15(job, authorizations) case V16 => setScanAuthorizations16(job, authorizations) case _ => setScanAuthorizations16(job, authorizations) } def setScanAuthorizations15(job: Job, authorizations: Authorizations): Unit = { val method = Class.forName("org.apache.accumulo.core.client.mapreduce.InputFormatBase") .getMethod("setScanAuthorizations", classOf[Job], classOf[Authorizations], classOf[String]) method.invoke(null, job, authorizations) } def setScanAuthorizations16(job: Job, authorizations: Authorizations): Unit = { val method = classOf[AccumuloInputFormat].getMethod("setScanAuthorizations", classOf[Job], classOf[Authorizations], classOf[String]) method.invoke(null, job, authorizations) } }
drackaer/geomesa
geomesa-jobs/src/main/scala/org/locationtech/geomesa/jobs/mapreduce/InputFormatBaseAdapter.scala
Scala
apache-2.0
3,022
package com.automatak.dnp3.codegen import java.lang.reflect.{Constructor, Method, Field} object JNIMethod { val classOfBool = classOf[java.lang.Boolean] val classOfShort = classOf[java.lang.Short] val classOfInt = classOf[java.lang.Integer] val classOfLong = classOf[java.lang.Long] val classOfFloat = classOf[java.lang.Float] val classOfDouble = classOf[java.lang.Double] val classOfString = classOf[java.lang.String] def getType(clazz: Class[_]): String = clazz match { case `classOfBool` => "jboolean" case `classOfShort` => "jshort" case `classOfInt` => "jint" case `classOfLong` => "jlong" case `classOfFloat` => "jfloat" case `classOfDouble` => "jdouble" case `classOfString` => "jstring" case _ => { if(clazz.isPrimitive) { clazz.getTypeName match { case "void" => "void" case "boolean" => "jboolean" case "int" => "jint" case "byte" => "jbyte" case "long" => "jlong" case "float" => "jfloat" case "short" => "jshort" case "double" => "jdouble" case _ => throw new Exception("undefined primitive type: %s".format(clazz.getTypeName)) } } else { "jobject" } } } def getFieldType(clazz: Class[_]): String = clazz match { case `classOfInt` => "I" case `classOfLong` => "J" case `classOfBool` => "Z" case `classOfDouble` => "D" case _ => { if(clazz.isPrimitive) { clazz.getTypeName match { case "boolean" => "Z" case "int" => "I" case "void" => "void" case "byte" => "B" case "long" => "J" case "short" => "S" case "float" => "F" case "double" => "D" case _ => throw new Exception("undefined primitive type: %s".format(clazz.getTypeName)) } } else { clazz.fqcn } } } def getReturnType(clazz: Class[_]): String = clazz match { case `classOfInt` => "Int" case `classOfLong` => "Long" case `classOfBool` => "Boolean" case `classOfDouble` => "Double" case _ => { if(clazz.isPrimitive) { clazz.getTypeName match { case "boolean" => "Boolean" case "int" => "Int" case "long" => "Long" case "void" => "Void" case "byte" => "Byte" case "short" => "Short" case "float" => "Float" case "double" => "Double" case _ => throw new Exception("undefined primitive type: %s".format(clazz.getTypeName)) } } else { "Object" } } } def getSignature(method: Method, className: Option[String] = None) : String = { def returnType = { val value = getType(method.getReturnType) if(value == "jobject") "LocalRef<jobject>" else value } def arguments = { if(method.getParameterCount == 0) "" else { ", " + method.getParameters.map(p => "%s %s".format(getType(p.getType), p.getName)).mkString(", ") } } def prefix = className.map(n => "%s::".format(n)).getOrElse("") def additionalArgs = if(method.isStatic) "" else ", jobject instance" "%s %s%s(JNIEnv* env%s%s)".format(returnType, prefix, method.getName, additionalArgs, arguments) } def getImpl(method: Method)(implicit i: Indentation) : Iterator[String] = { def returnPrefix : String = { if(!method.getReturnType.isPrimitive()) { "return LocalRef<%s>(env, ".format(getType(method.getReturnType)) } else { if(method.isVoid) "" else "return " } } def returnSuffix : String = { if(!method.getReturnType.isPrimitive()) { ");" } else { ";" } } def args : String = if(method.getParameterCount == 0) "" else { ", " + method.getParameters.map(p => p.getName).mkString(", ") } def callMethod : String = { if(method.isStatic) { "%senv->CallStatic%sMethod(this->clazz, this->%sMethod%s)%s".format( returnPrefix, getReturnType(method.getReturnType), method.getName, args, returnSuffix ) } else { "%senv->Call%sMethod(instance, this->%sMethod%s)%s".format( returnPrefix, getReturnType(method.getReturnType), method.getName, args, returnSuffix ) } } JNIMethod.getSignature(method, Some(method.getDeclaringClass.getSimpleName)).iter ++ bracket { callMethod.iter } } def getConstructorSignature(constructor: Constructor[_], className: Option[String] = None) : String = { def arguments = constructor.getParameters.map(p => "%s %s".format(getType(p.getType), p.getName)).mkString(", ") if(arguments.isEmpty) { "LocalRef<jobject> %sinit%d(JNIEnv* env)".format(className.map(n => "%s::".format(n)).getOrElse(""), constructor.getParameterCount) } else { "LocalRef<jobject> %sinit%d(JNIEnv* env, %s)".format(className.map(n => "%s::".format(n)).getOrElse(""), constructor.getParameterCount, arguments) } } def getConstructorImpl(constructor: Constructor[_])(implicit i: Indentation) : Iterator[String] = { def args : String = if(constructor.getParameterCount == 0) "" else { ", " + constructor.getParameters.map(p => p.getName).mkString(", ") } JNIMethod.getConstructorSignature(constructor, Some(constructor.getDeclaringClass.getSimpleName)).iter ++ bracket { "return LocalRef<jobject>(env, env->NewObject(this->clazz, this->init%dConstructor%s));".format( constructor.getParameterCount, args ).iter } } def getFieldGetterImpl(f : Field)(implicit i: Indentation) : Iterator[String] = { def fieldType : String = getReturnType(f.getType) def cast : String = if(f.getType == classOf[String]) "(jstring) " else "" if(f.getType.isPrimitive) { "%s %s::get%s(JNIEnv* env, jobject instance)".format(getType(f.getType), f.getDeclaringClass.getSimpleName, f.getName).iter ++ bracket { "return %senv->Get%sField(instance, this->%sField);".format(cast, fieldType, f.getName).iter } } else { "LocalRef<%s> %s::get%s(JNIEnv* env, jobject instance)".format(getType(f.getType), f.getDeclaringClass.getSimpleName, f.getName).iter ++ bracket { "return LocalRef<%s>(env, %senv->Get%sField(instance, this->%sField));".format(getType(f.getType), cast, fieldType, f.getName).iter } } } }
thiagoralves/OpenPLC_v2
dnp3/java/codegen/src/main/scala/com/automatak/dnp3/codegen/JNIMethod.scala
Scala
gpl-3.0
6,559
/* Copyright 2012 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.scalding.typed import java.io.Serializable import scala.collection.JavaConverters._ import com.twitter.algebird.{ Fold, Semigroup, Ring, Aggregator } import com.twitter.algebird.mutable.PriorityQueueMonoid import com.twitter.scalding.typed.functions._ object KeyedListLike { /** KeyedListLike items are implicitly convertable to TypedPipe */ implicit def toTypedPipe[K, V, S[K, +V] <: KeyedListLike[K, V, S]](keyed: KeyedListLike[K, V, S]): TypedPipe[(K, V)] = keyed.toTypedPipe implicit def toTypedPipeKeyed[K, V, S[K, +V] <: KeyedListLike[K, V, S]](keyed: KeyedListLike[K, V, S]): TypedPipe.Keyed[K, V] = new TypedPipe.Keyed(keyed.toTypedPipe) } /** * This is for the case where you don't want to expose any structure * but the ability to operate on an iterator of the values */ trait KeyedList[K, +T] extends KeyedListLike[K, T, KeyedList] /** * Represents sharded lists of items of type T * There are exactly two fundamental operations: * toTypedPipe: marks the end of the grouped-on-key operations. * mapValueStream: further transforms all values, in order, one at a time, * with a function from Iterator to another Iterator */ trait KeyedListLike[K, +T, +This[K, +T] <: KeyedListLike[K, T, This]] extends Serializable { /** * End of the operations on values. From this point on the keyed structure * is lost and another shuffle is generally required to reconstruct it */ def toTypedPipe: TypedPipe[(K, T)] /** * This is like take except that the items are kept in memory * and we attempt to partially execute on the mappers if possible * For very large values of n, this could create memory pressure. * (as you may aggregate n items in a memory heap for each key) * If you get OOM issues, try to resolve using the method `take` instead. */ def bufferedTake(n: Int): This[K, T] /* Here is an example implementation, but since each subclass of KeyedListLike has its own constaints, this is always to be overriden. {@code if (n < 1) { // This means don't take anything, which is legal, but strange filterKeys(Constant(false)) } else if (n == 1) { head } else { // By default, there is no ordering. This method is overridden // in IdentityValueSortedReduce // Note, this is going to bias toward low hashcode items. // If you care which items you take, you should sort by a random number // or the value itself. val fakeOrdering: Ordering[T] = Ordering.by { v: T => v.hashCode } implicit val mon = new PriorityQueueMonoid(n)(fakeOrdering) mapValues(mon.build(_)) // Do the heap-sort on the mappers: .sum .mapValues { vs => vs.iterator.asScala } .flattenValues } } */ /** * filter keys on a predicate. More efficient than filter if you are * only looking at keys */ def filterKeys(fn: K => Boolean): This[K, T] /* an inefficient implementation is below, but * since this can always be pushed mapside, we should avoid * using this implementation, lest we accidentally forget to * implement the smart thing * {@code * mapGroup { (k: K, items: Iterator[T]) => if (fn(k)) items else Iterator.empty } * } */ /** * Operate on an Iterator[T] of all the values for each key at one time. * Prefer this to toList, when you can avoid accumulating the whole list in memory. * Prefer sum, which is partially executed map-side by default. * Use mapValueStream when you don't care about the key for the group. * * Iterator is always Non-empty. * Note, any key that has all values removed will not appear in subsequent * .mapGroup/mapValueStream */ def mapGroup[V](smfn: (K, Iterator[T]) => Iterator[V]): This[K, V] /////////// /// The below are all implemented in terms of the above: /////////// /** * Use Algebird Aggregator to do the reduction */ def aggregate[B, C](agg: Aggregator[T, B, C]): This[K, C] = mapValues[B](AggPrepare(agg)) .sum[B](agg.semigroup) .mapValues[C](AggPresent(agg)) /** * .filter(fn).toTypedPipe == .toTypedPipe.filter(fn) * It is generally better to avoid going back to a TypedPipe * as long as possible: this minimizes the times we go in * and out of cascading/hadoop types. */ def filter(fn: ((K, T)) => Boolean): This[K, T] = mapGroup(FilterGroup(fn)) /** * flatten the values * Useful after sortedTake, for instance */ def flattenValues[U](implicit ev: T <:< TraversableOnce[U]): This[K, U] = flatMapValues(Widen(SubTypes.fromEv(ev))) /** * This is just short hand for mapValueStream(identity), it makes sure the * planner sees that you want to force a shuffle. For expert tuning */ def forceToReducers: This[K, T] = mapValueStream(Identity()) /** * Use this to get the first value encountered. * prefer this to take(1). */ def head: This[K, T] = sum(HeadSemigroup[T]()) /** * This is a special case of mapValueStream, but can be optimized because it doesn't need * all the values for a given key at once. An unoptimized implementation is: * mapValueStream { _.map { fn } } * but for Grouped we can avoid resorting to mapValueStream */ def mapValues[V](fn: T => V): This[K, V] = mapGroup(MapGroupMapValues(fn)) /** * Similar to mapValues, but works like flatMap, returning a collection of outputs * for each value input. */ def flatMapValues[V](fn: T => TraversableOnce[V]): This[K, V] = mapGroup(MapGroupFlatMapValues(fn)) /** * Use this when you don't care about the key for the group, * otherwise use mapGroup */ def mapValueStream[V](smfn: Iterator[T] => Iterator[V]): This[K, V] = mapGroup(MapValueStream(smfn)) /** * Add all items according to the implicit Semigroup * If there is no sorting, we default to assuming the Semigroup is * commutative. If you don't want that, define an ordering on the Values, * sort or .forceToReducers. * * Semigroups MAY have a faster implementation of sum for iterators, * so prefer using sum/sumLeft to reduce */ def sum[U >: T](implicit sg: Semigroup[U]): This[K, U] = sumLeft[U] /** * reduce with fn which must be associative and commutative. * Like the above this can be optimized in some Grouped cases. * If you don't have a commutative operator, use reduceLeft */ def reduce[U >: T](fn: (U, U) => U): This[K, U] = sum(SemigroupFromFn(fn)) /** * Take the largest k things according to the implicit ordering. * Useful for top-k without having to call ord.reverse */ def sortedReverseTake[U >: T](k: Int)(implicit ord: Ordering[U]): This[K, Seq[U]] = sortedTake[U](k)(ord.reverse) /** * This implements bottom-k (smallest k items) on each mapper for each key, then * sends those to reducers to get the result. This is faster * than using .take if k * (number of Keys) is small enough * to fit in memory. */ def sortedTake[U >: T](k: Int)(implicit ord: Ordering[U]): This[K, Seq[U]] = { val mon = new PriorityQueueMonoid[U](k)(ord) mapValues(mon.build(_)) .sum(mon) // results in a PriorityQueue // scala can't infer the type, possibly due to the view bound on TypedPipe .mapValues(_.iterator.asScala.toList.sorted(ord)) } /** Like the above, but with a less than operation for the ordering */ def sortWithTake[U >: T](k: Int)(lessThan: (U, U) => Boolean): This[K, Seq[T]] = sortedTake(k)(Ordering.fromLessThan(lessThan)) /** For each key, Return the product of all the values */ def product[U >: T](implicit ring: Ring[U]): This[K, U] = sum(SemigroupFromProduct(ring)) /** For each key, count the number of values that satisfy a predicate */ def count(fn: T => Boolean): This[K, Long] = mapValues(Count(fn)).sum /** For each key, check to see if a predicate is true for all Values*/ def forall(fn: T => Boolean): This[K, Boolean] = mapValues(fn).product /** * For each key, selects all elements except first n ones. */ def drop(n: Int): This[K, T] = mapValueStream(Drop(n)) /** * For each key, Drops longest prefix of elements that satisfy the given predicate. */ def dropWhile(p: T => Boolean): This[K, T] = mapValueStream(DropWhile(p)) /** * For each key, Selects first n elements. Don't use this if n == 1, head is faster in that case. */ def take(n: Int): This[K, T] = if (n < 1) filterKeys(Constant(false)) // just don't keep anything else if (n == 1) head else mapValueStream(Take(n)) /** * For each key, Takes longest prefix of elements that satisfy the given predicate. */ def takeWhile(p: T => Boolean): This[K, T] = mapValueStream(TakeWhile(p)) /** * Folds are composable aggregations that make one pass over the data. * If you need to do several custom folds over the same data, use Fold.join * and this method */ def fold[V](f: Fold[T, V]): This[K, V] = mapValueStream(FoldIterator(f)) /** * If the fold depends on the key, use this method to construct * the fold for each key */ def foldWithKey[V](fn: K => Fold[T, V]): This[K, V] = mapGroup(FoldWithKeyIterator(fn)) /** For each key, fold the values. see scala.collection.Iterable.foldLeft */ def foldLeft[B](z: B)(fn: (B, T) => B): This[K, B] = mapValueStream(FoldLeftIterator(z, fn)) /** For each key, scanLeft the values. see scala.collection.Iterable.scanLeft */ def scanLeft[B](z: B)(fn: (B, T) => B): This[K, B] = mapValueStream(ScanLeftIterator(z, fn)) /** * Similar to reduce but always on the reduce-side (never optimized to mapside), * and named for the scala function. fn need not be associative and/or commutative. * Makes sense when you want to reduce, but in a particular sorted order. * the old value comes in on the left. */ def reduceLeft[U >: T](fn: (U, U) => U): This[K, U] = sumLeft[U](SemigroupFromFn(fn)) /** * Semigroups MAY have a faster implementation of sum for iterators, * so prefer using sum/sumLeft to reduce/reduceLeft */ def sumLeft[U >: T](implicit sg: Semigroup[U]): This[K, U] = mapValueStream[U](SumAll(sg)) /** For each key, give the number of values */ def size: This[K, Long] = mapValues(Constant(1L)).sum /** * For each key, give the number of unique values. WARNING: May OOM. * This assumes the values for each key can fit in memory. */ def distinctSize: This[K, Long] = toSet[T].mapValues(SizeOfSet()) /** * For each key, remove duplicate values. WARNING: May OOM. * This assumes the values for each key can fit in memory. */ def distinctValues: This[K, T] = toSet[T].flattenValues /** * AVOID THIS IF POSSIBLE * For each key, accumulate all the values into a List. WARNING: May OOM * Only use this method if you are sure all the values will fit in memory. * You really should try to ask why you need all the values, and if you * want to do some custom reduction, do it in mapGroup or mapValueStream * * This does no map-side aggregation even though it is a Monoid because * toList does not decrease the size of the data at all, so in practice * it only wastes effort to try to cache. */ def toList: This[K, List[T]] = mapValueStream(ToList[T]()) /** * AVOID THIS IF POSSIBLE * Same risks apply here as to toList: you may OOM. See toList. * Note that toSet needs to be parameterized even though toList does not. * This is because List is covariant in its type parameter in the scala API, * but Set is invariant. See: * http://stackoverflow.com/questions/676615/why-is-scalas-immutable-set-not-covariant-in-its-type */ def toSet[U >: T]: This[K, Set[U]] = mapValues(ToSet[U]()).sum /** For each key, give the maximum value*/ def max[B >: T](implicit cmp: Ordering[B]): This[K, T] = reduce(MaxOrd[T, B](cmp)) /** For each key, give the maximum value by some function*/ def maxBy[B](fn: T => B)(implicit cmp: Ordering[B]): This[K, T] = reduce(MaxOrdBy(fn, cmp)) /** For each key, give the minimum value*/ def min[B >: T](implicit cmp: Ordering[B]): This[K, T] = reduce(MinOrd[T, B](cmp)) /** For each key, give the minimum value by some function*/ def minBy[B](fn: T => B)(implicit cmp: Ordering[B]): This[K, T] = reduce(MinOrdBy(fn, cmp)) /** Use this to error if there is more than 1 value per key * Using this makes it easier to detect when data does * not have the shape you expect and to communicate to * scalding that certain optimizations are safe to do * * Note, this has no effect and is a waste to call * after sum because it is true by construction at that * point */ def requireSingleValuePerKey: This[K, T] = mapValueStream(SumAll(RequireSingleSemigroup())) /** Convert to a TypedPipe and only keep the keys */ def keys: TypedPipe[K] = toTypedPipe.keys /** Convert to a TypedPipe and only keep the values */ def values: TypedPipe[T] = toTypedPipe.values }
jzmq/scalding
scalding-core/src/main/scala/com/twitter/scalding/typed/KeyedList.scala
Scala
apache-2.0
13,656
package me.eax.examples.thrift.tests import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import me.eax.examples.thrift.game._ import me.eax.examples.thrift.tests.gen._ import org.apache.thrift.protocol._ import org.apache.thrift.transport._ import org.scalatest._ import org.scalatest.prop._ class BinaryProtocol extends FunSpec with Matchers with GeneratorDrivenPropertyChecks { describe("Thrift") { it("serializes and deserializes using TBinaryProtocol") { forAll { (data1: Hero) => val bytes = { val out = new ByteArrayOutputStream() data1.write(new TBinaryProtocol(new TIOStreamTransport(out))) out.toByteArray } val data2 = { val stream = new ByteArrayInputStream(bytes) Hero.decode(new TBinaryProtocol(new TIOStreamTransport(stream))) } data1 shouldBe data2 } } it("serializes and deserializes lists using TBinaryProtocol") { forAll { (data1: List[Hero]) => val bytes = { val out = new ByteArrayOutputStream() val proto = new TBinaryProtocol(new TIOStreamTransport(out)) proto.writeListBegin(new TList(TType.STRUCT, data1.size)) // or Map, or Set data1.foreach(_.write(proto)) proto.writeListEnd() out.toByteArray } val data2 = { val stream = new ByteArrayInputStream(bytes) val proto = new TBinaryProtocol(new TIOStreamTransport(stream)) val listInfo = proto.readListBegin() val res = (for(_ <- 1 to listInfo.size) yield Hero.decode(proto)).toList proto.readListEnd() res } data1 shouldBe data2 } } } }
afiskon/scala-thrift-example
src/test/scala/me/eax/examples/thrift/tests/BinaryProtocol.scala
Scala
mit
1,721
package pl.setblack.lsa.cryptotpyrc import scala.concurrent.Future trait CryptoKey { def export : Future[String] } trait PrivateKey extends CryptoKey trait PublicKey extends CryptoKey case class KeyPair[PUBLIC, PRIVATE](pub : PUBLIC, priv: PRIVATE) { }
lightserver/cryptotpyrc
app/shared/src/main/scala/pl/setblack/lsa/cryptotpyrc/CryptoKey.scala
Scala
bsd-3-clause
260
/* Copyright 2013 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.summingbird.online import com.twitter.summingbird.online.option._ import com.twitter.summingbird.option._ import com.twitter.util.Duration /* * These are our set of constants that are a base set of sane ones for all online platforms. * This shouldn't be directly referred to by user code, hence private[summingbird]. The platform itself * should expose its customized set of Constants. */ private[summingbird] trait OnlineDefaultConstants { val DEFAULT_SOURCE_PARALLELISM = SourceParallelism(1) val DEFAULT_FM_PARALLELISM = FlatMapParallelism(5) val DEFAULT_FM_CACHE = CacheSize(0) val DEFAULT_SUMMER_PARALLELISM = SummerParallelism(5) val DEFAULT_ONLINE_SUCCESS_HANDLER = OnlineSuccessHandler(_ => {}) val DEFAULT_ONLINE_EXCEPTION_HANDLER = OnlineExceptionHandler(Map.empty) val DEFAULT_SUMMER_CACHE = CacheSize(0) val DEFAULT_MONOID_IS_COMMUTATIVE = MonoidIsCommutative.default val DEFAULT_MAX_WAITING_FUTURES = MaxWaitingFutures(10) val DEFAULT_MAX_FUTURE_WAIT_TIME = MaxFutureWaitTime(Duration.fromSeconds(60)) val DEFAULT_FLUSH_FREQUENCY = FlushFrequency(Duration.fromSeconds(10)) val DEFAULT_USE_ASYNC_CACHE = UseAsyncCache(false) val DEFAULT_ASYNC_POOL_SIZE = AsyncPoolSize(Runtime.getRuntime().availableProcessors()) val DEFAULT_SOFT_MEMORY_FLUSH_PERCENT = SoftMemoryFlushPercent(80.0F) val DEFAULT_VALUE_COMBINER_CACHE_SIZE = ValueCombinerCacheSize(100) val DEFAULT_MAX_EMIT_PER_EXECUTE = MaxEmitPerExecute(Int.MaxValue) val DEFAULT_SUMMER_BATCH_MULTIPLIER = SummerBatchMultiplier(100) val DEFAULT_FM_MERGEABLE_WITH_SOURCE = FMMergeableWithSource.default } private[summingbird] object OnlineDefaultConstants extends OnlineDefaultConstants
twitter/summingbird
summingbird-online/src/main/scala/com/twitter/summingbird/online/OnlineDefaultConstants.scala
Scala
apache-2.0
2,270
/* ========================================================================================= * Copyright © 2013-2014 the kamon project <http://kamon.io/> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. * ========================================================================================= */ import sbt._ import Keys._ /** Sigar distribution repackaging. */ object SigarRepack { import UnzipTask._ import sbt.Package._ import Dependencies._ import com.typesafe.sbt.osgi.OsgiKeys /** Helper settings for extracted sigar sources. */ lazy val sigarSources = SettingKey[File]("sigar-sources", "Location of extracted sigar sources.") /** Helper settings for extracted sigar javadoc. */ lazy val sigarJavadoc = SettingKey[File]("sigar-javadoc", "Location of extracted sigar javadoc.") /** Native o/s libraries folder inside kamon-sigar.jar. Hardcoded in [kamon.sigar.SigarProvisioner.java]. */ lazy val nativeFolder = "native" /** Full class name of the sigar activator. Provides http://wiki.osgi.org/wiki/Bundle-Activator. */ lazy val activatorClass = "kamon.sigar.SigarActivator" /** Full class name of the sigar load time agent. Provides Agent-Class and Premain-Class contracts. */ lazy val agentClass = "kamon.sigar.SigarAgent" /** Full class name of the sigar main class. Provides sigar command line interface. */ lazy val mainClass = "org.hyperic.sigar.cmd.Runner" /** A name filter which matches java source files. */ lazy val sourceFilter: NameFilter = new PatternFilter( java.util.regex.Pattern.compile("""(.+\\.java)""") ) /** A name filter which matches document files. */ lazy val javadocFilter: NameFilter = new PatternFilter( java.util.regex.Pattern.compile("""(.+\\.html)""") ) /** A name filter which matches java class files. */ lazy val classFilter: NameFilter = new PatternFilter( java.util.regex.Pattern.compile("""(.+\\.class)""") ) /** A name filter which matches native o/s libraries. */ lazy val nativeFilter: NameFilter = new PatternFilter( java.util.regex.Pattern.compile("""(.+\\.dll)|(.+\\.dylib)|(.+\\.lib)|(.+\\.sl)|(.+\\.so)""") ) /** Required final jar manifest headers. Present in both default and OSGI packaging. */ lazy val manifestHeaders = Seq( ("Main-Class", mainClass), ("Agent-Class", agentClass), ("Premain-Class", agentClass), ("Embedded-Sigar-Origin", redhatRepo.root), ("Embedded-Sigar-Licence", sigarLicence), ("Embedded-Sigar-Version", sigarVersion), ("Embedded-Sigar-BuildVersion", sigarBuildVersion) ) /** Repackage origial Sigar classes, sources and native libraries. */ lazy val settings = Seq( /** Hide external artifacts from pom.xml. */ ivyConfigurations += external, /** Location of sigar source extraction. */ sigarSources := target.value / "sigar-sources", /** Location of sigar source extraction. */ sigarJavadoc := target.value / "sigar-javadoc", /** Origianl sigar resources extraction and relocation. */ unzipTask := { val log = streams.value.log val report = update.value log.info(s"Unpack SRC: ${sigarJar}") val srcTarget = sigarSources.value val srcArtifact = locateArtifact(report, sigarJar, "sources") val srcFileList = extractArtifact(srcArtifact, srcTarget, sourceFilter, false) log.info(s"Unpack DOC: ${sigarJar}") val docTarget = sigarJavadoc.value val docArtifact = locateArtifact(report, sigarJar, "javadoc") val docFileList = extractArtifact(srcArtifact, srcTarget, javadocFilter, false) log.info(s"Unpack JAR: ${sigarJar}") val jarTarget = (classDirectory in Compile).value val jarArtifact = locateArtifact(report, sigarJar) val jarFileList = extractArtifact(jarArtifact, jarTarget, classFilter, false) log.info(s"Unpack ZIP: ${sigarZip}") val zipTarget = jarTarget / nativeFolder val zipArtifact = locateArtifact(report, sigarZip) val zipFileList = extractArtifact(zipArtifact, zipTarget, nativeFilter, true) }, /** Unpack sigar resources before compile. */ (Keys.compile in Compile) <<= (Keys.compile in Compile) dependsOn unzipTask, /** Include original sigar sources as our own. */ (packageSrc in Compile) <<= (packageSrc in Compile) dependsOn unzipTask, (mappings in (Compile, packageSrc)) ++= { val base = sigarSources.value val finder = base ** sourceFilter val pairList = finder x relativeTo(base) pairList }, /** Ensure JVM agent packaging with default manifest. */ packageOptions in (Compile, packageBin) += ManifestAttributes(manifestHeaders: _*), /** Invoke verbose tesing in separate JVM. */ testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), fork in Test := true, exportJars := true ) }
kamon-io/sigar-loader
project/SigarRepack.scala
Scala
apache-2.0
5,341
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.graph.scala.test import org.apache.flink.api.scala._ import org.apache.flink.graph.{Edge, Vertex} object TestGraphUtils { def getLongLongVertexData(env: ExecutionEnvironment): DataSet[Vertex[Long, Long]] = { env.fromCollection(getLongLongVertices) } def getLongLongEdgeData(env: ExecutionEnvironment): DataSet[Edge[Long, Long]] = { env.fromCollection(getLongLongEdges) } def getLongLongVertices: List[Vertex[Long, Long]] = { List( new Vertex[Long, Long](1L, 1L), new Vertex[Long, Long](2L, 2L), new Vertex[Long, Long](3L, 3L), new Vertex[Long, Long](4L, 4L), new Vertex[Long, Long](5L, 5L) ) } def getLongLongEdges: List[Edge[Long, Long]] = { List( new Edge[Long, Long](1L, 2L, 12L), new Edge[Long, Long](1L, 3L, 13L), new Edge[Long, Long](2L, 3L, 23L), new Edge[Long, Long](3L, 4L, 34L), new Edge[Long, Long](3L, 5L, 35L), new Edge[Long, Long](4L, 5L, 45L), new Edge[Long, Long](5L, 1L, 51L) ) } }
hequn8128/flink
flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/TestGraphUtils.scala
Scala
apache-2.0
1,961
import language.higherKinds trait T { def t = 0 } trait Foo { def coflatMap[A <: T](f: A): A } object O extends Foo { def coflatMap[A <: T](f: A) = { val f2 = coflatMap(f) // inferred in 2.9.2 / 2.10.0 as [Nothing] f2.t // so this does't type check. f2 } } // Why? When a return type is inherited, the derived method // symbol first gets a preliminary type assigned, based on the // 1) method type of a unique matching super member // 2) viewed as a member type of the inheritor (to substitute, // e.g. class type parameters) // 3) substituted to replace the super-method's type parameters // with those of the inheritor // 4) dissected to take just the return type wrapped in thisMethodType(). // // In Scala 2.10.0 and earlier, this preliminary method type // // 1) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329 // 2) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329 // 3) (<param> f#12556: A#11336)A#11336 // 4) [A#11336 <: <empty>#3.this.T#7068](<param> f#12552: A#11337&0)A#11336 // // The type #4 from the old version is problematic: the parameter is typed with // a skolem for the type parameter `A`. It won't be considered to match the // method it overrides, instead they are seen as being overloaded, and type inference // goes awry (Nothing is inferred as the type argument for the recursive call // to coflatMap. // // The Namers patch adds one step here: it subsitutes the type parameter symbols // for the skolems: // // https://github.com/scala/scala/commit/b74c33eb#L2R1014 // // So we end up with a method symbol info: // // 5) [A#11336 <: <empty>#3.this.T#7068](<param> f#12505: A#11336)A#11336 // // This *does* match the method in the super class, and type inference // chooses the correct type argument.
yusuke2255/dotty
tests/untried/pos/t7200b.scala
Scala
bsd-3-clause
1,835
/* Copyright (C) 2008-2014 University of Massachusetts Amherst. This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible) http://factorie.cs.umass.edu, http://github.com/factorie Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cc.factorie.app.topics.lda import cc.factorie._ import cc.factorie.directed._ import cc.factorie.app.nlp.lexicon.StopWords import scala.collection.mutable.HashMap import java.io.{PrintWriter, FileWriter, File, BufferedReader, InputStreamReader, FileInputStream} import collection.mutable.{ArrayBuffer, HashSet, HashMap, LinkedHashMap} import cc.factorie.directed._ import cc.factorie.optimize.TrainerHelpers import java.util.concurrent.Executors import cc.factorie.variable._ import cc.factorie.util.DefaultCmdOptions import cc.factorie.app.topics.lda /** Typical recommended value for alpha1 is 50/numTopics. */ class LDA(val wordSeqDomain: CategoricalSeqDomain[String], numTopics: Int = 10, alpha1:Double = 0.1, val beta1:Double = 0.01, val burnIn: Int = 100)(implicit val model:MutableDirectedModel, implicit val random: scala.util.Random) { def this(numTopics:Int, alpha1:Double, beta1:Double, burnIn:Int)(implicit random: scala.util.Random) = this(new CategoricalSeqDomain[String], numTopics, alpha1, beta1, burnIn)(DirectedModel(), random) var diagnosticName = "" /** The per-word variable that indicates which topic it comes from. */ object ZDomain extends DiscreteDomain(numTopics) object ZSeqDomain extends DiscreteSeqDomain { def elementDomain = ZDomain } class Zs extends DiscreteSeqVariable { def this(initial:Seq[Int]) = { this(); this.appendInts(initial) } def this(initial:Array[Int]) = { this(); this.appendInts(initial) } def this(len:Int) = this(new Array[Int](len)) // relies on new Array being filled with 0s def domain = ZSeqDomain //def words: Document = childFactors.first.asInstanceOf[PlatedDiscreteMixture.Factor]._1.asInstanceOf[Document] } def newZs: Zs = new Zs // Because new lda.Zs won't work, because lda isn't a stable identifier. def wordDomain = wordSeqDomain.elementDomain /** The prior over per-topic word distribution */ val betas = MassesVariable.growableUniform(wordDomain, beta1) /** The prior over per-document topic distribution */ val alphas = MassesVariable.dense(numTopics, alpha1) var maxDocSize:Int = 0 var docLengthCounts: Array[Int] = null /** The collection of all documents used to fit the parameters of this LDA model. */ private val documentMap = new LinkedHashMap[String,Doc] { def +=(d:Document): Unit = this(d.name) = d } def documents: Iterable[Doc] = documentMap.values def getDocument(name:String) : Doc = documentMap.getOrElse(name, null) def nameDocumentMap: scala.collection.Map[String,Doc] = documentMap /** The per-topic distribution over words. FiniteMixture is a Seq of Dirichlet-distributed Proportions. */ val phis = Mixture(numTopics)(ProportionsVariable.growableDense(wordDomain) ~ Dirichlet(betas)) protected def setupDocument(doc:Doc, m:MutableDirectedModel, random: scala.util.Random): Unit = { implicit val rng = random require(wordSeqDomain eq doc.ws.domain) require(doc.ws.length > 0) // was > 1 if (doc.theta eq null) doc.theta = ProportionsVariable.sortedSparseCounts(numTopics) else require (doc.theta.value.length == numTopics) doc.theta.~(Dirichlet(alphas))(m) if (doc.zs eq null) doc.zs = new Zs(Array.tabulate(doc.ws.length)(i => random.nextInt(numTopics))) // Could also initialize to all 0 for more efficient sparse inference else { require(doc.zs.length == doc.ws.length, "doc.ws.length=%d != doc.zs.length=%d".format(doc.ws.length, doc.zs.length)) require(doc.zs.domain.elementDomain.size == numTopics, "zs.domain.elementDomain.size=%d != numTopics=%d".format(doc.zs.domain.elementDomain.size, numTopics)) } doc.zs.~(PlatedDiscrete(doc.theta))(m) doc.ws.~(PlatedCategoricalMixture(phis, doc.zs))(m) } /** Add a document to the LDA model. */ def addDocument(doc:Doc, random: scala.util.Random): Unit = { if (documentMap.contains(doc.name)) throw new Error(this.toString+" already contains document "+doc.name) setupDocument(doc, model, random) documentMap(doc.name) = doc maxDocSize = math.max(maxDocSize, doc.ws.length) } def removeDocument(doc:Doc): Unit = { documentMap.remove(doc.name) model -= model.parentFactor(doc.theta) model -= model.parentFactor(doc.zs) model -= model.parentFactor(doc.ws) } /** Infer doc.theta. If the document is not already part of this LDA, do not add it and do not collapse anything that would effect this LDA. */ def inferDocumentTheta(doc:Doc, iterations:Int = 10): Unit = { if (model.parentFactor(doc.ws) ne null) { val sampler = new CollapsedGibbsSampler(Seq(doc.theta), model) for (i <- 1 to iterations) sampler.process(doc.zs) } else { val m = DirectedModel() setupDocument(doc, m, random) //println("LDA.inferDocumentTheta: model factors = "+m.allFactors) //println("LDA.inferDocumentTheta: zs factors = "+m.factors(Seq(doc.zs))) val sampler = new CollapsedGibbsSampler(Seq(doc.theta), m) for (i <- 1 to iterations) sampler.process(doc.zs) } } /** Run a collapsed Gibbs sampler to estimate the parameters of the LDA model. */ def inferTopics(iterations:Int = 60, fitAlphaInterval:Int = Int.MaxValue, diagnosticInterval:Int = 10, diagnosticShowPhrases:Boolean = false): Unit = { val sampler = SparseLDAInferencer(ZDomain, wordDomain, documents, alphas.value, beta1, model) if(fitAlphaInterval != Int.MaxValue) { sampler.initializeHistograms(maxDocSize) docLengthCounts = Array.fill[Int](maxDocSize+1)(0) for (doc <- documents) docLengthCounts(doc.ws.length) += 1 } println("Collapsing finished. Starting sampling iterations:") //sampler.debug = debug val startTime = System.currentTimeMillis for (i <- 1 to iterations) { val timeToEstAlpha = (i % fitAlphaInterval == 0) && i > burnIn val startIterationTime = System.currentTimeMillis for (doc <- documents) sampler.process(doc.zs.asInstanceOf[Zs], timeToEstAlpha) val timeSecs = (System.currentTimeMillis - startIterationTime)/1000.0 if (timeSecs < 2.0) print(".") else print("%.0fsec ".format(timeSecs)); Console.flush() if (i % diagnosticInterval == 0) { println ("\\n"+diagnosticName+"\\nIteration "+i) sampler.export(phis) if (diagnosticShowPhrases) println(topicsWordsAndPhrasesSummary(10,10)) else println(topicsSummary(10)) } /*if (i % fitAlphaInterval == 0) { sampler.exportThetas(documents) MaximizeDirichletByMomentMatching(alphas, model) sampler.resetSmoothing(alphas.tensor, beta1) println("alpha = " + alphas.tensor.toSeq.mkString(" ")) }*/ if (timeToEstAlpha){ LearnDirichletUsingFrequencyHistograms(alphas, sampler.topicDocCounts, docLengthCounts) sampler.resetSmoothing(alphas.value, beta1) sampler.initializeHistograms(maxDocSize) //println("alpha = " + alphas.tensor.toSeq.mkString(" ")) } } //println("Finished in "+((System.currentTimeMillis-startTime)/1000.0)+" seconds") // Set original uncollapsed parameters to mean of collapsed parameters sampler.export(phis, beta1, numTopics) sampler.exportThetas(documents) } // Not finished def inferTopicsMultithreaded(numThreads:Int, iterations:Int = 60, fitAlphaInterval:Int = Int.MaxValue, diagnosticInterval:Int = 10, diagnosticShowPhrases:Boolean = false): Unit = { if (fitAlphaInterval != Int.MaxValue) throw new Error("LDA.inferTopicsMultithreaded.fitAlphaInterval not yet implemented.") val docSubsets = documents.grouped(documents.size/numThreads + 1).toSeq //Get global Nt and Nw,t for all the documents val phiCounts = new DiscreteMixtureCounts(wordDomain, ZDomain) for (doc <- documents) phiCounts.incrementFactor(model.parentFactor(doc.ws).asInstanceOf[PlatedCategoricalMixture.Factor], 1) val numTypes = wordDomain.length val phiCountsArray = new Array[DiscreteMixtureCounts[String]](numThreads) val samplersArray = new Array[SparseLDAInferencer](numThreads) //Copy the counts to each thread val pool = Executors.newFixedThreadPool(numThreads) try { util.Threading.parForeach(0 until numThreads, pool)(threadID => { phiCountsArray(threadID) = new DiscreteMixtureCounts(wordDomain, ZDomain) val localPhiCounts = new DiscreteMixtureCounts(wordDomain, ZDomain) for (w <- 0 until numTypes) phiCounts(w).forCounts((t,c) => phiCountsArray(threadID).increment(w, t, c)) for (doc <- docSubsets(threadID)) localPhiCounts.incrementFactor(model.parentFactor(doc.ws).asInstanceOf[PlatedCategoricalMixture.Factor], 1) samplersArray(threadID) = new SparseLDAInferencer(ZDomain, wordDomain, phiCountsArray(threadID), alphas.value, beta1, model, random, localPhiCounts) }) for (iteration <- 1 to iterations) { val startIterationTime = System.currentTimeMillis util.Threading.parForeach(0 until numThreads, pool)(threadID => { samplersArray(threadID).resetCached() for (doc <- docSubsets(threadID)) samplersArray(threadID).process(doc.zs.asInstanceOf[Zs]) }) //Sum per thread counts java.util.Arrays.fill(phiCounts.mixtureCounts, 0) (0 until numTopics).par.foreach(t => { for (threadID <- 0 until numThreads) phiCounts.mixtureCounts(t) += samplersArray(threadID).localPhiCounts.mixtureCounts(t) }) (0 until numTypes).par.foreach(w => { phiCounts(w).clear() for (threadID <- 0 until numThreads) samplersArray(threadID).localPhiCounts(w).forCounts((t, c) => phiCounts(w).incrementCountAtIndex(t, c)) }) //Copy global counts to per thread counts util.Threading.parForeach(0 until numThreads, pool) (threadID => { System.arraycopy(phiCounts.mixtureCounts, 0, phiCountsArray(threadID).mixtureCounts, 0, numTopics) for (w <- 0 until numTypes) phiCountsArray(threadID)(w).copyBuffer(phiCounts(w)) }) if (iteration % diagnosticInterval == 0) { println ("Iteration "+iteration) maximizePhisAndThetas if (diagnosticShowPhrases) println(topicsWordsAndPhrasesSummary(10,10)) else println(topicsSummary(10)) } val timeSecs = (System.currentTimeMillis - startIterationTime)/1000.0 if (timeSecs < 2.0) print(".") else print("%.0fsec ".format(timeSecs)); Console.flush() } } finally pool.shutdown() maximizePhisAndThetas } def topicWords(topicIndex:Int, numWords:Int = 10): Seq[String] = phis(topicIndex).value.top(numWords).map(dp => wordDomain.category(dp.index)) def topicWordsArray(topicIndex:Int, numWords:Int): Array[String] = topicWords(topicIndex, numWords).toArray def topicSummary(topicIndex:Int, numWords:Int = 10): String = "Topic %3d %s %d %f".format(topicIndex, topicWords(topicIndex, numWords).mkString(" "), phis(topicIndex).value.massTotal.toInt, alphas.value(topicIndex)) def topicsSummary(numWords:Int = 10): String = Range(0, numTopics).map(topicSummary(_, numWords)).mkString("\\n") def topicsPhraseCounts = new TopicPhraseCounts(numTopics) ++= documents def topicsWordsAndPhrasesSummary(numWords: Int = 10, numPhrases: Int = 10): String = { val sb = new StringBuffer val tpc = topicsPhraseCounts for (i <- 0 until numTopics) { sb.append(topicSummary(i, numWords)) sb.append("\\n ") // Matching "Topic 333 ", plus one extra space for indentation val tp = tpc.topicPhrases(i, numPhrases) sb.append(tp.mkString(" ")) sb.append("\\n") } sb.toString } def maximizePhisAndThetas(): Unit = { phis.foreach(_.value.masses.zero()) // TODO What about the priors on phis and theta?? -akm for (doc <- documents) { val len = doc.ws.length var i = 0 while (i < len) { val zi = doc.zs.intValue(i) phis(zi).value.masses.+=(doc.ws.intValue(i), 1.0) doc.theta.value.masses.+=(zi, 1.0) i += 1 } } } def saveWordsZs(file:File): Unit = { val pw = new PrintWriter(file) for (doc <- documents) doc.writeNameWordsZs(pw) } def addDocumentsFromWordZs(file:File, minDocLength:Int, random: scala.util.Random): Unit = { import scala.util.control.Breaks._ val reader = new BufferedReader(new InputStreamReader(new FileInputStream(file))) reader.mark(512) val alphasName = reader.readLine() if (alphasName == "/alphas") { // If they are present, read the alpha parameters. val alphasString = reader.readLine(); alphas.value := alphasString.split(" ").map(_.toDouble) // set lda.alphas reader.readLine() // consume delimiting newline println("Read alphas "+alphas.value.mkString(" ")) } else reader.reset() // Put the reader back to the read position when reader.mark was called breakable { while (true) { val doc = new Document(wordSeqDomain, "", Nil) // doc.name will be set in doc.readNameWordsZs doc.zs = new Zs(Nil) val numWords = doc.readNameWordsZs(reader) if (numWords < 0) break() else if (numWords >= minDocLength) addDocument(doc, random) // Skip documents that have only one word because inference can't handle them //else System.err.println("addDocumentsFromWordZs skipping document %s: only %d words found.".format(doc.name, numWords)) }} reader.close() maximizePhisAndThetas } } object LDA extends LDACmd class LDAOpts extends DefaultCmdOptions { val numTopics = new CmdOption("num-topics", 10, "N", "Number of topics.", false, 't') val alpha = new CmdOption("alpha", 0.1, "N", "Dirichlet parameter for per-document topic proportions.") val beta = new CmdOption("beta", 0.01, "N", "Dirichlet parameter for per-topic word proportions.") val numThreads = new CmdOption("num-threads", 1, "N", "Number of threads for multithreaded topic inference.") val numIterations = new CmdOption("num-iterations", 50, "N", "Number of iterations of inference.", false, 'i') val diagnostic = new CmdOption("diagnostic-interval", 10, "N", "Number of iterations between each diagnostic printing of intermediate results.", false , 'd') val diagnosticPhrases= new CmdOption("diagnostic-phrases", false, "true|false", "If true diagnostic printing will include multi-word phrases.") val fitAlpha = new CmdOption("fit-alpha-interval", Int.MaxValue, "N", "Number of iterations between each re-estimation of prior on per-document topic distribution.") val optimizeBurnIn =new CmdOption("optimize-burn-in", 100, "N", "Number of iterations to run before the first estimation of the alpha parameters") val tokenRegex = new CmdOption("token-regex", "\\\\p{Alpha}+", "REGEX", "Regular expression for segmenting tokens.") val readDirs = new CmdOption("read-dirs", List(""), "DIR...", "Space-(or comma)-separated list of directories containing plain text input files.") val readLines = new CmdOption("read-lines", "", "FILENAME", "File containing lines of text, one for each document.") val readLinesRegex= new CmdOption("read-lines-regex", "", "REGEX", "Regular expression with parens around the portion of the line that should be read as the text of the document.") val readLinesRegexGroups= new CmdOption("read-lines-regex-groups", List(1), "GROUPNUMS", "The --read-lines-regex group numbers from which to grab the text of the document.") val readLinesRegexPrint = new CmdOption("read-lines-regex-print", false, "BOOL", "Print the --read-lines-regex match that will become the text of the document.") val writeDocs = new CmdOption("write-docs", "lda-docs.txt", "FILENAME", "Save LDA state, writing document names, words and z assignments") val readDocs = new CmdOption("read-docs", "lda-docs.txt", "FILENAME", "Add documents from filename, reading document names, words and z assignments; store documents; can then add more documents or do more inference.") val readPhis = new CmdOption("read-phis", "lda-docs.txt", "FILENAME", "Read documents from filename, but only use them to increment topic word counts; does not store documents (conserving memory); cannot do more inference, nor print phrases") { override def invoke = { numIterations.setValue(0)} } val maxNumDocs = new CmdOption("max-num-docs", Int.MaxValue, "N", "The maximum number of documents to read.") val printTopics = new CmdOption("print-topics", 20, "N", "Just before exiting print top N words for each topic.") val printPhrases = new CmdOption("print-topics-phrases", 20, "N", "Just before exiting print top N phrases for each topic.") val thetaServer = new CmdOption("theta-server", 50, "N", "Read from sdin newline-separated documents, and output a theta topic distribution for each, estimated by N iterations of sampling on the document.") val verbose = new CmdOption("verbose", false, "BOOLEAN", "Turn on verbose output") } class LDACmd { /* Potentail stop topics: -Topic 47 formal verification model specification methods systems software checking ada analysis -Topic 35 programming sigplan java generation implementation language comp programs sys design -Topic 83 ieee trans syst expert circuits systems eng esa appl computers 1 0.100000 */ import scala.collection.mutable.ArrayBuffer import scala.util.control.Breaks._ import java.io.Reader import cc.factorie.app.strings.StringSegmenter var verbose = false val minDocLength = 3 def newDocument(domain:CategoricalSeqDomain[String], name:String, contents:Reader, segmenter:StringSegmenter): Doc = Document.fromReader(domain, name, contents, segmenter) def main(args:Array[String]): Unit = { object opts extends LDAOpts implicit val random = new scala.util.Random(0) opts.parse(args) verbose = opts.verbose.value /** The domain of the words in documents */ object WordSeqDomain extends CategoricalSeqDomain[String] val model = DirectedModel() val lda = new LDA(WordSeqDomain, opts.numTopics.value, opts.alpha.value, opts.beta.value, opts.optimizeBurnIn.value)(model,random) val mySegmenter = new cc.factorie.app.strings.RegexSegmenter(opts.tokenRegex.value.r) if (opts.readDirs.wasInvoked) { for (directory <- opts.readDirs.value) { val dir = new File(directory); if (!dir.isDirectory) { System.err.println(directory+" is not a directory."); System.exit(-1) } println("Reading files from directory " + directory) breakable { for (file <- new File(directory).listFiles; if file.isFile) { if (lda.documents.size == opts.maxNumDocs.value) break() val doc = Document.fromFile(WordSeqDomain, file, "UTF-8", segmenter = mySegmenter) if (doc.length >= minDocLength) lda.addDocument(doc, random) if (lda.documents.size % 1000 == 0) { print(" "+lda.documents.size); Console.flush() }; if (lda.documents.size % 10000 == 0) println() }} //println() } } if (opts.readLines.wasInvoked) { val name = if (opts.readLines.value == "-") "stdin" else opts.readLines.value val source = if (opts.readLines.value == "-") scala.io.Source.stdin else scala.io.Source.fromFile(new File(opts.readLines.value)) var count = 0 breakable { for (line <- source.getLines()) { if (lda.documents.size == opts.maxNumDocs.value) break() val text: String = if (!opts.readLinesRegex.wasInvoked) line else { val textbuffer = new StringBuffer for (groupIndex <- opts.readLinesRegexGroups.value) { val mi = opts.readLinesRegex.value.r.findFirstMatchIn(line).getOrElse(throw new Error("No regex match for --read-lines-regex in "+line)) if (mi.groupCount >= groupIndex) textbuffer append mi.group(groupIndex) else throw new Error("No group found with index "+groupIndex) } textbuffer.toString } if (text eq null) throw new Error("No () group for --read-lines-regex in "+line) if (opts.readLinesRegexPrint.value) println(text) val doc = Document.fromString(WordSeqDomain, name+":"+count, text, segmenter = mySegmenter) if (doc.length >= minDocLength) lda.addDocument(doc, random) count += 1 if (count % 1000 == 0) { print(" "+count); Console.flush() }; if (count % 10000 == 0) println() }} source.close() } if (opts.readDocs.wasInvoked) { val file = new File(opts.readDocs.value) val reader = new BufferedReader(new InputStreamReader(new FileInputStream(file))) reader.mark(512) val alphasName = reader.readLine() if (alphasName == "/alphas") { // If they are present, read the alpha parameters. val alphasString = reader.readLine(); lda.alphas.value := alphasString.split(" ").map(_.toDouble) // set lda.alphas reader.readLine() // consume delimiting newline println("Read alphas "+lda.alphas.value.mkString(" ")) } else reader.reset() // Put the reader back to the read position when reader.mark was called breakable { while (true) { if (lda.documents.size == opts.maxNumDocs.value) break() val doc = new Document(WordSeqDomain, "", Nil) // doc.name will be set in doc.readNameWordsZs doc.zs = new lda.Zs(Nil) val numWords = doc.readNameWordsZs(reader) if (numWords < 0) break() else if (numWords >= minDocLength) lda.addDocument(doc, random) // Skip documents that have only one word because inference can't handle them else System.err.println("--read-docs skipping document %s: only %d words found.".format(doc.name, numWords)) }} reader.close() lda.maximizePhisAndThetas //println(lda.documents.head.ws.categoryValues.mkString(" ")) //println(lda.documents.head.zs.intValues.mkString(" ")) } if (opts.readPhis.wasInvoked) { val file = new File(opts.readPhis.value) val reader = new BufferedReader(new InputStreamReader(new FileInputStream(file))) val alphasName = reader.readLine(); if (alphasName != "/alphas") throw new Error("/alphas not found") val alphasString = reader.readLine(); lda.alphas.value := alphasString.split(" ").map(_.toDouble) // set lda.alphas reader.readLine() // consume delimiting newline println("Read alphas "+lda.alphas.value.mkString(" ")) breakable { while (true) { if (lda.documents.size == opts.maxNumDocs.value) break() val doc = new Document(WordSeqDomain, "", Nil) // doc.name will be set in doc.readNameWordsZs doc.zs = new lda.Zs(Nil) val numWords = doc.readNameWordsZs(reader) if (numWords < 0) break() else if (numWords >= minDocLength) { val len = doc.ws.length var i = 0 while (i < len) { val zi = doc.zs.intValue(i) lda.phis(zi).value.+=(doc.ws.intValue(i), 1.0) i += 1 } } else System.err.println("--read-docs skipping document %s: only %d words found.".format(doc.name, numWords)) // Skip documents that have only one word because inference can't handle them }} reader.close() } else if (lda.documents.size == 0) { System.err.println("You must specific either the --input-dirs or --input-lines options to provide documents."); System.exit(-1) } println("\\nRead "+lda.documents.size+" documents, "+WordSeqDomain.elementDomain.size+" word types, "+lda.documents.map(_.ws.length).sum+" word tokens.") // Run inference to discover topics if (opts.numIterations.value > 0) { val startTime = System.currentTimeMillis if (opts.numThreads.value > 1) lda.inferTopicsMultithreaded(opts.numThreads.value, opts.numIterations.value, diagnosticInterval = opts.diagnostic.value, diagnosticShowPhrases = opts.diagnosticPhrases.value) else lda.inferTopics(opts.numIterations.value, fitAlphaInterval = opts.fitAlpha.value, diagnosticInterval = opts.diagnostic.value, diagnosticShowPhrases = opts.diagnosticPhrases.value) println("Finished in " + ((System.currentTimeMillis - startTime) / 1000.0) + " seconds") } //testSaveLoad(lda) //println("topics.LDA temporary test") //val doc1 = lda.documents.head //lda.removeDocument(doc1) //lda.inferDocumentTheta(doc1) //println(doc1.ws.categoryValues.take(10).mkString(" ")) //println(doc1.theta) if (opts.writeDocs.wasInvoked) { val file = new File(opts.writeDocs.value) val pw = new PrintWriter(file) pw.println("/alphas") pw.println(lda.alphas.value.mkString(" ")) pw.println() lda.documents.foreach(_.writeNameWordsZs(pw)) pw.close() } if (opts.printTopics.wasInvoked) println(lda.topicsSummary(opts.printTopics.value)) if (opts.printPhrases.wasInvoked) println(lda.topicsWordsAndPhrasesSummary(opts.printPhrases.value, opts.printPhrases.value)) //println(lda.topicsPhraseCounts.topicsPhrasesSummary(opts.printPhrases.value)) if (opts.thetaServer.wasInvoked) { lda.wordSeqDomain.elementDomain.freeze() val reader = new java.io.BufferedReader(new java.io.InputStreamReader(System.in)) println("Reading documents, one per line. To end, close by Control-D") var line = reader.readLine var count = 0 while (line ne null) { val doc = Document.fromString(lda.wordSeqDomain, "<stdin>"+count, line) count += 1 lda.inferDocumentTheta(doc, opts.thetaServer.value) println(doc.theta.value.mkString(" ")) line = reader.readLine } } } }
hlin117/factorie
src/main/scala/cc/factorie/app/topics/lda/LDA.scala
Scala
apache-2.0
26,418
package ueb02 /** * Einfach vorwärts verkettete Liste von Ganzzahlen à la LISP. * Eine Liste mit den Elementen 1, 2 und 3 kann gebildet werden durch new List(1, new List(2, new List(3,null))) * * Der Konstruktor entspricht dem CONS-Operation von LISP. * * @param head Inhalt eines Listenknotens * @param tail Vorwärtsreferenz zum nächsten Listenknoten. Das Listenende wird durch null markiert. */ class List(val head: Int, val tail: List) { /**Liefert die String-Darstellung einer aus Paaren gebildeten, über die Referenz tail vorwärts verketteten Liste.*/ override def toString(): String = tail match { case null => head.toString case _ => head.toString + " " + tail.toString } } object List { def apply(head: Int, tail: List) = new List(head, tail) }
sebastian-dasse/uni-scala
ScalaKurs(Knabe)_S/src/ueb02/List.scala
Scala
mit
789
package sttp.client3.impl.zio import sttp.capabilities.Effect import sttp.client3.testing.SttpBackendStub import sttp.client3.{Request, Response, SttpBackend} import sttp.model.StatusCode import sttp.monad.MonadError import zio.{Has, RIO, Ref, Tag, UIO, URIO, ZLayer} trait SttpClientStubbingBase[R, P] { type SttpClientStubbing = Has[Service] // the tag as viewed by the implementing object. Needs to be passed explicitly, otherwise Has[] breaks. private[sttp] def serviceTag: Tag[Service] private[sttp] def sttpBackendTag: Tag[SttpBackend[RIO[R, *], P]] trait Service { def whenRequestMatchesPartial(partial: PartialFunction[Request[_, _], Response[_]]): URIO[SttpClientStubbing, Unit] private[zio] def update(f: SttpBackendStub[RIO[R, *], P] => SttpBackendStub[RIO[R, *], P]): UIO[Unit] } private[sttp] class StubWrapper(stub: Ref[SttpBackendStub[RIO[R, *], P]]) extends Service { override def whenRequestMatchesPartial( partial: PartialFunction[Request[_, _], Response[_]] ): URIO[SttpClientStubbing, Unit] = update(_.whenRequestMatchesPartial(partial)) override private[zio] def update(f: SttpBackendStub[RIO[R, *], P] => SttpBackendStub[RIO[R, *], P]) = stub.update(f) } case class StubbingWhenRequest private[sttp] (p: Request[_, _] => Boolean) { implicit val _serviceTag: Tag[Service] = serviceTag val thenRespondOk: URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondOk()) def thenRespondNotFound(): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondNotFound()) def thenRespondServerError(): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondServerError()) def thenRespondWithCode(status: StatusCode, msg: String = ""): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondWithCode(status, msg)) def thenRespond[T](body: T): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespond(body)) def thenRespond[T](resp: => Response[T]): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespond(resp)) def thenRespondCyclic[T](bodies: T*): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondCyclic(bodies: _*)) def thenRespondCyclicResponses[T](responses: Response[T]*): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondCyclicResponses(responses: _*)) def thenRespondF(resp: => RIO[R, Response[_]]): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondF(resp)) def thenRespondF(resp: Request[_, _] => RIO[R, Response[_]]): URIO[SttpClientStubbing, Unit] = whenRequest(_.thenRespondF(resp)) private def whenRequest( f: SttpBackendStub[RIO[R, *], P]#WhenRequest => SttpBackendStub[RIO[R, *], P] ): URIO[SttpClientStubbing, Unit] = URIO.serviceWith(_.update(stub => f(stub.whenRequestMatches(p)))) } val layer: ZLayer[Any, Nothing, Has[Service] with Has[SttpBackend[RIO[R, *], P]]] = { val monad = new RIOMonadAsyncError[R] implicit val _serviceTag: Tag[Service] = serviceTag implicit val _backendTag: Tag[SttpBackend[RIO[R, *], P]] = sttpBackendTag val composed = for { stub <- Ref.make(SttpBackendStub[RIO[R, *], P](monad)) stubber = new StubWrapper(stub) proxy = new SttpBackend[RIO[R, *], P] { override def send[T, RR >: P with Effect[RIO[R, *]]](request: Request[T, RR]): RIO[R, Response[T]] = stub.get >>= (_.send(request)) override def close(): RIO[R, Unit] = stub.get >>= (_.close()) override def responseMonad: MonadError[RIO[R, *]] = monad } } yield Has.allOf[Service, SttpBackend[RIO[R, *], P]](stubber, proxy) composed.toLayerMany } }
softwaremill/sttp
effects/zio1/src/main/scala/sttp/client3/impl/zio/SttpClientStubbingBase.scala
Scala
apache-2.0
3,699
package com.shocktrade.controlpanel.runtime import scala.concurrent.{ExecutionContext, Future} /** * Represents an evaluatable value or expression * @author Lawrence Daniels <[email protected]> */ trait Evaluatable { def eval(rc: RuntimeContext, scope: Scope)(implicit ec: ExecutionContext): Future[TypedValue] }
ldaniels528/shocktrade.js
app/client/control_panel/src/main/scala/com/shocktrade/controlpanel/runtime/Evaluatable.scala
Scala
apache-2.0
333
package com.twitter.scrooge.ast sealed abstract class Definition extends DefinitionNode { val sid: SimpleID } case class ConstDefinition( sid: SimpleID, fieldType: FieldType, value: RHS, docstring: Option[String] ) extends Definition case class Typedef(sid: SimpleID, fieldType: FieldType, annotations: Map[String, String] = Map.empty) extends Definition case class Enum( sid: SimpleID, values: Seq[EnumField], docstring: Option[String] ) extends Definition case class EnumField(sid: SimpleID, value: Int, docstring: Option[String]) extends Definition case class Senum(sid: SimpleID, values: Seq[String]) extends Definition sealed abstract class StructLike extends Definition { val originalName: String val fields: Seq[Field] val docstring: Option[String] val annotations: Map[String, String] } case class Struct( sid: SimpleID, originalName: String, fields: Seq[Field], docstring: Option[String], annotations: Map[String, String] = Map.empty ) extends StructLike case class Union( sid: SimpleID, originalName: String, fields: Seq[Field], docstring: Option[String], annotations: Map[String, String] = Map.empty ) extends StructLike case class FunctionArgs( sid: SimpleID, originalName: String, fields: Seq[Field] ) extends StructLike { override val docstring: Option[String] = None override val annotations: Map[String, String] = Map.empty } case class FunctionResult( sid: SimpleID, originalName: String, fields: Seq[Field] ) extends StructLike { override val docstring: Option[String] = None override val annotations: Map[String, String] = Map.empty } case class Exception_( sid: SimpleID, originalName: String, fields: Seq[Field], docstring: Option[String] ) extends StructLike { override val annotations: Map[String, String] = Map.empty } case class Service( sid: SimpleID, parent: Option[ServiceParent], functions: Seq[Function], docstring: Option[String] ) extends Definition case class ServiceParent( sid: SimpleID, prefix: Option[SimpleID], service: Option[Service] = None)
elipoz/scrooge
scrooge-generator/src/main/scala/com/twitter/scrooge/AST/Definition.scala
Scala
apache-2.0
2,084
package sryza import java.awt.event._ import java.awt.geom.AffineTransform import java.awt.image.{AffineTransformOp, BufferedImage} import java.awt._ import javax.imageio.ImageIO import javax.swing.JPanel import java.io.File class NodesPanel(val cluster: Cluster) extends JPanel { import NodesPanel._ setPreferredSize(new Dimension(WIDTH, HEIGHT)) val nodeImagesLeft = (1 to 3).map(x => transparentImage(ImageIO.read(getClass.getResource(s"/node$x.jpeg")))) val sickImage = transparentImage(ImageIO.read(getClass.getResource("/sick.jpeg"))) val backgroundImage = ImageIO.read(getClass.getResource("/background.png")) def scaleImage(image: BufferedImage, width: Int, height: Int): BufferedImage = { val after = new BufferedImage(image.getWidth, image.getHeight, BufferedImage.TYPE_INT_ARGB) val at = new AffineTransform() at.scale(width.toDouble / image.getWidth, height.toDouble / image.getHeight) val scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR) scaleOp.filter(image, after) } def transparentImage(image: BufferedImage): BufferedImage = { val imageClone = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB) val imageCloneG = imageClone.createGraphics() imageCloneG.drawImage(image, 0, 0, null) imageCloneG.setComposite(AlphaComposite.getInstance(AlphaComposite.DST_IN, 0.5f)) val width = image.getWidth() val height = image.getHeight() val raster = imageClone.getRaster() val background = imageClone.getRGB(0, 0) for (x <- 0 until width; y <- 0 until height) { val rgb = imageClone.getRGB(x, y) val r = rgb if (rgb == background) { val transparent = rgb & 0x00ffffff imageClone.setRGB(x, y, transparent) } } imageClone } override def paint(g: Graphics): Unit = { import Color._ val graphics = g.asInstanceOf[Graphics2D] graphics.drawImage(backgroundImage, 0, 0, null) cluster.nodes.foreach { node => val nodeImage = if (!node.healthy) { sickImage } else { nodeImagesLeft(node.imageId) } val flip = !node.facingLeft && node.healthy val width = if (flip) -node.width else node.width val x = node.x + (if (flip) node.width / 2 else -node.width / 2) graphics.drawImage(nodeImage, x, node.y - node.height / 2, width, node.height, null) if (cluster.queryExecuting && node.healthy) { graphics.setColor(orange) graphics.setStroke(new BasicStroke(3)) graphics.drawRect(node.x - node.width / 2, node.y - node.height / 2, node.width, node.height) } if (selectedNode.exists(node == _)) { graphics.setColor(green) graphics.setStroke(new BasicStroke(5)) graphics.drawRect(node.x - node.width / 2, node.y - node.height / 2, node.width, node.height) } } } var draggedNode: Option[Node] = None var selectedNode: Option[Node] = None addMouseListener(new MouseAdapter() { def findNode(e: MouseEvent): Option[Node] = { val x = e.getX val y = e.getY cluster.nodes.find { node => x < node.x + node.width / 2 && x > node.x - node.width / 2 && y < node.y + node.height / 2 && y > node.y - node.height / 2 } } override def mouseClicked(e: MouseEvent): Unit = { selectedNode = findNode(e) } override def mousePressed(e: MouseEvent): Unit = { draggedNode = findNode(e) } override def mouseReleased(e: MouseEvent): Unit = { draggedNode = None } }) addMouseMotionListener(new MouseMotionAdapter { override def mouseDragged(e: MouseEvent): Unit = { draggedNode.foreach { node => node.x = e.getX node.y = e.getY } } }) } object NodesPanel { val NODE_WIDTH = 100 val NODE_HEIGHT = 85 val WIDTH = 829 val HEIGHT = 579 }
sryza/clusters
src/main/scala/sryza/NodesPanel.scala
Scala
apache-2.0
3,929
package io.getquill.context.sql.idiom import io.getquill.Spec import io.getquill.context.sql.testContext._ import io.getquill.context.sql.SqlQuery import scala.util.Try import io.getquill.context.sql.norm.SqlNormalize class VerifySqlQuerySpec extends Spec { "fails if the query can't be translated to applicative joins" - { "sortBy" in { val q = quote { qr1.flatMap(a => qr2.filter(b => b.s == a.s).sortBy(b => b.s).map(b => b.s)) } VerifySqlQuery(SqlQuery(q.ast)).toString mustEqual "Some(The monad composition can't be expressed using applicative joins. Faulty expression: 'b.s == a.s'. Free variables: 'List(a)'.)" } "take" in { val q = quote { qr1.flatMap(a => qr2.filter(b => b.s == a.s).take(10).map(b => b.s)) } VerifySqlQuery(SqlQuery(q.ast)).toString mustEqual "Some(The monad composition can't be expressed using applicative joins. Faulty expression: 'b.s == a.s'. Free variables: 'List(a)'.)" } "doesn't accept table reference" - { "with filter" in { val q = quote { qr1.leftJoin(qr2).on((a, b) => a.i == b.i).filter { case (a, b) => b.isDefined } } an[IllegalArgumentException] should be thrownBy VerifySqlQuery(SqlQuery(SqlNormalize(q.ast))) } "with map" in { val q = quote { qr1.leftJoin(qr2).on((a, b) => a.i == b.i) .map(pcTup => if (pcTup._2.isDefined) "bar" else "baz") } an[IllegalArgumentException] should be thrownBy VerifySqlQuery(SqlQuery(SqlNormalize(q.ast))) } } "invalid flatJoin on" in { val q = quote { for { a <- qr1 b <- qr2 if a.i == b.i c <- qr1.leftJoin(_.i == a.i) } yield (a.i, b.i, c.map(_.i)) } Try(VerifySqlQuery(SqlQuery(q.ast))).isFailure mustEqual true } } }
getquill/quill
quill-sql/src/test/scala/io/getquill/context/sql/idiom/VerifySqlQuerySpec.scala
Scala
apache-2.0
1,904
package scintuit.contrib.play.data.api import play.api.libs.json._ import play.api.libs.functional.syntax._ import scintuit.data.api.transaction._ import scintuit.contrib.play.data.raw object transaction { object TransactionFormats extends TransactionFormats trait TransactionFormats { import raw.transaction.{TransactionFormats => RawTransactionFormats} implicit val correctionActionFormat: Format[CorrectionAction] = RawTransactionFormats.correctionActionFormat implicit val investmentSubAccountTypeFormat: Format[InvestmentSubAccountType] = RawTransactionFormats.investmentSubAccountTypeFormat implicit val banking401KSourceTypeFormat: Format[Banking401KSourceType] = RawTransactionFormats.banking401KSourceTypeFormat implicit val buyTypeFormat: Format[BuyType] = RawTransactionFormats.buyTypeFormat implicit val incomeTypeFormat: Format[IncomeType] = RawTransactionFormats.incomeTypeFormat implicit val optionsActionFormat: Format[OptionsAction] = RawTransactionFormats.optionsActionFormat implicit val optionsBuyTypeFormat: Format[OptionsBuyType] = RawTransactionFormats.optionsBuyTypeFormat implicit val optionsSellTypeFormat: Format[OptionsSellType] = RawTransactionFormats.optionsSellTypeFormat implicit val positionTypeFormat: Format[PositionType] = RawTransactionFormats.positionTypeFormat implicit val transferTypeFormat: Format[TransferAction] = RawTransactionFormats.transferTypeFormat implicit val relatedOptionTransactionTypeFormat: Format[RelatedOptionTransactionType] = RawTransactionFormats.relatedOptionTransactionTypeFormat implicit val securedTypeFormat: Format[SecuredType] = RawTransactionFormats.securedTypeFormat implicit val sellReasonFormat: Format[SellReason] = RawTransactionFormats.sellReasonFormat implicit val sellTypeFormat: Format[SellType] = RawTransactionFormats.sellTypeFormat private val bankingTransactionFormat: Format[BankingTransaction] = xmap(taggedFormat("banking", RawTransactionFormats.bankingTransactionFormat))(BankingTransaction, _.raw) private val creditTransactionFormat: Format[CreditTransaction] = xmap(taggedFormat("credit", RawTransactionFormats.creditTransactionFormat))(CreditTransaction, _.raw) private val investmentTransactionFormat: Format[InvestmentTransaction] = xmap(taggedFormat("investment", RawTransactionFormats.investmentTransactionFormat))(InvestmentTransaction, _.raw) private val investmentBankingTransactionFormat: Format[InvestmentBankingTransaction] = xmap(taggedFormat("investment_banking", RawTransactionFormats.investmentBankingTransactionFormat))(InvestmentBankingTransaction, _.raw) private val loanTransactionFormat: Format[LoanTransaction] = xmap(taggedFormat("loan", RawTransactionFormats.loanTransactionFormat))(LoanTransaction, _.raw) private val rewardTransactionFormat: Format[RewardTransaction] = xmap(taggedFormat("reward", RawTransactionFormats.rewardTransactionFormat))(RewardTransaction, _.raw) implicit val transactionFormat: Format[Transaction] = Format[Transaction]( bankingTransactionFormat.map(t => t: Transaction) orElse creditTransactionFormat.map(identity) orElse investmentTransactionFormat.map(identity) orElse investmentBankingTransactionFormat.map(identity) orElse loanTransactionFormat.map(identity) orElse rewardTransactionFormat.map(identity), Writes[Transaction]{ case t: BankingTransaction => bankingTransactionFormat.writes(t) case t: CreditTransaction => creditTransactionFormat.writes(t) case t: InvestmentTransaction => investmentTransactionFormat.writes(t) case t: InvestmentBankingTransaction => investmentBankingTransactionFormat.writes(t) case t: LoanTransaction => loanTransactionFormat.writes(t) case t: RewardTransaction => rewardTransactionFormat.writes(t) } ) private def taggedReads[A](tag: String, reads: Reads[A]): Reads[A] = (__ \\ "tag").read[String].filter(_ == tag) andKeep reads private def taggedWrites[A](tag: String, writes: Writes[A]): Writes[A] = writes.transform(_.as[JsObject] +("tag", JsString(tag))) private def taggedFormat[A](tag: String, format: Format[A]): Format[A] = Format(taggedReads(tag, format), taggedWrites(tag, format)) private def xmap[A, B](format: Format[A])(fab: A => B, fba: B => A): Format[B] = Format(format map fab, Writes(b => format.writes(fba(b)))) } }
drbild/scintuit
contrib/play-json/src/main/scala/scintuit/contrib/play/data/api/transaction.scala
Scala
apache-2.0
4,506
package net.bhardy.braintree.scala.search class KeyValueNode[T <: SearchRequest[T]](nodeName:String, parent:T) extends SearchNode[T](nodeName, parent) { def is(value: AnyRef): T = { parent.addKeyValueCriteria(nodeName.toString, value.toString) } }
benhardy/braintree-scala
src/main/scala/search/KeyValueNode.scala
Scala
mit
257
/* @meta { "processorId": "org.helgoboss.scala_bundle:1.0.0", "projectId": "org.helgoboss:app-info:1.0-SNAPSHOT", "dependencies": [ "com.weiglewilczek.scala-lang-osgi:scala-library:2.9.1" ], "transformers": [ "org.helgoboss.my_oss:1.0.0" ] } */ package org.helgoboss.app_info import java.io.File import java.awt.Image trait AppInfo { def dataDir: File def homeDir: Option[File] def id: String def name: String def iconImage: Option[Image] def logFile: Option[File] }
helgoboss/app-info
org.helgoboss.app-info.scala
Scala
mit
521
/* * Copyright 2017 Nicolas Rinaudo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kantan.mongodb import laws.discipline._, arbitrary._ class OptionCodecTests extends DisciplineSuite { checkAll("BsonValueCodec[Option[Int]]", BsonValueCodecTests[Option[Int]].codec[String, Float]) }
nrinaudo/kantan.mongodb
core/src/test/scala/kantan/mongodb/OptionCodecTests.scala
Scala
apache-2.0
811
package com.stevens.spark import org.apache.spark.SparkContext import org.apache.spark.SparkContext._ import org.apache.spark.SparkConf import org.apache.hadoop.io._ import com.stevens.minhash._ object ShingleCounter extends App { val shingleLength = args(0).toInt val corpusSequence = args(1) val outputLocation = args(2) val conf = new SparkConf().setAppName("Shingle Counter") val sc = new SparkContext(conf) val shingleLengthBroadcast = sc.broadcast(shingleLength) val corpusRDD = sc.sequenceFile(corpusSequence, classOf[Text], classOf[Text]) .map { case(id, text) => (id.toString, text.toString) } val shinglesRDD = corpusRDD.flatMap { case(id, text) => val minHash = new MinHashDocument(text, shingleLength=shingleLengthBroadcast.value) minHash.generateShingles.map(shingle => (shingle, BigInt(1))) }.reduceByKey(_ + _) val shingleCount = shinglesRDD.count() println(s"Number of distinct ${shingleLength}-shingles: $shingleCount") shinglesRDD.keys.saveAsTextFile(outputLocation) sc.stop() }
steven-s/minhash-document-clusters
src/main/scala/com/stevens/spark/ShingleCounter.scala
Scala
mit
1,049
/* * Scala (https://www.scala-lang.org) * * Copyright EPFL and Lightbend, Inc. * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package scala.tools.partest import scala.reflect.runtime.{universe => ru} import scala.tools.nsc._ /** For testing compiler internals directly. * Each source code string in "sources" will be compiled, and * the check function will be called with the source code and the * resulting CompilationUnit. The check implementation should * test for what it wants to test and fail (via assert or other * exception) if it is not happy. */ abstract class CompilerTest extends DirectTest { def check(source: String, unit: global.CompilationUnit): Unit lazy val global: Global = newCompiler() lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *) import global._ import definitions.{ compilerTypeFromTag } override def extraSettings = "-usejavacp -d " + testOutput.path def show() = sources.lazyZip(units).foreach(check) // Override at least one of these... def code = "" def sources: List[String] = List(code) // Utility functions class MkType(sym: Symbol) { def apply[M](implicit t: ru.TypeTag[M]): Type = if (sym eq NoSymbol) NoType else appliedType(sym, compilerTypeFromTag(t)) } implicit def mkMkType(sym: Symbol) = new MkType(sym) def allMembers(root: Symbol): List[Symbol] = { def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = { val latest = roots flatMap (_.info.members) filterNot (seen contains _) if (latest.isEmpty) seen.toList.sortWith(_ isLess _) else loop(seen ++ latest, latest) } loop(Set(), List(root)) } class SymsInPackage(pkgName: String) { def pkg = rootMirror.getPackage(TermName(pkgName)) def classes = allMembers(pkg) filter (_.isClass) def modules = allMembers(pkg) filter (_.isModule) def symbols = classes ++ terms filterNot (_ eq NoSymbol) def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor) def tparams = classes flatMap (_.info.typeParams) def tpes = symbols.map(_.tpe).distinct } }
martijnhoekstra/scala
src/partest/scala/tools/partest/CompilerTest.scala
Scala
apache-2.0
2,298
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.internal import org.apache.spark.SparkConf import org.apache.spark.annotation.{Experimental, Unstable} import org.apache.spark.sql.{ExperimentalMethods, SparkSession, UDFRegistration, _} import org.apache.spark.sql.catalog.v2.CatalogPlugin import org.apache.spark.sql.catalyst.analysis.{Analyzer, FunctionRegistry} import org.apache.spark.sql.catalyst.catalog.SessionCatalog import org.apache.spark.sql.catalyst.optimizer.Optimizer import org.apache.spark.sql.catalyst.parser.ParserInterface import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.execution.{ColumnarRule, QueryExecution, SparkOptimizer, SparkPlanner, SparkSqlParser} import org.apache.spark.sql.execution.datasources._ import org.apache.spark.sql.execution.datasources.v2.{V2StreamingScanSupportCheck, V2WriteSupportCheck} import org.apache.spark.sql.streaming.StreamingQueryManager import org.apache.spark.sql.util.ExecutionListenerManager /** * Builder class that coordinates construction of a new [[SessionState]]. * * The builder explicitly defines all components needed by the session state, and creates a session * state when `build` is called. Components should only be initialized once. This is not a problem * for most components as they are only used in the `build` function. However some components * (`conf`, `catalog`, `functionRegistry`, `experimentalMethods` & `sqlParser`) are as dependencies * for other components and are shared as a result. These components are defined as lazy vals to * make sure the component is created only once. * * A developer can modify the builder by providing custom versions of components, or by using the * hooks provided for the analyzer, optimizer & planner. There are some dependencies between the * components (they are documented per dependency), a developer should respect these when making * modifications in order to prevent initialization problems. * * A parent [[SessionState]] can be used to initialize the new [[SessionState]]. The new session * state will clone the parent sessions state's `conf`, `functionRegistry`, `experimentalMethods` * and `catalog` fields. Note that the state is cloned when `build` is called, and not before. */ @Experimental @Unstable abstract class BaseSessionStateBuilder( val session: SparkSession, val parentState: Option[SessionState] = None) { type NewBuilder = (SparkSession, Option[SessionState]) => BaseSessionStateBuilder /** * Function that produces a new instance of the `BaseSessionStateBuilder`. This is used by the * [[SessionState]]'s clone functionality. Make sure to override this when implementing your own * [[SessionStateBuilder]]. */ protected def newBuilder: NewBuilder /** * Session extensions defined in the [[SparkSession]]. */ protected def extensions: SparkSessionExtensions = session.extensions /** * Extract entries from `SparkConf` and put them in the `SQLConf` */ protected def mergeSparkConf(sqlConf: SQLConf, sparkConf: SparkConf): Unit = { sparkConf.getAll.foreach { case (k, v) => sqlConf.setConfString(k, v) } } /** * SQL-specific key-value configurations. * * These either get cloned from a pre-existing instance or newly created. The conf is merged * with its [[SparkConf]] only when there is no parent session. */ protected lazy val conf: SQLConf = { parentState.map { s => val cloned = s.conf.clone() if (session.sparkContext.conf.get(StaticSQLConf.SQL_LEGACY_SESSION_INIT_WITH_DEFAULTS)) { mergeSparkConf(cloned, session.sparkContext.conf) } cloned }.getOrElse { val conf = new SQLConf mergeSparkConf(conf, session.sparkContext.conf) conf } } /** * Internal catalog managing functions registered by the user. * * This either gets cloned from a pre-existing version or cloned from the built-in registry. */ protected lazy val functionRegistry: FunctionRegistry = { parentState.map(_.functionRegistry.clone()) .getOrElse(extensions.registerFunctions(FunctionRegistry.builtin.clone())) } /** * Experimental methods that can be used to define custom optimization rules and custom planning * strategies. * * This either gets cloned from a pre-existing version or newly created. */ protected lazy val experimentalMethods: ExperimentalMethods = { parentState.map(_.experimentalMethods.clone()).getOrElse(new ExperimentalMethods) } /** * Parser that extracts expressions, plans, table identifiers etc. from SQL texts. * * Note: this depends on the `conf` field. */ protected lazy val sqlParser: ParserInterface = { extensions.buildParser(session, new SparkSqlParser(conf)) } /** * ResourceLoader that is used to load function resources and jars. */ protected lazy val resourceLoader: SessionResourceLoader = new SessionResourceLoader(session) /** * Catalog for managing table and database states. If there is a pre-existing catalog, the state * of that catalog (temp tables & current database) will be copied into the new catalog. * * Note: this depends on the `conf`, `functionRegistry` and `sqlParser` fields. */ protected lazy val catalog: SessionCatalog = { val catalog = new SessionCatalog( () => session.sharedState.externalCatalog, () => session.sharedState.globalTempViewManager, functionRegistry, conf, SessionState.newHadoopConf(session.sparkContext.hadoopConfiguration, conf), sqlParser, resourceLoader) parentState.foreach(_.catalog.copyStateTo(catalog)) catalog } /** * Interface exposed to the user for registering user-defined functions. * * Note 1: The user-defined functions must be deterministic. * Note 2: This depends on the `functionRegistry` field. */ protected def udfRegistration: UDFRegistration = new UDFRegistration(functionRegistry) /** * Logical query plan analyzer for resolving unresolved attributes and relations. * * Note: this depends on the `conf` and `catalog` fields. */ protected def analyzer: Analyzer = new Analyzer(catalog, conf) { override val extendedResolutionRules: Seq[Rule[LogicalPlan]] = new FindDataSourceTable(session) +: new ResolveSQLOnFile(session) +: new FallBackFileSourceV2(session) +: DataSourceResolution(conf, this) +: customResolutionRules override val postHocResolutionRules: Seq[Rule[LogicalPlan]] = PreprocessTableCreation(session) +: PreprocessTableInsertion(conf) +: DataSourceAnalysis(conf) +: customPostHocResolutionRules override val extendedCheckRules: Seq[LogicalPlan => Unit] = PreWriteCheck +: PreReadCheck +: HiveOnlyCheck +: V2WriteSupportCheck +: V2StreamingScanSupportCheck +: customCheckRules override protected def lookupCatalog(name: String): CatalogPlugin = session.catalog(name) } /** * Custom resolution rules to add to the Analyzer. Prefer overriding this instead of creating * your own Analyzer. * * Note that this may NOT depend on the `analyzer` function. */ protected def customResolutionRules: Seq[Rule[LogicalPlan]] = { extensions.buildResolutionRules(session) } /** * Custom post resolution rules to add to the Analyzer. Prefer overriding this instead of * creating your own Analyzer. * * Note that this may NOT depend on the `analyzer` function. */ protected def customPostHocResolutionRules: Seq[Rule[LogicalPlan]] = { extensions.buildPostHocResolutionRules(session) } /** * Custom check rules to add to the Analyzer. Prefer overriding this instead of creating * your own Analyzer. * * Note that this may NOT depend on the `analyzer` function. */ protected def customCheckRules: Seq[LogicalPlan => Unit] = { extensions.buildCheckRules(session) } /** * Logical query plan optimizer. * * Note: this depends on `catalog` and `experimentalMethods` fields. */ protected def optimizer: Optimizer = { new SparkOptimizer(catalog, experimentalMethods) { override def extendedOperatorOptimizationRules: Seq[Rule[LogicalPlan]] = super.extendedOperatorOptimizationRules ++ customOperatorOptimizationRules } } /** * Custom operator optimization rules to add to the Optimizer. Prefer overriding this instead * of creating your own Optimizer. * * Note that this may NOT depend on the `optimizer` function. */ protected def customOperatorOptimizationRules: Seq[Rule[LogicalPlan]] = { extensions.buildOptimizerRules(session) } /** * Planner that converts optimized logical plans to physical plans. * * Note: this depends on the `conf` and `experimentalMethods` fields. */ protected def planner: SparkPlanner = { new SparkPlanner(session.sparkContext, conf, experimentalMethods) { override def extraPlanningStrategies: Seq[Strategy] = super.extraPlanningStrategies ++ customPlanningStrategies } } /** * Custom strategies to add to the planner. Prefer overriding this instead of creating * your own Planner. * * Note that this may NOT depend on the `planner` function. */ protected def customPlanningStrategies: Seq[Strategy] = { extensions.buildPlannerStrategies(session) } protected def columnarRules: Seq[ColumnarRule] = { extensions.buildColumnarRules(session) } /** * Create a query execution object. */ protected def createQueryExecution: LogicalPlan => QueryExecution = { plan => new QueryExecution(session, plan) } /** * Interface to start and stop streaming queries. */ protected def streamingQueryManager: StreamingQueryManager = new StreamingQueryManager(session) /** * An interface to register custom [[org.apache.spark.sql.util.QueryExecutionListener]]s * that listen for execution metrics. * * This gets cloned from parent if available, otherwise a new instance is created. */ protected def listenerManager: ExecutionListenerManager = { parentState.map(_.listenerManager.clone(session)).getOrElse( new ExecutionListenerManager(session, loadExtensions = true)) } /** * Function used to make clones of the session state. */ protected def createClone: (SparkSession, SessionState) => SessionState = { val createBuilder = newBuilder (session, state) => createBuilder(session, Option(state)).build() } /** * Build the [[SessionState]]. */ def build(): SessionState = { new SessionState( session.sharedState, conf, experimentalMethods, functionRegistry, udfRegistration, () => catalog, sqlParser, () => analyzer, () => optimizer, planner, streamingQueryManager, listenerManager, () => resourceLoader, createQueryExecution, createClone, columnarRules) } } /** * Helper class for using SessionStateBuilders during tests. */ private[sql] trait WithTestConf { self: BaseSessionStateBuilder => def overrideConfs: Map[String, String] override protected lazy val conf: SQLConf = { val overrideConfigurations = overrideConfs val conf = parentState.map(_.conf.clone()).getOrElse { new SQLConf { clear() override def clear(): Unit = { super.clear() // Make sure we start with the default test configs even after clear overrideConfigurations.foreach { case (key, value) => setConfString(key, value) } } } } mergeSparkConf(conf, session.sparkContext.conf) conf } }
actuaryzhang/spark
sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
Scala
apache-2.0
12,552
package edu.berkeley.nlp.coref import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.HashMap import scala.util.Random import edu.berkeley.nlp.futile.util.Counter import edu.berkeley.nlp.futile.util.Iterators import edu.berkeley.nlp.futile.util.Logger import edu.berkeley.nlp.coref.bp.DocumentFactorGraph class DocumentGraph(val corefDoc: CorefDoc, val addToFeaturizer: Boolean) { // addToFeaturizer should be true for train documents (if a feature is unseen on // these, we add it to the featurizer) and false for dev/test documents // By convention: a feature vector is empty if it has been pruned var cachedFeats = new Array[Array[Seq[Int]]](corefDoc.numPredMents); for (i <- 0 until corefDoc.numPredMents) { cachedFeats(i) = Array.fill(i + 1)(Seq[Int]()); } // These are just here so we don't have to reinstantiate them; they should // be overwritten every time the weights change (which is all the time) val cachedScoreMatrix = new Array[Array[Double]](corefDoc.numPredMents); val cachedMarginalMatrix = new Array[Array[Double]](corefDoc.numPredMents); for (i <- 0 until corefDoc.numPredMents) { cachedScoreMatrix(i) = Array.fill(i + 1)(0.0); cachedMarginalMatrix(i) = Array.fill(i + 1)(0.0); } // Only used for DocumentInferencerRahman val cachedMentClusterMapping = new MentClusterMapping(corefDoc.numPredMents); var cachedFeaturizer: PairwiseIndexingFeaturizer = null; var cacheEmpty = true; // If an edge is pruned, it will never be featurized var prunedEdges = new Array[Array[Boolean]](corefDoc.numPredMents); for (i <- 0 until prunedEdges.size) { prunedEdges(i) = Array.fill(i + 1)(false); } // Stored bit information var storedClusterPosteriors = new ArrayBuffer[Array[Array[Double]]](); var storedDistributedLabels = new ArrayBuffer[Array[Array[Int]]](); def size() = corefDoc.numPredMents def getMention(idx: Int) = corefDoc.predMentions(idx); def getMentions() = corefDoc.predMentions; def getOraclePredClustering() = corefDoc.getOraclePredClustering; def getMentionStrAndContext(idx: Int): String = { val ment = getMention(idx); val mentionStart = ment.startIdx; val mentionEnd = ment.endIdx; val sentence = corefDoc.rawDoc.words(ment.sentIdx); val contextStart = Math.max(0, mentionStart - 3); val contextEnd = Math.min(mentionEnd + 3, sentence.size); (sentence.slice(contextStart, mentionStart).foldLeft("")(_ + " " + _) + " [" + sentence.slice(mentionStart, mentionEnd).foldLeft("")(_ + " " + _) + "] " + sentence.slice(mentionEnd, contextEnd).foldLeft("")(_ + " " + _)).trim(); } def isGoldNoPruning(currIdx: Int, antecedentIdx: Int) = getGoldAntecedentsNoPruning(currIdx).contains(antecedentIdx); def isGoldCurrentPruning(currIdx: Int, antecedentIdx: Int) = getGoldAntecedentsUnderCurrentPruning(currIdx) .contains(antecedentIdx); def isPruned(currIdx: Int, antecedentIdx: Int): Boolean = prunedEdges(currIdx)(antecedentIdx); def getPrunedDomain(idx: Int, gold: Boolean): Array[Int] = { val currAntecedents = getGoldAntecedentsUnderCurrentPruning(idx); val domainSeq = new ArrayBuffer[Int](); for (j <- 0 to idx) { if (!isPruned(idx, j) && (!gold || currAntecedents.contains(j))) { domainSeq += j; } } domainSeq.toArray; } private def pruneEdgesMentDistanceSentDistance(maxBackptrMentDistance: Int, maxPronounSentDistance: Int) { for (i <- 0 until prunedEdges.size) { val iSentIdx = getMention(i).sentIdx; for (j <- 0 to i) { val jSentIdx = getMention(j).sentIdx; if (j < i - maxBackptrMentDistance || (getMention(i).mentionType == MentionType.PRONOMINAL && iSentIdx - jSentIdx > maxPronounSentDistance)) { prunedEdges(i)(j) = true; cachedFeats(i)(j) = Seq[Int](); } } } } private def pruneEdgesLogRatio(scorer: PairwiseScorer, logRatio: Double) { val (featsChart, scoresChart) = featurizeIndexAndScoreNonPrunedUseCache(scorer); for (i <- 0 until prunedEdges.size) { val edgeWeights: Counter[Int] = new Counter[Int](); for (j <- 0 to i) { edgeWeights.setCount(j, scoresChart(i)(j)); } val bestScore = edgeWeights.max(); for (backptrToPrune <- 0 to i) { if (scoresChart(i)(backptrToPrune) + logRatio < bestScore) { prunedEdges(i)(backptrToPrune) = true; cachedFeats(i)(backptrToPrune) = Seq[Int](); } } require(prunedEdges(i).foldLeft(false)((curr, isPruned) => curr || !isPruned), "Everyone was pruned for " + i); } } private def computePruningStats(): PruningStats = { var totalMentions = 0; var totalAnaphoricMentions = 0; var totalEdges = 0; var edgesPruned = 0; var numGoldBackptrs = 0; var numGoldBackptrsPruned = 0; var numAllBackptrsPruned = 0; var numAnaphoricAllBackptrsPruned = 0; for (i <- 0 until this.size) { totalMentions += 1; val thisAntecedentsNoPruning = getGoldAntecedentsNoPruning(i); val thisAntecedentsWithPruning = getGoldAntecedentsUnderCurrentPruningOrEmptySet(i); totalEdges += (i + 1); edgesPruned += prunedEdges(i).foldRight(0)((pruned: Boolean, value: Int) => if (pruned) value + 1 else value); val goldAntecedentIsSelf = thisAntecedentsNoPruning.size == 1 && thisAntecedentsNoPruning(0) == i; val allAntecedentsPruned = thisAntecedentsWithPruning.size == 0; totalAnaphoricMentions += (if (goldAntecedentIsSelf) 0 else 1); numGoldBackptrs += thisAntecedentsNoPruning.size; val numAntecedentsPruned = thisAntecedentsNoPruning.size - thisAntecedentsWithPruning.size; numGoldBackptrsPruned += numAntecedentsPruned; numAllBackptrsPruned += (if (allAntecedentsPruned) 1 else 0); numAnaphoricAllBackptrsPruned += (if (!goldAntecedentIsSelf && allAntecedentsPruned) 1 else 0); } new PruningStats(totalMentions, totalAnaphoricMentions, totalEdges, edgesPruned, numGoldBackptrs, numGoldBackptrsPruned, numAllBackptrsPruned, numAnaphoricAllBackptrsPruned); } def getGoldClustersNoPruning(): Seq[Seq[Mention]] = { val allClusters = new ArrayBuffer[Seq[Mention]](); val oracleClustering = corefDoc.getOraclePredClustering for (cluster <- oracleClustering.clusters) { // val clusterIndices = cluster.asScala.map(_.mentionID).toSeq; // val clusterIndices2 = cluster.asScala.map(doc.predMentions.indexOf(_)).toSeq; // require(clusterIndices == clusterIndices2); allClusters += cluster.map(getMention(_)); } allClusters; } def getAllAntecedentsCurrentPruning(idx: Int): Seq[Int] = { val antecedents = new ArrayBuffer[Int]; for (i <- 0 to idx) { if (!prunedEdges(idx)(i)) { antecedents += i; } } antecedents; } def getGoldAntecedentsNoPruning(): Array[Seq[Int]] = { (0 until this.size).map(getGoldAntecedentsNoPruning(_)).toArray; } def getGoldAntecedentsNoPruning(idx: Int): Seq[Int] = { val oracleClustering = corefDoc.getOraclePredClustering val antecedents = oracleClustering.getAllAntecedents(idx); if (antecedents.isEmpty) Seq(idx) else antecedents; } // This and the following return the set of allowed antecedents if all gold // antecedents have been pruned; effectively this ignores examples where // there is no gold. Always returns nonempty. def getGoldAntecedentsUnderCurrentPruning(): Array[Seq[Int]] = { (0 until this.size).map(getGoldAntecedentsUnderCurrentPruning(_)).toArray; } def getGoldAntecedentsUnderCurrentPruning(idx: Int): Seq[Int] = { val oracleClustering = corefDoc.getOraclePredClustering val antecedentsRaw = oracleClustering.getAllAntecedents(idx); val antecedents = if (antecedentsRaw.isEmpty) Seq(idx) else antecedentsRaw; val unprunedAntecedents = antecedents.filter(j => !prunedEdges(idx)(j)) if (unprunedAntecedents.isEmpty) { // This is a little inefficient but this code isn't called that much (extremely rare in coarse pass // and generally not called for nonanaphoric guys, and most things are nonanaphoric) val allUnprunedBackptrs = prunedEdges(idx).zipWithIndex.filter((prunedAndIdx) => !prunedAndIdx._1).map(_._2) .toSeq; allUnprunedBackptrs } else { unprunedAntecedents; } } // This and the following return the set of unpruned antecedents, possibly empty def getGoldAntecedentsUnderCurrentPruningOrEmptySet(): Array[Seq[Int]] = { (0 until this.size).map(getGoldAntecedentsUnderCurrentPruningOrEmptySet(_)).toArray; } def getGoldAntecedentsUnderCurrentPruningOrEmptySet(idx: Int): Seq[Int] = { val oracleClustering = corefDoc.getOraclePredClustering val antecedentsRaw = oracleClustering.getAllAntecedents(idx); val antecedents = if (antecedentsRaw.isEmpty) Seq(idx) else antecedentsRaw; val unprunedAntecedents = antecedents.filter(j => !prunedEdges(idx)(j)) unprunedAntecedents; } // N.B. The matrices returned by this method are volatile. The feats one hangs around // unless you refeaturize, but the other one gets mutated every time you call this // method (though obviously it's only different if you prune or if the weights have changed). def featurizeIndexAndScoreNonPrunedUseCache(scorer: PairwiseScorer): (Array[Array[Seq[Int]]], Array[Array[Double]]) = { val featsChart = featurizeIndexNonPrunedUseCache(scorer.featurizer); // val scoreChart = new Array[Array[Double]](corefDoc.numPredMents); val scoreChart = cachedScoreMatrix; for (i <- 0 until corefDoc.numPredMents) { // scoreChart(i) = Array.fill(i + 1)(Double.NegativeInfinity); for (j <- 0 to i) { if (!prunedEdges(i)(j)) { require(featsChart(i)(j).size > 0); scoreChart(i)(j) = scorer.scoreIndexedFeats(featsChart(i)(j)); } else { scoreChart(i)(j) = Double.NegativeInfinity; } } } (featsChart, scoreChart) } def featurizeIndexNonPrunedUseCache(featurizer: PairwiseIndexingFeaturizer): Array[Array[Seq[Int]]] = { if (cacheEmpty || featurizer != cachedFeaturizer) { cachedFeats = featurizeIndexNonPruned(featurizer); cachedFeaturizer = featurizer; cacheEmpty = false; } cachedFeats; } private def featurizeIndexNonPruned(featurizer: PairwiseIndexingFeaturizer): Array[Array[Seq[Int]]] = { val featsChart = new Array[Array[Seq[Int]]](corefDoc.numPredMents); for (i <- 0 until corefDoc.numPredMents) { featsChart(i) = Array.fill(i + 1)(Seq[Int]()); for (j <- 0 to i) { if (!prunedEdges(i)(j)) { featsChart(i)(j) = featurizer.featurizeIndex(this, i, j, addToFeaturizer); } } } featsChart; } def setPrunedEdges(prunedEdges: Array[Array[Boolean]]) { this.prunedEdges = prunedEdges; for (i <- 0 until prunedEdges.size) { for (j <- 0 until prunedEdges(i).size) { if (prunedEdges(i)(j)) { cachedFeats(i)(j) = Seq[Int](); } } } } def printAverageFeatureCountInfo() { var numerAnaphoric = 0; var denomAnaphoric = 0; var numerNonanaphoric = 0; var denomNonanaphoric = 0; for (i <- 0 until cachedFeats.size) { for (j <- 0 until cachedFeats(i).size) { if (!prunedEdges(i)(j)) { if (i != j) { numerAnaphoric += cachedFeats(i)(j).size; denomAnaphoric += 1; } else { numerNonanaphoric += cachedFeats(i)(j).size; denomNonanaphoric += 1; } } } } Logger.logss("Avg feature counts anaphoric: " + numerAnaphoric.toDouble / denomAnaphoric.toDouble); Logger.logss("Avg feature counts nonanaphoric: " + numerNonanaphoric.toDouble / denomNonanaphoric.toDouble); } // Caching various information that we might want to use later // def computeAndStoreClusterPosteriors(clusterer: Clusterer) { // val clusterPosteriors = new Array[Array[Double]](this.size); // for (i <- 0 until size) { //// val currMent = doc.predMentions.get(i); //// clusterPosteriors(i) = clusterer.computeClusterPosteriors(getMentionInContext(currMent.sentNum, // currMent.startIndex, currMent.endIndex, currMent.headIndex)); // // val currMent = getMention(i); // clusterPosteriors(i) = clusterer.computeClusterPosteriors(getMentionInContext(currMent.sentIdx, // currMent.startIdx, currMent.endIdx, currMent.headIdx)); // // Do a little smoothing on the posteriors // (0 until clusterPosteriors(i).size).foreach(j => clusterPosteriors(i)(j) += 1e-10); // } // this.storedClusterPosteriors += clusterPosteriors; // } // // def computeAndStoreDistributedLabels(clustererIdx: Int, lowerThresholds: Array[Double], // upperThresholds: Array[Double]) { // val distributedLabels = new Array[Array[Int]](this.size); // for (i <- 0 until size) { // distributedLabels(i) = new Array[Int](this.storedClusterPosteriors(clustererIdx)(i).size); // for (j <- 0 until this.storedClusterPosteriors(clustererIdx)(i).size) { // val posterior = this.storedClusterPosteriors(clustererIdx)(i)(j); // distributedLabels(i)(j) = if (posterior > upperThresholds(j)) 1 else if (posterior < lowerThresholds(j)) // 0 else -1; // } // } // this.storedDistributedLabels += distributedLabels; // } def numClusterers = storedClusterPosteriors.size; def numClusters(clustererIdx: Int) = storedClusterPosteriors(clustererIdx)(0).size; def getClusterPosteriors(clustererIdx: Int, mentIdx: Int): Array[Double] = { storedClusterPosteriors(clustererIdx)(mentIdx); } def getBestCluster(clustererIdx: Int, mentIdx: Int): Int = { var bestScore = Double.NegativeInfinity; var bestIdx = -1; for (i <- 0 until storedClusterPosteriors(clustererIdx)(mentIdx).length) { if (storedClusterPosteriors(clustererIdx)(mentIdx)(i) > bestScore) { bestScore = storedClusterPosteriors(clustererIdx)(mentIdx)(i); bestIdx = i; } } bestIdx; } def computeAndStoreCheatingPosteriors(numCheatingClusters: Int, rng: java.util.Random) { val cheatingPosteriors = new Array[Array[Double]](this.size); val rawIdxToClusterIdx = new HashMap[Int, Int](); for (i <- 0 until size) { val rawIdx = corefDoc.getOraclePredClustering.getClusterIdx(i); if (!rawIdxToClusterIdx.contains(rawIdx)) { rawIdxToClusterIdx(rawIdx) = rng.nextInt(numCheatingClusters); } val goldIdx = rawIdxToClusterIdx(rawIdx); cheatingPosteriors(i) = OraclePosteriorSampler.randomPosterior(numCheatingClusters, goldIdx, rng); } this.storedClusterPosteriors += cheatingPosteriors; } def computeAndStorePhiPosteriors(useNumber: Boolean, useGender: Boolean, useNert: Boolean) { if (useNumber) { computeAndStorePhiPosterior((ment: Mention) => ment.number.ordinal(), Number.values().size - 1, Number.UNKNOWN.ordinal()) } if (useGender) { computeAndStorePhiPosterior((ment: Mention) => ment.gender.ordinal(), Gender.values().size - 1, Gender.UNKNOWN.ordinal()) } if (useNert) { // Assumes that "O" is last in the list computeAndStorePhiPosterior((ment: Mention) => DocumentFactorGraph.nerTypesIncludingO.indexOf(ment.nerString), DocumentFactorGraph.nerTypes.size, DocumentFactorGraph.nerTypesIncludingO.size - 1) } } def computeAndStorePhiPosterior(fcn: (Mention => Int), domainSize: Int, unknown: Int) { val EstimatorConfidence = 0.75; val posteriors = new Array[Array[Double]](this.size); for (i <- 0 until size) { val idx = fcn(getMention(i)); if (idx == unknown) { posteriors(i) = Array.tabulate(domainSize)(j => 1.0 / domainSize); } else if (idx >= domainSize) { throw new RuntimeException("Bad idx: " + idx + " for domain size " + domainSize + " " + getMention(i) .nerString); } else { posteriors(i) = Array.tabulate(domainSize)(j => (1.0 - EstimatorConfidence) / domainSize); posteriors(i)(idx) += EstimatorConfidence; } } this.storedClusterPosteriors += posteriors; } } case class PruningStats(val totalMentions: Int, val totalAnaphoricMentions: Int, val totalEdges: Int, val edgesPruned: Int, val numGoldBackptrs: Int, val numGoldBackptrsPruned: Int, val numAllBackptrsPruned: Int, val numAnaphoricAllBackptrsPruned: Int) { def add(other: PruningStats) = { new PruningStats(this.totalMentions + other.totalMentions, this.totalAnaphoricMentions + other.totalAnaphoricMentions, this.totalEdges + other.totalEdges, this.edgesPruned + other.edgesPruned, this.numGoldBackptrs + other.numGoldBackptrs, this.numGoldBackptrsPruned + other.numGoldBackptrsPruned, this.numAllBackptrsPruned + other.numAllBackptrsPruned, this.numAnaphoricAllBackptrsPruned + other.numAnaphoricAllBackptrsPruned); } override def toString(): String = { "totalMentions: " + this.totalMentions + ", totalAnaphoricMentions: " + this.totalAnaphoricMentions + ", " + "totalEdges: " + this.totalEdges + ", edgesPruned: " + this.edgesPruned + ", numGoldBackptrs: " + this.numGoldBackptrs + ", numGoldBackptrsPruned: " + this.numGoldBackptrsPruned + ", numAllBackptrsPruned: " + this.numAllBackptrsPruned + ", numAnaphoricAllBackptrsPruned: " + this.numAnaphoricAllBackptrsPruned; } }; object DocumentGraph { def pruneEdgesAll(docGraphs: Seq[DocumentGraph], pruningStrategy: PruningStrategy, scorer: PairwiseScorer) { if (pruningStrategy.strategy.startsWith("distance")) { val args = pruningStrategy.getDistanceArgs(); pruneEdgesAll(docGraphs, (doc: DocumentGraph) => doc.pruneEdgesMentDistanceSentDistance(args._1, args._2)); } else if (pruningStrategy.strategy.startsWith("c2flogratio")) { pruneEdgesAll(docGraphs, (doc: DocumentGraph) => doc.pruneEdgesLogRatio(scorer, pruningStrategy.getLogRatio())); } else { throw new RuntimeException("Unrecognized pruning strategy: " + pruningStrategy); } } private def pruneEdgesAll(docGraphs: Seq[DocumentGraph], pruningFcn: (DocumentGraph) => Unit) { var pruningStats = new PruningStats(0, 0, 0, 0, 0, 0, 0, 0); for (docGraph <- docGraphs) { pruningFcn(docGraph); pruningStats = pruningStats.add(docGraph.computePruningStats()); } Logger.logss("Pruning result: " + pruningStats); } }
timfeu/berkeleycoref-thesaurus
src/main/java/edu/berkeley/nlp/coref/DocumentGraph.scala
Scala
gpl-3.0
18,850
package com.twitter.finatra.multiserver.Add1HttpServer import com.twitter.finatra.http.HttpServer import com.twitter.finatra.http.filters.CommonFilters import com.twitter.finatra.http.routing.HttpRouter import com.twitter.finatra.thrift.ThriftClientExceptionMapper class Add1Server extends HttpServer { override val modules = Seq(Adder1ThriftClientModule) override def configureHttp(router: HttpRouter) { router .exceptionMapper[ThriftClientExceptionMapper] .filter[CommonFilters] .add[Add1Controller] } }
joecwu/finatra
inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/Add1HttpServer/Add1Server.scala
Scala
apache-2.0
537
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package models case class SummarySectionModel(rows: Seq[SummaryRowModel]) { }
hmrc/pensions-lifetime-allowance-frontend
app/models/SummarySectionModel.scala
Scala
apache-2.0
685
package com.xenopsconsulting.gamedayapi import fetchstrategies.{DefaultFetchStrategy, FetchStrategy} import xml.{Node, Elem} import java.util.Date case class EpgGame(gameNode: Node) { def id():String = (gameNode \\ "@id").text def gameday():String = (gameNode \\ "@gameday").text def ind(): String = (gameNode \\ "@ind").text def status(): String = (gameNode \\ "@status").text def gameType(): String = (gameNode \\ "@game_type").text def homeCode():String = (gameNode \\ "@home_code").text def awayCode():String = (gameNode \\ "@away_code").text /** * Convenience method that returns the gid for the game, which is just the 'gameday' attribute, with a 'gid_' * prepended. * * @return gid */ def gid():String = { "gid_" + gameday() } /** * Check to see if this game is the second of a doubleheader. * * @return true for nightcaps, false for single games or the first of two */ def nightcap():Boolean = { if (gameday().last.toString == "2") { true } else { false } } }
ecopony/scala-gameday-api
src/main/scala/com/xenopsconsulting/gamedayapi/EpgGame.scala
Scala
mit
1,048
/** * Licensed to Big Data Genomics (BDG) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The BDG licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bdgenomics.adam.projections import org.apache.avro.Schema import scala.collection.JavaConversions._ import org.apache.avro.Schema.Field /** * Avro utility object to create a projection of a Schema. */ object Projection { private def createProjection(fullSchema: Schema, fields: Set[String], exclude: Boolean = false): Schema = { val projectedSchema = Schema.createRecord(fullSchema.getName, fullSchema.getDoc, fullSchema.getNamespace, fullSchema.isError) projectedSchema.setFields(fullSchema.getFields.filter(createFilterPredicate(fields, exclude)) .map(p => new Field(p.name, p.schema, p.doc, p.defaultValue, p.order))) projectedSchema } private def createFilterPredicate(fieldNames: Set[String], exclude: Boolean = false): Field => Boolean = { val filterPred = (f: Field) => fieldNames.contains(f.name) val includeOrExlcude = (contains: Boolean) => if (exclude) !contains else contains filterPred.andThen(includeOrExlcude) } // TODO: Unify these various methods def apply(includedFields: FieldValue*): Schema = { assert(!includedFields.isEmpty, "Can't project down to zero fields!") Projection(false, includedFields: _*) } def apply(includedFields: Traversable[FieldValue]): Schema = { assert(includedFields.size > 0, "Can't project down to zero fields!") val baseSchema = includedFields.head.schema createProjection(baseSchema, includedFields.map(_.toString).toSet, false) } def apply(exclude: Boolean, includedFields: FieldValue*): Schema = { val baseSchema = includedFields.head.schema createProjection(baseSchema, includedFields.map(_.toString).toSet, exclude) } } object Filter { def apply(excludeFields: FieldValue*): Schema = { Projection(true, excludeFields: _*) } }
tdanford/adam
adam-core/src/main/scala/org/bdgenomics/adam/projections/Projection.scala
Scala
apache-2.0
2,582
package im.actor.api.rpc import akka.actor._ import cats.data.Xor import im.actor.api.rpc.CommonRpcErrors.InvalidAccessHash import im.actor.api.rpc.peers._ import im.actor.server.acl.ACLUtils import im.actor.server.api.rpc.service.groups.GroupRpcErrors import im.actor.server.db.DbExtension import im.actor.server.group.GroupErrors.GroupNotFound import im.actor.server.group.GroupExtension import im.actor.server.model._ import im.actor.server.persist._ import im.actor.server.user.UserErrors.UserNotFound import im.actor.util.misc.StringUtils import slick.dbio.DBIO import scala.concurrent.{ ExecutionContext, Future } object PeerHelpers { def withOutPeer[R <: RpcResponse](outPeer: ApiOutPeer)(f: ⇒ Future[RpcError Xor R])( implicit client: AuthorizedClientData, system: ActorSystem ): Future[RpcError Xor R] = { import FutureResultRpc._ import system.dispatcher val action: Result[R] = for { valid ← fromFuture(handleNotFound)(ACLUtils.checkOutPeer(outPeer, client.authId)) result ← if (valid) fromFutureXor(f) else fromXor(Xor.left(InvalidAccessHash)) } yield result action.value } private def handleNotFound: PartialFunction[Throwable, RpcError] = { case _: UserNotFound ⇒ CommonRpcErrors.UserNotFound case _: GroupNotFound ⇒ CommonRpcErrors.GroupNotFound case e ⇒ throw e } def withOutPeerDBIO[R <: RpcResponse](outPeer: ApiOutPeer)(f: ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, system: ActorSystem ): DBIO[RpcError Xor R] = DBIO.from(withOutPeer(outPeer)(DbExtension(system).db.run(f))) def withOutPeerAsGroupPeer[R <: RpcResponse](outPeer: ApiOutPeer)( f: ApiGroupOutPeer ⇒ DBIO[RpcError Xor R] )(implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext): DBIO[RpcError Xor R] = { outPeer.`type` match { case ApiPeerType.Group ⇒ f(ApiGroupOutPeer(outPeer.id, outPeer.accessHash)) case ApiPeerType.Private ⇒ DBIO.successful(Error(RpcError(403, "PEER_IS_NOT_GROUP", "", false, None))) } } def withUserOutPeerF[R <: RpcResponse](userOutPeer: ApiUserOutPeer)(f: ⇒ Future[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): Future[RpcError Xor R] = DbExtension(actorSystem).db.run(withUserOutPeer(userOutPeer)(DBIO.from(f))) def withUserOutPeer[R <: RpcResponse](userOutPeer: ApiUserOutPeer)(f: ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { renderCheckResult(Seq(checkUserPeer(userOutPeer.userId, userOutPeer.accessHash)), f) } def withOwnGroupMember[R <: RpcResponse](groupOutPeer: ApiGroupOutPeer, userId: Int)(f: FullGroup ⇒ DBIO[RpcError Xor R])(implicit ec: ExecutionContext): DBIO[RpcError Xor R] = { withGroupOutPeer(groupOutPeer) { group ⇒ (for (user ← GroupUserRepo.find(group.id, userId)) yield user).flatMap { case Some(user) ⇒ f(group) case None ⇒ DBIO.successful(Error(CommonRpcErrors.forbidden("You are not a group member."))) } } } def withValidGroupTitle[R <: RpcResponse](title: String)(f: String ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = StringUtils.validName(title) match { case Xor.Left(err) ⇒ DBIO.successful(Error(GroupRpcErrors.WrongGroupTitle)) case Xor.Right(validTitle) ⇒ f(validTitle) } def withUserOutPeers[R <: RpcResponse](userOutPeers: Seq[ApiUserOutPeer])(f: ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { val checkOptsFutures = userOutPeers map { case ApiUserOutPeer(userId, accessHash) ⇒ checkUserPeer(userId, accessHash) } renderCheckResult(checkOptsFutures, f) } def withUserOutPeersF[R <: RpcResponse](userOutPeers: Seq[ApiUserOutPeer])(f: ⇒ Future[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): Future[RpcError Xor R] = DbExtension(actorSystem).db.run(withUserOutPeers(userOutPeers)(DBIO.from(f))) val InvalidToken = RpcError(403, "INVALID_INVITE_TOKEN", "No correct token provided.", false, None) def withValidInviteToken[R <: RpcResponse](baseUrl: String, urlOrToken: String)(f: (FullGroup, GroupInviteToken) ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { val extractedToken = if (urlOrToken.startsWith(baseUrl)) { urlOrToken.drop(genInviteUrl(baseUrl).length).takeWhile(c ⇒ c != '?' && c != '#') } else { urlOrToken } extractedToken.isEmpty match { case false ⇒ (for { token ← GroupInviteTokenRepo.findByToken(extractedToken) group ← token.map(gt ⇒ GroupRepo.findFull(gt.groupId)).getOrElse(DBIO.successful(None)) } yield for (g ← group; t ← token) yield (g, t)).flatMap { case Some((g, t)) ⇒ f(g, t) case None ⇒ DBIO.successful(Error(InvalidToken)) } case true ⇒ DBIO.successful(Error(InvalidToken)) } } def withKickableGroupMember[R <: RpcResponse]( groupOutPeer: ApiGroupOutPeer, kickUserOutPeer: ApiUserOutPeer )(f: FullGroup ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { withGroupOutPeer(groupOutPeer) { group ⇒ GroupUserRepo.find(group.id, kickUserOutPeer.userId).flatMap { case Some(GroupUser(_, _, inviterUserId, _, _, _)) ⇒ if (kickUserOutPeer.userId != client.userId && (inviterUserId == client.userId || group.creatorUserId == client.userId)) { f(group) } else { DBIO.successful(Error(CommonRpcErrors.forbidden("You are permitted to kick this user."))) } case None ⇒ DBIO.successful(Error(RpcError(404, "USER_NOT_FOUND", "User is not a group member.", false, None))) } } } def withPublicGroup[R <: RpcResponse](groupOutPeer: ApiGroupOutPeer)(f: FullGroup ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { withGroupOutPeer(groupOutPeer) { group ⇒ if (group.isPublic) { f(group) } else { DBIO.successful(Error(RpcError(400, "GROUP_IS_NOT_PUBLIC", "The group is not public.", false, None))) } } } def genInviteUrl(baseUrl: String, token: String = "") = s"$baseUrl/join/$token" def withGroupOutPeerF[R <: RpcResponse](groupOutPeer: ApiGroupOutPeer)(f: FullGroup ⇒ Future[RpcError Xor R])( implicit ec: ExecutionContext, system: ActorSystem ) = DbExtension(system).db.run(withGroupOutPeer(groupOutPeer)(fg ⇒ DBIO.from(f(fg)))) def withGroupOutPeer[R <: RpcResponse](groupOutPeer: ApiGroupOutPeer)(f: FullGroup ⇒ DBIO[RpcError Xor R])(implicit ec: ExecutionContext): DBIO[RpcError Xor R] = { GroupRepo.findFull(groupOutPeer.groupId) flatMap { case Some(group) ⇒ if (group.accessHash != groupOutPeer.accessHash) { DBIO.successful(Error(InvalidAccessHash)) } else { f(group) } case None ⇒ DBIO.successful(Error(CommonRpcErrors.GroupNotFound)) } } def withGroupOutPeers[R <: RpcResponse](groupOutPeers: Seq[ApiGroupOutPeer])(f: ⇒ DBIO[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[RpcError Xor R] = { val checkOptsFutures = groupOutPeers map { case ApiGroupOutPeer(groupId, accessHash) ⇒ DBIO.from(ACLUtils.checkOutPeer(ApiOutPeer(ApiPeerType.Group, groupId, accessHash), client.authId) map (Some(_))) } renderCheckResult(checkOptsFutures, f) } def withGroupOutPeersF[R <: RpcResponse](groupOutPeers: Seq[ApiGroupOutPeer])(f: ⇒ Future[RpcError Xor R])( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): Future[RpcError Xor R] = DbExtension(actorSystem).db.run(withGroupOutPeers(groupOutPeers)(DBIO.from(f))) private def checkUserPeer(userId: Int, accessHash: Long)( implicit client: AuthorizedClientData, actorSystem: ActorSystem, ec: ExecutionContext ): DBIO[Option[Boolean]] = { for { userOpt ← UserRepo.find(userId) } yield { userOpt map (u ⇒ ACLUtils.userAccessHash(client.authId, u.id, u.accessSalt) == accessHash) } } private def renderCheckResult[R <: RpcResponse](checkOptsActions: Seq[DBIO[Option[Boolean]]], f: ⇒ DBIO[RpcError Xor R])(implicit ec: ExecutionContext): DBIO[RpcError Xor R] = { DBIO.sequence(checkOptsActions) flatMap { checkOpts ⇒ if (checkOpts.contains(None)) { DBIO.successful(Error(RpcError(404, "PEER_NOT_FOUND", "Peer not found.", false, None))) } else if (checkOpts.flatten.contains(false)) { DBIO.successful(Error(RpcError(401, "ACCESS_HASH_INVALID", "Invalid access hash.", false, None))) } else { f } } } }
ljshj/actor-platform
actor-server/actor-rpc-api/src/main/scala/im/actor/api/rpc/PeerHelpers.scala
Scala
mit
9,639
package pl.tk.scalaconstructs.typeclasses import java.net.URL import java.nio.file.{Paths, Path} /** * Created by tomaszk on 17.02.14. */ object ImplicitClasses { implicit class UrlWithDownloadMethod(url:URL) { def download(where:Path) { println(s"Downloading $url to ${where.toAbsolutePath}") } } def main(args: Array[String]) { val u = new URL("http://www.fake.com/some_image.jpg") u.download(Paths.get(".")) } }
almendar/scala-constructs-learn
src/main/scala/pl/tk/scalaconstructs/typeclasses/ImplicitClasses.scala
Scala
apache-2.0
482