Dataset columns:
  code        string, length 5 to 1M
  repo_name   string, length 5 to 109
  path        string, length 6 to 208
  language    string, 1 distinct value
  license     string, 15 distinct values
  size        int64, 5 to 1M
package io.mpjsons.impl.deserializer

import io.mpjsons.JsonTypeDeserializer
import io.mpjsons.impl.StringIterator
import io.mpjsons.impl.util.{ObjectConstructionUtil, TypesUtil}

import scala.reflect.runtime.universe._

class SingletonObjectDeserializer(tpe: Type) extends JsonTypeDeserializer[AnyRef] {

  override def deserialize(jsonIterator: StringIterator): AnyRef = {
    jsonIterator.consumeObjectStart()
    val instance = ObjectConstructionUtil.retrieveObjectInstance(TypesUtil.getClassFromType(tpe))
    if (jsonIterator.currentChar == '}') {
      jsonIterator.nextCharOrNullIfLast
      instance
    } else {
      throw new IllegalArgumentException("Unexpected value in deserialized 'object'. Object should be serialized to {} only.")
    }
  }
}
marpiec/mpjsons
src/main/scala/io/mpjsons/impl/deserializer/SingletonObjectDeserializer.scala
Scala
apache-2.0
764
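A minimal, self-contained sketch of the idea behind the SingletonObjectDeserializer above: a Scala `object` carries no state, so its only legal JSON encoding is the empty object "{}". The mpjsons types (StringIterator, ObjectConstructionUtil, TypesUtil) are not used here; the helper below is a hypothetical stand-in for illustration, not the library's API.

object SingletonJsonSketch {
  object MySingleton

  // Hypothetical stand-in for the library's deserializer: accept only "{}" and
  // hand back the pre-existing singleton instance instead of constructing anything.
  def deserializeSingleton[A](json: String, instance: A): A =
    if (json.trim == "{}") instance
    else throw new IllegalArgumentException(
      s"Unexpected value '${json.trim}'. Object should be serialized to {} only.")

  def main(args: Array[String]): Unit =
    println(deserializeSingleton("{}", MySingleton) eq MySingleton) // true: same instance is returned
}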
/* * Copyright (c) 2014-2016 Snowplow Analytics Ltd. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, and * you may not use this file except in compliance with the Apache License * Version 2.0. You may obtain a copy of the Apache License Version 2.0 at * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the Apache License Version 2.0 is distributed on an "AS * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the Apache License Version 2.0 for the specific language * governing permissions and limitations there under. */ package com.snowplowanalytics.snowplow package collectors package scalastream // Scala import scala.collection.mutable.MutableList // Akka import akka.actor.{ActorSystem, Props} // Specs2 and Spray testing import org.specs2.matcher.AnyMatchers import org.specs2.mutable.Specification import org.specs2.specification.{Scope,Fragments} import spray.testkit.Specs2RouteTest // Spray import spray.http.{DateTime, HttpHeader, HttpRequest, HttpCookie, RemoteAddress} import spray.http.HttpHeaders.{ Cookie, `Set-Cookie`, `Remote-Address`, `Raw-Request-URI` } // Config import com.typesafe.config.{ConfigFactory,Config,ConfigException} // Thrift import org.apache.thrift.TDeserializer // Snowplow import sinks._ import CollectorPayload.thrift.model1.CollectorPayload class PostSpec extends Specification with Specs2RouteTest with AnyMatchers { val testConf: Config = ConfigFactory.parseString(""" collector { interface = "0.0.0.0" port = 8080 production = true p3p { policyref = "/w3c/p3p.xml" CP = "NOI DSP COR NID PSA OUR IND COM NAV STA" } cookie { enabled = true expiration = 365 days name = sp domain = "test-domain.com" } sink { enabled = "test" kinesis { aws { access-key: "cpf" secret-key: "cpf" } stream { region: "us-east-1" good: "snowplow_collector_example" bad: "snowplow_collector_example" } backoffPolicy { minBackoff: 3000 # 3 seconds maxBackoff: 600000 # 5 minutes } } kafka { brokers: "localhost:9092" topic { good: "good-topic" bad: "bad-topic" } } buffer { byte-limit: 4000000 # 4MB record-limit: 500 # 500 records time-limit: 60000 # 1 minute } } } """) val collectorConfig = new CollectorConfig(testConf) val sink = new TestSink val sinks = CollectorSinks(sink, sink) val responseHandler = new ResponseHandler(collectorConfig, sinks) val collectorService = new CollectorService(collectorConfig, responseHandler, system) val thriftDeserializer = new TDeserializer // By default, spray will always add Remote-Address to every request // when running with the `spray.can.server.remote-address-header` // option. However, the testing does not read this option and a // remote address always needs to be set. def CollectorPost(uri: String, cookie: Option[`HttpCookie`] = None, remoteAddr: String = "127.0.0.1") = { val headers: MutableList[HttpHeader] = MutableList(`Remote-Address`(remoteAddr),`Raw-Request-URI`(uri)) cookie.foreach(headers += `Cookie`(_)) Post(uri).withHeaders(headers.toList) } "Snowplow's Scala collector" should { "return a cookie expiring at the correct time" in { CollectorPost("/com.snowplowanalytics.snowplow/tp2") ~> collectorService.collectorRoute ~> check { headers must not be empty val httpCookies: List[HttpCookie] = headers.collect { case `Set-Cookie`(hc) => hc } httpCookies must not be empty // Assume we only return a single cookie. 
// If the collector is modified to return multiple cookies, // this will need to be changed. val httpCookie = httpCookies(0) httpCookie.name must beEqualTo(collectorConfig.cookieName.get) httpCookie.name must beEqualTo("sp") httpCookie.path must beSome("/") httpCookie.domain must beSome httpCookie.domain.get must be(collectorConfig.cookieDomain.get) httpCookie.expires must beSome httpCookie.content.matches("""[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}""") val expiration = httpCookie.expires.get val offset = expiration.clicks - collectorConfig.cookieExpiration.get - DateTime.now.clicks offset.asInstanceOf[Int] must beCloseTo(0, 2000) // 1000 ms window. } } "return a cookie containing nuid query parameter" in { CollectorPost("/com.snowplowanalytics.snowplow/tp2?nuid=UUID_Test_New") ~> collectorService.collectorRoute ~> check { headers must not be empty val httpCookies: List[HttpCookie] = headers.collect { case `Set-Cookie`(hc) => hc } httpCookies must not be empty // Assume we only return a single cookie. // If the collector is modified to return multiple cookies, // this will need to be changed. val httpCookie = httpCookies(0) httpCookie.name must beEqualTo(collectorConfig.cookieName.get) httpCookie.name must beEqualTo("sp") httpCookie.path must beSome("/") httpCookie.domain must beSome httpCookie.domain.get must be(collectorConfig.cookieDomain.get) httpCookie.expires must beSome httpCookie.content must beEqualTo("UUID_Test_New") val expiration = httpCookie.expires.get val offset = expiration.clicks - collectorConfig.cookieExpiration.get - DateTime.now.clicks offset.asInstanceOf[Int] must beCloseTo(0, 3600000) // 1 hour window. } } "return the same cookie as passed in" in { CollectorPost("/com.snowplowanalytics.snowplow/tp2", Some(HttpCookie(collectorConfig.cookieName.get, "UUID_Test"))) ~> collectorService.collectorRoute ~> check { val httpCookies: List[HttpCookie] = headers.collect { case `Set-Cookie`(hc) => hc } // Assume we only return a single cookie. // If the collector is modified to return multiple cookies, // this will need to be changed. val httpCookie = httpCookies(0) httpCookie.content must beEqualTo("UUID_Test") } } "override cookie with nuid parameter" in { CollectorPost("/com.snowplowanalytics.snowplow/tp2?nuid=UUID_Test_New", Some(HttpCookie("sp", "UUID_Test"))) ~> collectorService.collectorRoute ~> check { val httpCookies: List[HttpCookie] = headers.collect { case `Set-Cookie`(hc) => hc } // Assume we only return a single cookie. // If the collector is modified to return multiple cookies, // this will need to be changed. 
val httpCookie = httpCookies(0) httpCookie.content must beEqualTo("UUID_Test_New") } } "return a P3P header" in { CollectorPost("/com.snowplowanalytics.snowplow/tp2") ~> collectorService.collectorRoute ~> check { val p3pHeaders = headers.filter { h => h.name.equals("P3P") } p3pHeaders.size must beEqualTo(1) val p3pHeader = p3pHeaders(0) val policyRef = collectorConfig.p3pPolicyRef val CP = collectorConfig.p3pCP p3pHeader.value must beEqualTo( "policyref=\\"%s\\", CP=\\"%s\\"".format(policyRef, CP)) } } "store the expected event as a serialized Thrift object in the enabled sink" in { val payloadData = "param1=val1&param2=val2" val storedRecordBytes = responseHandler.cookie(payloadData, null, None, None, "localhost", RemoteAddress("127.0.0.1"), new HttpRequest(), None, "/com.snowplowanalytics.snowplow/tp2", false)._2 val storedEvent = new CollectorPayload this.synchronized { thriftDeserializer.deserialize(storedEvent, storedRecordBytes.head) } storedEvent.timestamp must beCloseTo(DateTime.now.clicks, 60000) storedEvent.encoding must beEqualTo("UTF-8") storedEvent.ipAddress must beEqualTo("127.0.0.1") storedEvent.collector must beEqualTo("ssc-0.9.0-test") storedEvent.path must beEqualTo("/com.snowplowanalytics.snowplow/tp2") storedEvent.querystring must beEqualTo(payloadData) } } }
Propertyfinder/snowplow
2-collectors/scala-stream-collector/src/test/scala/com.snowplowanalytics.snowplow.collectors.scalastream/PostSpec.scala
Scala
apache-2.0
8,426
/* * Copyright 2017 PayPal * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.squbs.cluster import java.util.concurrent.atomic.AtomicBoolean import akka.actor._ import akka.util.ByteString import com.typesafe.scalalogging.LazyLogging import org.apache.curator.framework.CuratorFramework import org.apache.curator.framework.api.CuratorWatcher import org.apache.zookeeper.Watcher.Event.EventType import org.apache.zookeeper.{CreateMode, WatchedEvent} import scala.collection.JavaConverters._ import scala.language.postfixOps import scala.util.control.NonFatal import scala.util.{Failure, Try} private[cluster] case class ZkRebalance(planedPartitions: Map[ByteString, ZkPartitionData]) private[cluster] case class ZkPartitionsChanged(segment:String, partitions: Map[ByteString, ZkPartitionData]) private[cluster] case class ZkResizePartition(partitionKey: ByteString, size: Int) private[cluster] case class ZkSegmentChanged(segment: String, changes: Set[ByteString]) private[cluster] case class ZkOnBoardPartitions(onBoards: Set[ByteString]) private[cluster] case class ZkDropOffPartitions(dropOffs: Set[ByteString]) /** * The major responsibility of ZkPartitionsManager is to maintain partitions */ private[cluster] class ZkPartitionsManager extends Actor with Stash with LazyLogging { private[this] val zkCluster = ZkCluster(context.system) import zkCluster._ private[this] implicit val segLogic = segmentationLogic import segLogic._ import ZkPartitionsManager._ private[this] implicit val log = logger private[this] var segmentsToPartitions = Map.empty[String, Set[ByteString]] private[this] var partitionWatchers = Map.empty[String, CuratorWatcher] private[this] val stopped = new AtomicBoolean(false) override def postStop(): Unit = stopped set true private def initialize()(implicit curatorFwk: CuratorFramework) = { segmentsToPartitions = curatorFwk.getChildren.forPath("/segments").asScala.map{ segment => segment -> watchOverSegment(segment) }.toMap } private def watchOverSegment(segment:String)(implicit curatorFwk: CuratorFramework) = { val segmentZkPath = s"/segments/${keyToPath(segment)}" //watch over changes of creation/removal of any partition (watcher over /partitions) lazy val segmentWatcher: CuratorWatcher = new CuratorWatcher { override def process(event: WatchedEvent): Unit = { event.getType match { case EventType.NodeChildrenChanged if !stopped.get => self ! 
ZkSegmentChanged( segment, curatorFwk.getChildren.usingWatcher(segmentWatcher).forPath(segmentZkPath) .asScala .map { p => ByteString(pathToKey(p)) }.toSet ) case _ => } } } //watch over changes of members of a partition (watcher over /partitions/some-partition) lazy val partitionWatcher: CuratorWatcher = new CuratorWatcher { override def process(event: WatchedEvent): Unit = { event.getType match { case EventType.NodeDataChanged if !stopped.get => val sectors = event.getPath.split("[/]") val partitionKey = ByteString(pathToKey(sectors(sectors.length - 2))) sectors(sectors.length - 1) match { case "servants" | "$size" => watchOverPartition(segment, partitionKey, partitionWatcher) foreach { partitionData => whenPartitionChanged(segment, partitionData) } case _ => } case _ => } } } partitionWatchers += segment -> partitionWatcher //initialize with the current set of partitions curatorFwk.getChildren.usingWatcher(segmentWatcher).forPath(segmentZkPath).asScala.map{p => val partitionKey = ByteString(pathToKey(p)) partitionKey -> watchOverPartition(segment, partitionKey, partitionWatcher) }.collect{ case (partitionKey, Some(partitionData)) => partitionKey }.toSet } private def watchOverPartition(segment: String, partitionKey: ByteString, partitionWatcher: CuratorWatcher) (implicit curatorFwk: CuratorFramework): Option[ZkPartitionData] = { Try { guarantee(servantsOfParZkPath(partitionKey), None, CreateMode.PERSISTENT) guarantee(sizeOfParZkPath(partitionKey), None, CreateMode.PERSISTENT) val servants = curatorFwk.getData.usingWatcher(partitionWatcher).forPath(servantsOfParZkPath(partitionKey)).toAddressSet val expectedSize = curatorFwk.getData.usingWatcher(partitionWatcher).forPath(sizeOfParZkPath(partitionKey)).toInt ZkPartitionData(partitionKey, servants, partitionSize(partitionKey), expectedSize) } recoverWith { case NonFatal(t) => log.error("partitions refresh failed due to unknown reason: {}", t.getMessage) Failure(t) } toOption } private def whenPartitionChanged(segment: String, change: ZkPartitionData) = { log.debug("[partitions] partitions change detected from zk: {}", keyToPath(change.partitionKey) -> change ) zkClusterActor ! ZkPartitionsChanged(segment, Map(change.partitionKey -> change)) } def receive: Receive = receiveZkClientUpdate lazy val receiveZkClientUpdate: Receive = { case ZkClientUpdated(updated) => implicit val curatorFwk = updated initialize() context become { receiveZkClientUpdate orElse receivePartitionChange() } } def receivePartitionChange()(implicit curatorFwk: CuratorFramework): Receive = { case ZkSegmentChanged(segment, changes) => log.debug("[partitions] segment change detected from zk: {}", segment -> (changes map (keyToPath(_)))) val onBoardPartitions = changes.diff(segmentsToPartitions.getOrElse(segment, Set.empty)) .map(partitionKey => partitionKey -> watchOverPartition(segment, partitionKey, partitionWatchers(segment))) .collect{case (key, Some(partition)) => key -> partition}.toMap val dropOffPartitions = segmentsToPartitions.getOrElse(segment, Set.empty) diff changes segmentsToPartitions += (segment -> changes) log.info("[partitions] create partitions {}, remove partitions {}", onBoardPartitions.map(entry => keyToPath(entry._1)), dropOffPartitions.map(entry => keyToPath(entry)) ) if (onBoardPartitions.nonEmpty) { zkClusterActor ! ZkPartitionsChanged(segment, onBoardPartitions) } if (dropOffPartitions.nonEmpty) { zkClusterActor ! 
ZkPartitionsChanged(segment, dropOffPartitions.map(key => key -> ZkPartitionData(key, expectedSize = 0)).toMap ) } case ZkRebalance(updates) => log.info("[partitions] update partitions based on plan:{}", updates.values) updates foreach { entry => updatePartition(entry._1, entry._2)} case ZkRemovePartition(partitionKey) => log.debug("[partitions] remove partition {}", keyToPath(partitionKey)) safelyDiscard(partitionZkPath(partitionKey)) sender ! ZkPartitionRemoval(partitionKey) case ZkResizePartition(partitionKey, size) => guarantee(sizeOfParZkPath(partitionKey), Some(size), CreateMode.PERSISTENT) } private def updatePartition(partitionKey: ByteString, partitionData: ZkPartitionData) (implicit curatorFwk: CuratorFramework) = { guarantee(partitionZkPath(partitionKey), Some(partitionData.props), CreateMode.PERSISTENT) guarantee(servantsOfParZkPath(partitionKey), Some(partitionData.members), CreateMode.PERSISTENT) if (partitionData.expectedSize != partitionSize(partitionKey)) { guarantee(sizeOfParZkPath(partitionKey), Some(partitionData.expectedSize), CreateMode.PERSISTENT) } } } object ZkPartitionsManager { def loadPartitions()(implicit zkClient: CuratorFramework, segmentationLogic: SegmentationLogic): Map[ByteString, ZkPartitionData] = { import segmentationLogic._ zkClient.getChildren.forPath("/segments").asScala.flatMap { segment => zkClient.getChildren.forPath(s"/segments/$segment").asScala }.map { key => val parKey = ByteString(pathToKey(key)) val size = partitionSize(parKey) val members = partitionServants(parKey) val props = Try(zkClient.getData.forPath(partitionZkPath(parKey))) getOrElse Array.empty parKey -> ZkPartitionData(parKey, members, size, props) }.toMap } private def partitionServants(partitionKey: ByteString) (implicit zkClient: CuratorFramework, segmentationLogic: SegmentationLogic): Set[Address] = { import segmentationLogic._ Try { zkClient.getData.forPath(servantsOfParZkPath(partitionKey)).toAddressSet } getOrElse Set.empty } private def partitionSize(partitionKey: ByteString) (implicit zkClient: CuratorFramework, segmentationLogic: SegmentationLogic): Int = { import segmentationLogic._ Try { zkClient.getData.forPath(sizeOfParZkPath(partitionKey)).toInt } getOrElse 0 } }
anilgursel/squbs
squbs-zkcluster/src/main/scala/org/squbs/cluster/ZkPartitionsManager.scala
Scala
apache-2.0
9,717
/*
 * Derived from https://github.com/spray/spray/blob/v1.1-M7/spray-http/src/main/scala/spray/http/parser/CookieHeaders.scala
 *
 * Copyright (C) 2011-2012 spray.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.http4s
package parser

import java.time.Instant

import org.parboiled2._
import org.http4s.headers.`Set-Cookie`
import shapeless.{HNil, ::}

private[parser] trait CookieHeader {

  def SET_COOKIE(value: String): ParseResult[`Set-Cookie`] = new SetCookieParser(value).parse

  def COOKIE(value: String): ParseResult[headers.Cookie] = new CookieParser(value).parse

  // scalastyle:off public.methods.have.type
  private class SetCookieParser(input: ParserInput) extends BaseCookieParser[`Set-Cookie`](input) {
    def entry: Rule1[`Set-Cookie`] = rule {
      CookiePair ~ zeroOrMore(";" ~ OptWS ~ CookieAttrs) ~ EOI ~> (`Set-Cookie`(_))
    }
  }

  private class CookieParser(input: ParserInput) extends BaseCookieParser[headers.Cookie](input) {
    def entry: Rule1[headers.Cookie] = rule {
      oneOrMore(CookiePair).separatedBy(";" ~ OptWS) ~ EOI ~> {xs: Seq[Cookie] => headers.Cookie(xs.head, xs.tail: _*)}
    }
  }

  private abstract class BaseCookieParser[H <: Header](input: ParserInput) extends Http4sHeaderParser[H](input) {

    def CookiePair = rule {
      Token ~ ch('=') ~ CookieValue ~> (Cookie(_, _))
    }

    def CookieValue: Rule1[String] = rule {
      (('"' ~ capture(zeroOrMore(CookieOctet)) ~ "\"") | (capture(zeroOrMore(CookieOctet)))) ~ OptWS
    }

    def CookieOctet = rule {
      "\u003c" - "\u005b" |
        "\u005d" - "\u007e" |
        '\u0021' |
        "\u0023" - "\u002b" |
        "\u002d" - "\u003a"
    }

    def CookieAttrs: Rule[Cookie :: HNil, Cookie :: HNil] = rule {
      "Expires=" ~ HttpDate ~> { (cookie: Cookie, dateTime: Instant) => cookie.copy(expires = Some(dateTime)) } |
      "Max-Age=" ~ NonNegativeLong ~> { (cookie: Cookie, seconds: Long) => cookie.copy(maxAge = Some(seconds)) } |
      "Domain=" ~ DomainName ~> { (cookie: Cookie, domainName: String) => cookie.copy(domain = Some(domainName)) } |
      "Path=" ~ StringValue ~> { (cookie: Cookie, pathValue: String) => cookie.copy(path = Some(pathValue)) } |
      // TODO: Capture so we can create the rule, but there must be a better way
      capture("Secure") ~> { (cookie: Cookie, s: String) => cookie.copy(secure = true) } |
      capture("HttpOnly") ~> { (cookie: Cookie, s: String) => cookie.copy(httpOnly = true) } |
      StringValue ~> { (cookie: Cookie, stringValue: String) => cookie.copy(extension = Some(stringValue)) }
    }

    def NonNegativeLong: Rule1[Long] = rule { capture(oneOrMore(Digit)) ~> { s: String => s.toLong } }

    def DomainName: Rule1[String] = rule { capture(oneOrMore(DomainNamePart).separatedBy('.')) }

    def DomainNamePart: Rule0 = rule { AlphaNum ~ zeroOrMore(AlphaNum | ch('-')) }

    def StringValue: Rule1[String] = rule { capture(oneOrMore((!(CTL | ch(';'))) ~ Char)) }
  }
  // scalastyle:on public.methods.have.type
}
hvesalai/http4s
core/src/main/scala/org/http4s/parser/CookieHeader.scala
Scala
apache-2.0
3,545
// see ticket #3651
object Test {
  def main(args: Array[String]): Unit = {
    val s = new Extended("s")
    println(s.foo) // works

    val i = new Extended(1)
    println(i.foo) // infinite loop with StackOverflowError

    println(runtime.BoxesRunTime.integerBoxCount)
  }
}

class Base[@specialized(Int) T](val t: T) {
  def foo(): T = t
}

class Extended[@specialized(Int) T](t: T) extends Base[T](t) {
  override def foo(): T = super.foo
}
martijnhoekstra/scala
test/files/specialized/spec-super.scala
Scala
apache-2.0
447
package breeze.stats.distributions /* Copyright 2009 David Hall, Daniel Ramage Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import scala.collection.mutable.ArrayBuffer import collection.TraversableLike import collection.generic.CanBuildFrom import breeze.linalg.DenseVector import org.apache.commons.math3.random.{MersenneTwister, RandomGenerator} import java.util.concurrent.atomic.AtomicInteger import scala.reflect.ClassTag import spire.implicits.cfor /** * A trait for monadic distributions. Provides support for use in for-comprehensions * @author dlwh */ trait Rand[@specialized(Int, Double) +T] extends Serializable { outer => /** * Gets one sample from the distribution. Equivalent to sample() */ def draw() : T def get() = draw() /** Overridden by filter/map/flatmap for monadic invocations. Basically, rejeciton samplers will return None here */ def drawOpt():Option[T] = Some(draw()) /** * Gets one sample from the distribution. Equivalent to get() */ def sample() = get() /** * Gets n samples from the distribution. */ def sample(n : Int) = IndexedSeq.fill(n)(draw()) /** * An infinitely long iterator that samples repeatedly from the Rand * @return an iterator that repeatedly samples */ def samples:Iterator[T] = new Iterator[T] { def hasNext = true def next() = get() } /** * Return a vector of samples. */ def samplesVector[U >: T](size: Int)(implicit m: ClassTag[U]): DenseVector[U] = { val result = new DenseVector[U](new Array[U](size)) cfor(0)(i => i < size, i => i+1)(i => { result(i) = draw() }) result } /** * Converts a random sampler of one type to a random sampler of another type. * Examples: * randInt(10).flatMap(x => randInt(3 * x.asInstanceOf[Int]) gives a Rand[Int] in the range [0,30] * Equivalently, for(x &lt;- randInt(10); y &lt;- randInt(30 *x)) yield y * * @param f the transform to apply to the sampled value. * */ def flatMap[E](f : T => Rand[E] ):Rand[E] = FlatMappedRand(outer, f) /** * Converts a random sampler of one type to a random sampler of another type. * Examples: * uniform.map(_*2) gives a Rand[Double] in the range [0,2] * Equivalently, for(x &lt;- uniform) yield 2*x * * @param f the transform to apply to the sampled value. * */ def map[E](f : T=>E):Rand[E] = MappedRand(outer, f) /** * Samples one element and qpplies the provided function to it. * Despite the name, the function is applied once. Sample usage: * <pre> for(x &lt;- Rand.uniform) { println(x) } </pre> * * @param f the function to be applied */ def foreach(f : T=>Unit) = f(get()) def filter(p: T=>Boolean) = condition(p) def withFilter(p: T=>Boolean) = condition(p) // Not the most efficient implementation ever, but meh. 
def condition(p : T => Boolean):Rand[T] = SinglePredicateRand[T](outer, p) } private final case class MappedRand[@specialized(Int, Double) T, @specialized(Int, Double) U](rand: Rand[T], func: T => U) extends Rand[U] { def draw() = func(rand.draw()) override def drawOpt() = rand.drawOpt().map(func) override def map[E](f : U=>E):Rand[E] = MappedRand(rand, (x:T) => f(func(x))) } private final case class FlatMappedRand[@specialized(Int, Double) T, @specialized(Int, Double) U](rand: Rand[T], func: T => Rand[U]) extends Rand[U] { def draw() = func(rand.draw()).draw() override def drawOpt() = rand.drawOpt().flatMap(x => func(x).drawOpt()) override def flatMap[E](f: U => Rand[E]): Rand[E] = FlatMappedRand(rand, (x:T) => f(func(x).draw())) } private trait PredicateRandDraws[@specialized(Int, Double) T] extends Rand[T] { protected val rand: Rand[T] protected def predicate(x: T): Boolean def draw() = { // Not the most efficient implementation ever, but meh. var x = rand.draw() while(!predicate(x)) { x = rand.draw() } x } override def drawOpt() = { val x = rand.get() if (predicate(x)) { Some(x) } else { None } } } private final case class SinglePredicateRand[@specialized(Int, Double) T](rand: Rand[T], pred: T => Boolean) extends PredicateRandDraws[T] { protected final def predicate(x: T): Boolean = pred(x) override def condition(p: T => Boolean): Rand[T] = { val newPredicates = new Array[T => Boolean](2) newPredicates(0) = pred newPredicates(1) = p MultiplePredicatesRand(rand, newPredicates) } } private final case class MultiplePredicatesRand[@specialized(Int, Double) T](rand: Rand[T], private val predicates: Array[T => Boolean]) extends PredicateRandDraws[T] { override def condition(p: T => Boolean): Rand[T] = { val newPredicates = new Array[T => Boolean](predicates.size + 1) cfor(0)(i => i < predicates.size, i => i+1)(i => { newPredicates(i) = predicates(i) }) newPredicates(predicates.size) = p MultiplePredicatesRand(rand, newPredicates) } protected final def predicate(x:T) = { var result: Boolean = true var i=0 while ((i < predicates.size) && result) { result = result && predicates(i)(x) i = i + 1 } result } } /** * Provides standard combinators and such to use * to compose new Rands. */ class RandBasis(val generator: RandomGenerator) extends Serializable { /** * Chooses an element from a collection. 
*/ def choose[T](c: Iterable[T]):Rand[T] = new Rand[T] { def draw() = { val sz = uniform.get * c.size val elems = c.iterator var i = 1 var e = elems.next() while(i < sz) { e = elems.next() i += 1 } e } } def choose[T](c : Seq[T]) = randInt(c.size).map( c(_)) /** * The trivial random generator: always returns the argument */ def always[T](t : T):Rand[T] = new Rand[T] { def draw = t } /** * Simply reevaluate the body every time get is called */ def fromBody[T](f : =>T):Rand[T] = new Rand[T] { def draw = f } /** * Convert a Collection of Rand[T] into a Rand[Collection[T]] */ def promote[T, CC[X] <: Traversable[X] with TraversableLike[X, CC[X]]] (col : CC[Rand[T]])(implicit cbf: CanBuildFrom[CC[Rand[T]], T, CC[T]]):Rand[CC[T]] = fromBody(col.map(_.get)) /** * Convert an Seq of Rand[T] into a Rand[Seq[T]] */ def promote[U](col : Seq[Rand[U]]) = fromBody(col.map(_.get)) def promote[T1,T2](t : (Rand[T1],Rand[T2])) = fromBody( (t._1.get,t._2.get)) def promote[T1,T2,T3](t : (Rand[T1],Rand[T2],Rand[T3])) = fromBody( (t._1.get,t._2.get,t._3.get)) def promote[T1,T2,T3,T4](t : (Rand[T1],Rand[T2],Rand[T3],Rand[T4])) = fromBody( (t._1.get,t._2.get,t._3.get,t._4.get)) /** * Uniformly samples in [0,1] */ val uniform:Rand[Double] = new Rand[Double] { def draw = generator.nextDouble } /** * Uniformly samples an integer in [0,MAX_INT] */ val randInt:Rand[Int] = new Rand[Int] { def draw = generator.nextInt & Int.MaxValue } /** * Uniformly samples an integer in [0,n) */ def randInt(n : Int):Rand[Int] = new Rand[Int] { def draw = generator.nextInt(n) } /** * Uniformly samples an integer in [n,m) */ def randInt(n : Int, m: Int):Rand[Int] = new Rand[Int] { def draw = generator.nextInt(m-n)+n } /** * Uniformly samples a long integer in [0,MAX_LONG] */ val randLong: Rand[Long] = new Rand[Long] { def draw = generator.nextLong & Long.MaxValue } /** * Uniformly samples a long integer in [0,n) */ def randLong(n: Long): Rand[Long] = new Rand[Long] { def draw = { val value = generator.nextLong & Long.MaxValue value % n } } /** * Uniformly samples a long integer in [n,m) */ def randLong(n: Long, m: Long): Rand[Long] = new Rand[Long] { def draw = { val value = generator.nextLong & Long.MaxValue value % (m - n) + n } } /** * Samples a gaussian with 0 mean and 1 std */ val gaussian :Rand[Double] = new Rand[Double] { def draw = generator.nextGaussian } /** * Samples a gaussian with m mean and s std */ def gaussian(m : Double, s : Double): Rand[Double] = new Rand[Double] { def draw = m + s * gaussian.get } /** * Implements the Knuth shuffle of numbers from 0 to n. */ def permutation(n : Int):Rand[IndexedSeq[Int]] = new Rand[IndexedSeq[Int]] { def draw = { val arr = new ArrayBuffer[Int]() arr ++= (0 until n) var i = n while(i > 1) { val k = generator.nextInt(i) i -= 1 val tmp = arr(i) arr(i) = arr(k) arr(k) = tmp } arr } } /** * Knuth shuffle of a subset of size n from a set */ def subsetsOfSize[T](set: IndexedSeq[T], n: Int):Rand[IndexedSeq[T]] = new Rand[IndexedSeq[T]] { def draw = { val arr = Array.range(0,set.size) var i = 0 while( i < n.min(set.size)) { val k = generator.nextInt(set.size-i) + i val temp = arr(i) arr(i) = arr(k) arr(k) = temp i+=1 } arr.take(n).map(set) } } } /** * Provides a number of random generators. */ object Rand extends RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister())) object RandBasis { /** * Returns a new MersenneTwister-backed rand basis with seed set to 0. Note that * if multiple threads use this, each thread gets a new generator with an increasing random * seed. 
* @return */ def mt0: RandBasis = withSeed(0) /** * Returns a new MersenneTwister-backed rand basis with seed set to a specific value * if multiple threads use this, each thread gets a new generator with an increasing random (starting from seed) */ def withSeed(seed: Int): RandBasis = { val int = new AtomicInteger(seed) new RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister(int.getAndIncrement()))) } }
crealytics/breeze
math/src/main/scala/breeze/stats/distributions/Rand.scala
Scala
apache-2.0
10,374
package geotrellis.network.graph

import geotrellis.network._

import spire.syntax.cfor._

sealed abstract class VertexType

case object StationVertex extends VertexType {
  def apply(location: Location, name: String) = Vertex(location, name, StationVertex)
}

case object StreetVertex extends VertexType {
  def apply(location: Location, name: String) = Vertex(location, name, StreetVertex)
}

case class Vertex(location: Location, name: String, vertexType: VertexType) {
  override def toString = {
    s"V($name,$location)"
  }

  override def hashCode = location.hashCode

  override def equals(other: Any) = other match {
    case that: Vertex => this.location == that.location
    case _            => false
  }
}
flibbertigibbet/open-transit-indicators
scala/geotrellis-transit/src/main/scala/geotrellis/network/graph/Vertex.scala
Scala
gpl-3.0
717
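A brief usage note on the Vertex class above: equals and hashCode are defined on the location alone, so two vertices at the same location compare equal even when their names and vertex types differ. The sketch below illustrates this with a stand-in Location case class (the real geotrellis.network.Location type is not assumed here); it is illustrative only.

object VertexEqualitySketch {
  case class Location(lat: Double, long: Double) // stand-in, not geotrellis.network.Location

  case class Vertex(location: Location, name: String) {
    override def hashCode = location.hashCode
    override def equals(other: Any) = other match {
      case that: Vertex => this.location == that.location
      case _            => false
    }
  }

  def main(args: Array[String]): Unit = {
    val station = Vertex(Location(39.95, -75.18), "30th Street Station")
    val street  = Vertex(Location(39.95, -75.18), "Market St & 30th St")
    println(station == street)          // true: equality considers location only
    println(Set(station, street).size)  // 1: hashCode also ignores the name
  }
}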
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.samza.config

import scala.collection.JavaConversions._

import org.apache.samza.config.StorageConfig._
import org.junit.Assert.assertFalse
import org.junit.Assert.assertTrue
import org.junit.Test

class TestStorageConfig {
  @Test
  def testIsChangelogSystem {
    val configMap = Map[String, String](
      FACTORY.format("system1") -> "some.factory.Class",
      CHANGELOG_STREAM.format("system1") -> "system1.stream1",
      FACTORY.format("system2") -> "some.factory.Class")
    val config = new MapConfig(configMap)

    assertFalse(config.isChangelogSystem("system3"))
    assertFalse(config.isChangelogSystem("system2"))
    assertTrue(config.isChangelogSystem("system1"))
  }
}
vjagadish/samza-clone
samza-core/src/test/scala/org/apache/samza/config/TestStorageConfig.scala
Scala
apache-2.0
1,512
/* This file is part of Intake24. Copyright 2015, 2016 Newcastle University. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package uk.ac.ncl.openlab.intake24.services.dataexport.controllers import java.time._ import java.time.format.{DateTimeFormatter, DateTimeParseException} import java.time.temporal.ChronoUnit import cats.data.EitherT import cats.instances.future._ import javax.inject.Inject import org.slf4j.LoggerFactory import play.api.Configuration import play.api.mvc.{BaseController, ControllerComponents, PlayBodyParsers} import uk.ac.ncl.openlab.intake24.api.data.ErrorDescription import uk.ac.ncl.openlab.intake24.errors.AnyError import uk.ac.ncl.openlab.intake24.play.utils.{DatabaseErrorHandler, JsonBodyParser} import uk.ac.ncl.openlab.intake24.security.authorization.Intake24RestrictedActionBuilder import uk.ac.ncl.openlab.intake24.services.dataexport._ import uk.ac.ncl.openlab.intake24.services.dataexport.views.html.DataExportNotification import uk.ac.ncl.openlab.intake24.services.fooddb.admin.FoodGroupsAdminService import uk.ac.ncl.openlab.intake24.services.systemdb.Roles import uk.ac.ncl.openlab.intake24.services.systemdb.admin._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} import io.circe.generic.auto._ import scala.concurrent.duration.FiniteDuration case class NewExportTaskInfo(taskId: Long) case class NewScheduledTaskRequest(daysOfWeek: Int, time: LocalTime, timeZone: String, period: Option[Int], action: String, actionConfig: String) class SurveyDataExportController @Inject()(configuration: Configuration, service: DataExportService, surveyAdminService: SurveyAdminService, foodGroupsAdminService: FoodGroupsAdminService, dataExporter: SingleThreadedDataExporter, secureUrlService: SecureUrlService, exportScheduler: ScheduledDataExportService, ndnsGroupsCache: NdnsCompoundsFoodGroupsCache, emailSender: EmailSender, rab: Intake24RestrictedActionBuilder, playBodyParsers: PlayBodyParsers, jsonBodyParser: JsonBodyParser, userAdminService: UserAdminService, csvExportFormats: Map[String, SurveyCSVExporter], val controllerComponents: ControllerComponents, implicit val executionContext: ExecutionContext) extends BaseController with DatabaseErrorHandler { val logger = LoggerFactory.getLogger(classOf[SurveyDataExportController]) val urlValidityPeriod = configuration.get[FiniteDuration](s"intake24.dataExport.secureUrl.validityPeriod") def getSurveySubmissions(surveyId: String, userName: Option[String], dateFrom: Option[String], dateTo: Option[String], offset: Int, limit: Int) = rab.restrictToRoles(Roles.superuser, Roles.surveyAdmin, Roles.surveyStaff(surveyId))(playBodyParsers.empty) { _ => Future { try { val parsedFrom = dateFrom.map(ZonedDateTime.parse) val parsedTo = dateTo.map(ZonedDateTime.parse) val result = userName match { case Some(userName) => for ( userProfile <- userAdminService.getUserByAlias(SurveyUserAlias(surveyId, userName)); submissions <- service.getSurveySubmissions(surveyId, parsedFrom, parsedTo, offset, limit, Some(userProfile.id)) ) yield submissions case None 
=> service.getSurveySubmissions(surveyId, parsedFrom, parsedTo, offset, limit, None) } translateDatabaseResult(result) } catch { case e: DateTimeParseException => BadRequest(toJsonString(ErrorDescription("DateFormat", "Failed to parse date parameter. Expected a UTC date in ISO 8601 format, e.g. '2017-02-15T16:40:30Z'."))) } } } def getMySurveySubmissions(surveyId: String) = rab.restrictToRoles(Roles.surveyRespondent(surveyId))(playBodyParsers.empty) { request => Future { val respondentId = request.subject.userId val includeFoodGroups = request.getQueryString("compoundFoodGroups").isDefined try { if (includeFoodGroups) { val submissionsWithFoodGroups = for (submissions <- service.getSurveySubmissions(surveyId, None, None, 0, Int.MaxValue, Some(respondentId)); withFoodGroups <- ndnsGroupsCache.addFoodGroups(submissions)) yield withFoodGroups translateDatabaseResult(submissionsWithFoodGroups) } else translateDatabaseResult(service.getSurveySubmissions(surveyId, None, None, 0, Int.MaxValue, Some(respondentId))) } catch { case e: DateTimeParseException => BadRequest(toJsonString(ErrorDescription("DateFormat", "Failed to parse date parameter. Expected a UTC date in ISO 8601 format, e.g. '2017-02-15T16:40:30Z'."))) } } } def getSurveySubmissionsAsCSV(surveyId: String, dateFrom: String, dateTo: String, format: String) = rab.restrictToRoles(Roles.superuser, Roles.surveyAdmin, Roles.surveyStaff(surveyId))(playBodyParsers.empty) { request => Future { csvExportFormats.get(format) match { case Some(exporter) => try { val parsedFrom = ZonedDateTime.parse(dateFrom) val parsedTo = ZonedDateTime.parse(dateTo) val forceBOM = request.getQueryString("forceBOM").isDefined val data = for (params <- surveyAdminService.getSurveyParameters(surveyId).right; localFields <- surveyAdminService.getLocalFields(params.localeId).right; localNutrients <- surveyAdminService.getLocalNutrientTypes(params.localeId).right; dataScheme <- surveyAdminService.getCustomDataScheme(params.schemeId).right; foodGroups <- foodGroupsAdminService.listFoodGroups(params.localeId).right; submissions <- service.getSurveySubmissions(surveyId, Some(parsedFrom), Some(parsedTo), 0, Integer.MAX_VALUE, None).right) yield ((localFields, localNutrients, dataScheme, foodGroups, submissions)) data match { case Right((localFields, localNutrients, dataScheme, foodGroups, submissions)) => exporter.exportSurveySubmissions(dataScheme, foodGroups, localFields, localNutrients, submissions, forceBOM) match { case Right(csvFile) => val dateStamp = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(LocalDateTime.ofInstant(Clock.systemUTC().instant(), ZoneId.systemDefault).withNano(0)).replace(":", "-").replace("T", "-") Ok.sendFile(csvFile, fileName = _ => s"intake24-$surveyId-data-$dateStamp.csv", onClose = () => csvFile.delete()).as(if (forceBOM) "application/octet-stream" else "text/csv;charset=utf-8") case Left(exportError) => InternalServerError(toJsonString(ErrorDescription("ExportError", exportError))) } case Left(databaseError) => translateDatabaseError(databaseError) } } catch { case e: DateTimeParseException => BadRequest(toJsonString(ErrorDescription("DateFormat", "Failed to parse date parameter. Expected a UTC date in ISO 8601 format, e.g. 
'2017-02-15T16:40:30Z'."))) } case None => BadRequest(s"Output format version not supported: $format") } } } //val body = DataExportNotification(task.userName, task.surveyId, downloadUrl, config.s3UrlExpirationTimeMinutes.toInt / 60) //val message = Email(, , Seq(email), None, Some(body.toString())) def downloadAvailableMessage(surveyId: String, url: String) = (userProfile: UserProfile) => DataExportNotification(userProfile.name, surveyId, url, urlValidityPeriod.toHours.toInt).toString() private def checkResult(result: Either[AnyError, Unit], errorMessage: String) = result match { case Right(()) => () case Left(error) => logger.error(errorMessage, error.exception) } def queueCSVExportForDownload(surveyId: String, dateFrom: String, dateTo: String, format: String) = rab.restrictToRoles(Roles.superuser, Roles.surveyAdmin, Roles.surveyStaff(surveyId))(playBodyParsers.empty) { request => try { val parsedFrom = ZonedDateTime.parse(dateFrom) val parsedTo = ZonedDateTime.parse(dateTo) val forceBOM = request.getQueryString("forceBOM").isDefined val supportEmail = configuration.get[String]("intake24.supportEmail") val queueResult = dataExporter.queueCsvExport(request.subject.userId, surveyId, parsedFrom, parsedTo, forceBOM, "download", format) val exportResult = (for (exportTaskHandle <- EitherT(queueResult); file <- EitherT(exportTaskHandle.result)) yield (file, exportTaskHandle)).value exportResult.map { case Right((file, exportTaskHandle)) => val dateStamp = DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(LocalDateTime.ofInstant(Clock.systemUTC().instant(), ZoneId.systemDefault).withNano(0)).replace(":", "-").replace("T", "-") val urlExpirationDate = ZonedDateTime.now().plus(urlValidityPeriod.toMillis, ChronoUnit.MILLIS) secureUrlService.createUrl(s"intake24-$surveyId-data-${exportTaskHandle.id}-$dateStamp.csv", file, urlExpirationDate) match { case Success(secureUrl) => checkResult(service.setExportTaskDownloadUrl(exportTaskHandle.id, secureUrl, urlExpirationDate), "Failed to set download URL") checkResult(emailSender.sendHtml(request.subject.userId, s"Your Intake24 survey ($surveyId) data is available for download", s"Intake24 <$supportEmail>", downloadAvailableMessage(surveyId, secureUrl.toString())), "Failed to send e-mail notification") case Failure(exception) => logger.error("Failed to create secure URL for file download", exception) checkResult(service.setExportTaskDownloadFailed(exportTaskHandle.id, exception), "Failed to update download URL status after secure URL service failed") } case Left(error) => logger.error("Failed to queue CSV export", error.exception) } queueResult.map(r => translateDatabaseResult(r.map(h => NewExportTaskInfo(h.id)))) } catch { case _: DateTimeParseException => Future.successful(BadRequest(toJsonString(ErrorDescription("DateFormat", "Failed to parse date parameter. Expected a UTC date in ISO 8601 format, e.g. 
'2017-02-15T16:40:30Z'.")))) } } case class GetExportTaskStatusResult(activeTasks: Seq[ExportTaskInfo]) def getExportTaskStatus(surveyId: String) = rab.restrictToRoles(Roles.superuser, Roles.surveyAdmin, Roles.surveyStaff(surveyId))(playBodyParsers.empty) { request => Future { translateDatabaseResult(service.getActiveExportTasks(surveyId, request.subject.userId).right.map(GetExportTaskStatusResult(_))) } } def scheduleExport(surveyId: String) = rab.restrictToRoles(Roles.superuser, Roles.surveyAdmin)(jsonBodyParser.parse[NewScheduledTaskRequest]) { request => Future { val r = request.body translateDatabaseResult(exportScheduler.createScheduledTask(request.subject.userId, surveyId, r.period, r.daysOfWeek, r.time, r.timeZone, r.action, r.actionConfig)) } } }
digitalinteraction/intake24
DataExportService/app/uk/ac/ncl/openlab/intake24/services/dataexport/controllers/SurveyDataExportController.scala
Scala
apache-2.0
12,489
import scala.quoted.*
import scala.language.implicitConversions

case class Xml(parts: String, args: List[Any])

object XmlQuote {

  // Encoding for
  //
  //   implicit class SCOps(s: StringContext) {
  //     object xml {
  //       def apply(exprs: Any*) = ...
  //       def unapplySeq(...) = ...
  //     }
  //   }

  object XMLOps {
    opaque type StringContext = scala.StringContext

    extension (ctx: scala.StringContext) def xml: StringContext = ctx
  }

  extension (inline ctx: XMLOps.StringContext) inline def apply(inline args: Any*): Xml =
    ${XmlQuote.impl('ctx, 'args)}

  // extension (inline ctx: SCOps.StringContext) inline def unapplySeq(...): Xml = ...

  def impl(receiver: Expr[XMLOps.StringContext], args: Expr[Seq[Any]])(using Quotes): Expr[Xml] = {
    val string = receiver match {
      case '{ XMLOps.xml(${Expr(sc)}) } => sc.parts.mkString("??")
    }
    '{new Xml(${Expr(string)}, $args.toList)}
  }
}
lampepfl/dotty
tests/run-macros/xml-interpolation-7/Macros_1.scala
Scala
apache-2.0
928
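A hedged usage sketch for the xml interpolator above: with XmlQuote's extensions in scope, a literal such as xml"<p>$name</p>" is rewritten at compile time by impl, which joins the StringContext parts with "??" and collects the interpolated arguments. The snippet below constructs the Xml value that such an expansion would produce, without invoking the macro itself (so it runs without the compile-time machinery); only the Xml case class from the file above is assumed.

@main def xmlSketch(): Unit = {
  val name = "Alice"
  // Expected shape of the expansion of: xml"<p>$name</p>"
  // parts "<p>" and "</p>" are joined with "??", args carry the interpolated values.
  val expanded = Xml("<p>??</p>", List(name))
  println(expanded) // Xml(<p>??</p>,List(Alice))
}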
/* * Copyright 2012 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.twitter.zipkin.query import com.twitter.conversions.time._ import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver} import com.twitter.finagle.tracing.{Trace => FTrace} import com.twitter.logging.Logger import com.twitter.util.{Future, Time} import com.twitter.zipkin.common.Span import com.twitter.zipkin.conversions.thrift._ import com.twitter.zipkin.query.adjusters._ import com.twitter.zipkin.query.constants._ import com.twitter.zipkin.storage._ import com.twitter.zipkin.{gen => thrift} import java.nio.ByteBuffer class ThriftQueryService( spanStore: SpanStore, aggsStore: Aggregates = new NullAggregates, realtimeStore: RealtimeAggregates = NullRealtimeAggregates, adjusters: Map[thrift.Adjust, Adjuster] = Map.empty[thrift.Adjust, Adjuster], traceDurationFetchBatchSize: Int = 500, stats: StatsReceiver = DefaultStatsReceiver.scope("ThriftQueryService"), log: Logger = Logger.get("ThriftQueryService") ) extends thrift.ZipkinQuery[Future] { private[this] val methodStats = stats.scope("perMethod") private[this] def opt[T](param: T): Option[T] = param match { case null | "" => None case s => Some(s) } private[this] def getOrderBy(order: thrift.Order): ((TraceIdDuration, TraceIdDuration) => Boolean) = { order match { case thrift.Order.None => (a: TraceIdDuration, b: TraceIdDuration) => a.duration > b.duration case thrift.Order.DurationDesc => (a: TraceIdDuration, b: TraceIdDuration) => a.duration > b.duration case thrift.Order.DurationAsc => (a: TraceIdDuration, b: TraceIdDuration) => a.duration < b.duration case thrift.Order.TimestampDesc => (a: TraceIdDuration, b: TraceIdDuration) => a.startTimestamp > b.startTimestamp case thrift.Order.TimestampAsc => (a: TraceIdDuration, b: TraceIdDuration) => a.startTimestamp < b.startTimestamp } } private[this] def getTraceIdDurations(fIds: Future[Seq[Long]]): Future[Seq[TraceIdDuration]] = { fIds flatMap { ids => val ret = ids.grouped(traceDurationFetchBatchSize).toSeq.map(spanStore.getTracesDuration(_)) Future.collect(ret).map(_.flatten) } } private[this] def sortedTraceIds(traceIds: Future[Seq[Long]], limit: Int, order: thrift.Order): Future[Seq[Long]] = if (order == thrift.Order.None) traceIds else { val orderBy = getOrderBy(order) getTraceIdDurations(traceIds) map { _.sortWith(orderBy).slice(0, limit).map(_.traceId) } } private[this] def sort(traces: Future[Seq[IndexedTraceId]], limit: Int, order: thrift.Order): Future[Seq[Long]] = sortedTraceIds(traces.map(_.map(_.traceId)), limit, order) private[this] def adjustedTraces(traces: Seq[Seq[Span]], adjusts: Seq[thrift.Adjust]): Seq[Trace] = { val as = adjusts flatMap { adjusters.get(_) } traces map { spans => as.foldLeft(Trace(spans)) { (t, adjuster) => adjuster.adjust(t) } } } private[this] def padTimestamp(timestamp: Long): Long = timestamp + TraceTimestampPadding.inMicroseconds private[this] def traceIdsIntersect(idSeqs: Seq[Seq[IndexedTraceId]]): Seq[IndexedTraceId] = { /* Find the trace IDs present in all 
the Seqs */ val idMaps = idSeqs.map(_.groupBy(_.traceId)) val traceIds = idMaps.map(_.keys.toSeq) val commonTraceIds = traceIds.tail.fold(traceIds(0))(_.intersect(_)) /* * Find the timestamps associated with each trace ID and construct a new IndexedTraceId * that has the trace ID's maximum timestamp (ending) as the timestamp */ commonTraceIds map { id => IndexedTraceId(id, idMaps.flatMap(_(id).map(_.timestamp)).max) } } private[this] def queryResponse( ids: Seq[IndexedTraceId], qr: thrift.QueryRequest, endTs: Long = -1 ): Future[thrift.QueryResponse] = { sortedTraceIds(Future.value(ids.map(_.traceId)), qr.limit, qr.order) map { sortedIds => val (min, max) = sortedIds match { case Nil => (-1L, endTs) case _ => val ts = ids.map(_.timestamp) (ts.min, ts.max) } thrift.QueryResponse(sortedIds, min, max) } } private trait SliceQuery private case class SpanSliceQuery(name: String) extends SliceQuery private case class AnnotationSliceQuery(key: String, value: Option[ByteBuffer]) extends SliceQuery private[this] def querySlices(slices: Seq[SliceQuery], qr: thrift.QueryRequest): Future[Seq[Seq[IndexedTraceId]]] = Future.collect(slices map { case SpanSliceQuery(name) => spanStore.getTraceIdsByName(qr.serviceName, Some(name), qr.endTs, qr.limit) case AnnotationSliceQuery(key, value) => spanStore.getTraceIdsByAnnotation(qr.serviceName, key, value, qr.endTs, qr.limit) case s => Future.exception(new Exception("Uknown SliceQuery: %s".format(s))) }) private[this] def handle[T](name: String)(f: => Future[T]): Future[T] = { val errorStats = methodStats.scope("errors") val ret = try { methodStats.timeFuture(name)(f) } catch { case e: Exception => Future.exception(e) } ret rescue { case e: Exception => log.error(e, "%s error".format(name)) errorStats.counter(name).incr() errorStats.scope(name).counter(e.getClass.getName).incr() Future.exception(thrift.QueryException(e.toString)) } } private[this] val noServiceNameError = Future.exception(thrift.QueryException("No service name provided")) private[this] def handleQuery[T](name: String, qr: thrift.QueryRequest)(f: => Future[T]): Future[T] = if (!opt(qr.serviceName).isDefined) noServiceNameError else { FTrace.recordBinary("serviceName", qr.serviceName) FTrace.recordBinary("endTs", qr.endTs) FTrace.recordBinary("limit", qr.limit) FTrace.recordBinary("order", qr.order) handle(name)(f) } def getTraceIds(qr: thrift.QueryRequest): Future[thrift.QueryResponse] = handleQuery("getTraceIds", qr) { val sliceQueries = Seq[Option[Seq[SliceQuery]]]( qr.spanName.map { n => Seq(SpanSliceQuery(n)) }, qr.annotations.map { _.map { AnnotationSliceQuery(_, None) } }, qr.binaryAnnotations.map { _.map { b => AnnotationSliceQuery(b.key, Some(b.value)) } } ).flatten.flatten sliceQueries match { case Nil => spanStore.getTraceIdsByName(qr.serviceName, None, qr.endTs, qr.limit) flatMap { queryResponse(_, qr) } case slice :: Nil => querySlices(sliceQueries, qr) flatMap { ids => queryResponse(ids.flatten, qr) } case _ => // TODO: timestamps endTs is the wrong name for all this querySlices(sliceQueries, qr.copy(limit = 1)) flatMap { ids => val ts = padTimestamp(ids.flatMap(_.map(_.timestamp)).reduceOption(_ min _).getOrElse(0)) querySlices(sliceQueries, qr.copy(endTs = ts)) flatMap { ids => traceIdsIntersect(ids) match { case Nil => val endTs = ids.map(_.map(_.timestamp).reduceOption(_ min _).getOrElse(0L)).reduceOption(_ max _).getOrElse(0L) queryResponse(Nil, qr, endTs) case seq => queryResponse(seq, qr) } } } } } def getTraceIdsBySpanName( serviceName: String, spanName: String, endTs: Long, 
limit: Int, order: thrift.Order ): Future[Seq[Long]] = { val qr = thrift.QueryRequest(serviceName, opt(spanName), None, None, endTs, limit, order) handleQuery("getTraceIdsBySpanName", qr) { sort(spanStore.getTraceIdsByName(serviceName, qr.spanName, endTs, limit), limit, order) } } def getTraceIdsByServiceName( serviceName: String, endTs: Long, limit: Int, order: thrift.Order ): Future[Seq[Long]] = { val qr = thrift.QueryRequest(serviceName, None, None, None, endTs, limit, order) handleQuery("getTraceIdsBySpanName", qr) { sort(spanStore.getTraceIdsByName(serviceName, None, endTs, limit), limit, order) } } def getTraceIdsByAnnotation( serviceName: String, key: String, value: ByteBuffer, endTs: Long, limit: Int, order: thrift.Order ): Future[Seq[Long]] = { val qr = thrift.QueryRequest(serviceName, None, None, None, endTs, limit, order) handleQuery("getTraceIdsByAnnotation", qr) { sort(spanStore.getTraceIdsByAnnotation(serviceName, key, opt(value), endTs, limit), limit, order) } } def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = handle("tracesExist") { FTrace.recordBinary("numIds", traceIds.length) spanStore.tracesExist(traceIds) } def getTracesByIds(traceIds: Seq[Long], adjust: Seq[thrift.Adjust]): Future[Seq[thrift.Trace]] = handle("getTracesByIds") { FTrace.recordBinary("numIds", traceIds.length) spanStore.getSpansByTraceIds(traceIds) map { adjustedTraces(_, adjust).map(_.toThrift) } } def getTraceTimelinesByIds(traceIds: Seq[Long], adjust: Seq[thrift.Adjust]): Future[Seq[thrift.TraceTimeline]] = handle("getTraceTimelinesByIds") { FTrace.recordBinary("numIds", traceIds.length) spanStore.getSpansByTraceIds(traceIds) map { traces => adjustedTraces(traces, adjust) flatMap { TraceTimeline(_).map(_.toThrift) } } } def getTraceSummariesByIds(traceIds: Seq[Long], adjust: Seq[thrift.Adjust]): Future[Seq[thrift.TraceSummary]] = handle("getTraceSummariesByIds") { FTrace.recordBinary("numIds", traceIds.length) spanStore.getSpansByTraceIds(traceIds) map { traces => adjustedTraces(traces, adjust) flatMap { TraceSummary(_).map(_.toThrift) } } } def getTraceCombosByIds(traceIds: Seq[Long], adjust: Seq[thrift.Adjust]): Future[Seq[thrift.TraceCombo]] = handle("getTraceCombosByIds") { FTrace.recordBinary("numIds", traceIds.length) spanStore.getSpansByTraceIds(traceIds) map { traces => adjustedTraces(traces, adjust) map { TraceCombo(_).toThrift } } } // TODO def getDataTimeToLive: Future[Int] = handle("getDataTimeToLive") { Future.exception(new Exception("not implemented")) } def getServiceNames: Future[Set[String]] = handle("getServiceNames") { spanStore.getAllServiceNames } def getSpanNames(serviceName: String): Future[Set[String]] = handle("getSpanNames") { spanStore.getSpanNames(serviceName) } def setTraceTimeToLive(traceId: Long, ttl: Int): Future[Unit] = handle("setTraceTimeToLive") { spanStore.setTimeToLive(traceId, ttl.seconds) } def getTraceTimeToLive(traceId: Long): Future[Int] = handle("getTraceTimeToLive") { spanStore.getTimeToLive(traceId).map(_.inSeconds) } def getDependencies(startTime: Option[Long], endTime: Option[Long]) : Future[thrift.Dependencies] = handle("getDependencies") { val start = startTime map { Time.fromMicroseconds(_) } val end = endTime map { Time.fromMicroseconds(_) } aggsStore.getDependencies(start, end).map(_.toThrift) } def getTopAnnotations(serviceName: String): Future[Seq[String]] = handle("getTopAnnotations") { aggsStore.getTopAnnotations(serviceName) } def getTopKeyValueAnnotations(serviceName: String): Future[Seq[String]] = handle("getTopKeyValueAnnotations") 
{ aggsStore.getTopKeyValueAnnotations(serviceName) } def getSpanDurations( timeStamp: Long, serverServiceName: String, rpcName: String ): Future[Map[String, List[Long]]] = handle("getSpanDurations") { val time = Time.fromMicroseconds(timeStamp) realtimeStore.getSpanDurations(time, serverServiceName, rpcName) } }
siddhaism/zipkin
zipkin-query/src/main/scala/com/twitter/zipkin/query/ThriftQueryService.scala
Scala
apache-2.0
12,265
package mesosphere.marathon package raml import mesosphere.UnitTest import mesosphere.marathon.core.launcher.OfferMatchResult import mesosphere.marathon.core.launchqueue.LaunchStats.QueuedInstanceInfoWithStatistics import mesosphere.marathon.state.{AbsolutePathId, AppDefinition, Timestamp} import mesosphere.marathon.test.{MarathonTestHelper, SettableClock} import mesosphere.mesos.NoOfferMatchReason class QueueInfoConversionTest extends UnitTest { "QueueInfoConversion" should { "A reject reason is converted correctly" in { Given("A reject reason") val reason = NoOfferMatchReason.InsufficientCpus When("The value is converted to raml") val raml = reason.toRaml[String] Then("The value is converted correctly") raml should be(reason.toString) } "A NoMatch is converted correctly" in { Given("A NoMatch") val app = AppDefinition(AbsolutePathId("/test"), role = "*") val offer = MarathonTestHelper.makeBasicOffer().build() val noMatch = OfferMatchResult.NoMatch(app, offer, Seq(NoOfferMatchReason.InsufficientCpus), Timestamp.now()) When("The value is converted to raml") val raml = noMatch.toRaml[UnusedOffer] Then("The value is converted correctly") raml.offer should be(offer.toRaml[Offer]) raml.reason should be(noMatch.reasons.toRaml[Seq[String]]) raml.timestamp should be(noMatch.timestamp.toOffsetDateTime) } "A QueueInfoWithStatistics is converted correctly" in { Given("A QueueInfoWithStatistics") val clock = new SettableClock() val now = clock.now() val app = AppDefinition(AbsolutePathId("/test"), role = "*") val offer = MarathonTestHelper.makeBasicOffer().build() val noMatch = Seq( OfferMatchResult.NoMatch(app, offer, Seq(NoOfferMatchReason.InsufficientCpus), now), OfferMatchResult.NoMatch(app, offer, Seq(NoOfferMatchReason.InsufficientCpus), now), OfferMatchResult.NoMatch(app, offer, Seq(NoOfferMatchReason.InsufficientCpus), now), OfferMatchResult.NoMatch(app, offer, Seq(NoOfferMatchReason.InsufficientMemory), now) ) val summary: Map[NoOfferMatchReason, Int] = Map( NoOfferMatchReason.InsufficientCpus -> 75, NoOfferMatchReason.InsufficientMemory -> 15, NoOfferMatchReason.InsufficientDisk -> 10 ) val lastSummary: Map[NoOfferMatchReason, Int] = Map( NoOfferMatchReason.InsufficientCpus -> 3, NoOfferMatchReason.InsufficientMemory -> 1 ) val offersSummary: Seq[DeclinedOfferStep] = List( DeclinedOfferStep("UnfulfilledRole", 0, 123), DeclinedOfferStep("UnfulfilledConstraint", 0, 123), DeclinedOfferStep("NoCorrespondingReservationFound", 0, 123), DeclinedOfferStep("AgentMaintenance", 0, 123), DeclinedOfferStep("InsufficientCpus", 75, 123), // 123 - 75 = 48 DeclinedOfferStep("InsufficientMemory", 15, 48), // 48 - 15 = 33 DeclinedOfferStep("InsufficientDisk", 10, 33), // 33 - 10 = 23 DeclinedOfferStep("InsufficientGpus", 0, 23), DeclinedOfferStep("InsufficientPorts", 0, 23), DeclinedOfferStep("DeclinedScarceResources", 0, 23) ) val lastOffersSummary: Seq[DeclinedOfferStep] = List( DeclinedOfferStep("UnfulfilledRole", 0, 4), DeclinedOfferStep("UnfulfilledConstraint", 0, 4), DeclinedOfferStep("NoCorrespondingReservationFound", 0, 4), DeclinedOfferStep("AgentMaintenance", 0, 4), DeclinedOfferStep("InsufficientCpus", 3, 4), // 4 - 3 = 1 DeclinedOfferStep("InsufficientMemory", 1, 1), // 1 - 1 = 0 DeclinedOfferStep("InsufficientDisk", 0, 0), DeclinedOfferStep("InsufficientGpus", 0, 0), DeclinedOfferStep("InsufficientPorts", 0, 0), DeclinedOfferStep("DeclinedScarceResources", 0, 0) ) val info = QueuedInstanceInfoWithStatistics( app, "*", inProgress = true, instancesLeftToLaunch = 23, finalInstanceCount = 23, 
backOffUntil = None, startedAt = now, rejectSummaryLastOffers = lastSummary, rejectSummaryLaunchAttempt = summary, processedOffersCount = 123, unusedOffersCount = 100, lastMatch = None, lastNoMatch = Some(noMatch.head), lastNoMatches = noMatch ) When("The value is converted to raml") val raml = (Seq(info), true, clock).toRaml[Queue] Then("The value is converted correctly") raml.queue should have size 1 raml.queue.head shouldBe a[QueueApp] val item = raml.queue.head.asInstanceOf[QueueApp] item.app.id should be(app.id.toString) item.count should be(23) item.processedOffersSummary.processedOffersCount should be(info.processedOffersCount) item.processedOffersSummary.unusedOffersCount should be(info.unusedOffersCount) item.processedOffersSummary.lastUnusedOfferAt should be(Some(now.toOffsetDateTime)) item.processedOffersSummary.lastUsedOfferAt should be(None) item.processedOffersSummary.rejectSummaryLaunchAttempt should be(offersSummary) item.processedOffersSummary.rejectSummaryLastOffers should be(lastOffersSummary) item.lastUnusedOffers should be(defined) item.since should be(now.toOffsetDateTime) } } }
mesosphere/marathon
src/test/scala/mesosphere/marathon/raml/QueueInfoConversionTest.scala
Scala
apache-2.0
5,343
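A standalone REPL-style sketch (not part of the repository above) of the arithmetic that the test's inline comments spell out: each DeclinedOfferStep's "processed" count is whatever the previous rejection reason left over. Only the numbers come from the test; the names below are illustrative.

val processedOffersCount = 123
val rejections = List("InsufficientCpus" -> 75, "InsufficientMemory" -> 15, "InsufficientDisk" -> 10)

// Running totals reproduce the chain from the test comments: 123, 48, 33, 23
val processedAtEachStep =
  rejections.scanLeft(processedOffersCount) { case (left, (_, declined)) => left - declined }
println(processedAtEachStep) // List(123, 48, 33, 23)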
package controllers import javax.inject._ import play.api._ import play.api.libs.json._ import play.api.mvc._ import play.api.mvc.Results._ import models._ import dal._ import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.forkjoin._ import scala.language.postfixOps import ExecutionContext.Implicits.global import scala.util.{Success, Failure} /** * This controller creates an `Action` to handle HTTP requests to the * application's API. */ @Singleton class ApiController @Inject()( categories: CategoryRepository, landmarks: LandmarkRepository, photos: PhotoRepository, tours: TourRepository, waypoints: WaypointRepository, landmarkPhotos: LandmarkPhotoRepository, tourCategories: TourCategoryRepository, tourLandmarks: TourLandmarkRepository) extends Controller { def successCode(): JsObject = JsObject(Seq( ("status") -> JsString("success") ) ) def errMsg(msg: String): JsObject = JsObject( Seq( ("status" -> JsString("failure")), ("error" -> JsString(msg)) ) ) /** * Given a landmark ID, get the JSON for the photos for that given landmark. */ def findPhotosByLandmarkId(landmarkId: Long): Future[JsValue] = { for { photoIds <- landmarkPhotos.findByLandmarkId(landmarkId) lp <- Future.sequence { photoIds.map { photoId => photos.findById(photoId) } } } yield (Json.toJson(lp)) } /** * Given a tour ID, find all the landmarks that are a part of that tour. * Unfortunately, we need to go through a secondary table for this. Not too * bad, though. */ def findLandmarksByTourId(tourId: Long): Future[JsValue] = { for { landmarkIds <- tourLandmarks.findByTourId(tourId) ls <- Future.sequence { landmarkIds.map { landmarkId => for { landmark <- landmarks.findById(landmarkId) photos <- findPhotosByLandmarkId(landmarkId) } yield { Json.toJson(landmark).as[JsObject] + ("photos" -> Json.toJson(photos)) } } } } yield (Json.toJson(ls)) } /** * Find the tour by its ID and return it along with landmark/waypoint data. */ def findTourById(tourId: Long): Future[JsValue] = { tours.findById(tourId).flatMap { case Some(tour) => findLandmarksByTourId(tourId).flatMap { currLandmarks => waypoints.findByTourId(tourId).map { wp => Json.toJson(tour).as[JsObject] + // Now that we have the actual tour objects, we can add waypoint and // landmark data. Waypoints must be in a specific order, landmarks // don't matter quite so much. ("waypoints" -> Json.toJson(wp)) + ("landmarks" -> currLandmarks) } } case None => Future(errMsg(s"could not find tour with ID ${tourId}")) } } /** * Given a category ID, find how many tours that category has. */ def findNumToursForCategory(categoryId: Long): Future[JsValue] = { tourCategories.findByCategoryId(categoryId).map(_.length).map { n => Json.toJson(n) } } /** * GET /api/v1/tours * GET all the tours in the database. */ def getTours = Action.async { implicit request => tours.list.flatMap { case allTours: Seq[Tour] => Future.sequence(allTours.map(t => findTourById(t.id))).map { tourJS => Ok { Json.obj("content" -> tourJS) ++ successCode } } } } /** * GET /api/v1/tours/:id * Get the tour with given ID (includes landmark, photo, and waypoint info). 
*/ def getTour(id: Long) = Action.async { implicit request => tours.findById(id) .flatMap { case Some(tour) => findTourById(tour.id).map { t => Ok { Json.obj("content" -> t) ++ successCode } } case None => Future { NotFound(errMsg(s"Could not find tour with ID ${id}")) } } } /** * GET /api/v1/categories/:id * Get the category with given ID */ def getCategory(id: Long) = Action.async { implicit request => categories.findById(id).flatMap { case Some(category) => findNumToursForCategory(category.id).map { numTours => Ok(Json.obj("content" -> (Json.toJson(category).as[JsObject] + ("numAvailableTours" -> numTours))) ++ successCode) } case None => Future { NotFound(errMsg(s"Could not find category with ID ${id}")) } } } /** * GET /api/v1/categories * Get all the categories in the database, as well as the number of tours * available within that category. */ def getCategories = Action.async { implicit request => categories.list.flatMap { case allCategories: Seq[Category] => Future.sequence { allCategories.map { category => findNumToursForCategory(category.id).map { numTours => Json.toJson(category).as[JsObject] ++ Json.obj("numAvailableTours" -> numTours) } } } }.map { c => Ok { Json.obj("content" -> Json.toJson(c)) ++ successCode } } } /** * GET /api/v1/categories/:id/tours * Get all the tours which belong to the given category. */ def getToursForCategory(id: Long) = Action.async { implicit request => categories.findById(id).flatMap { case Some(_) => tourCategories.findByCategoryId(id).flatMap { tourIds => Future.sequence { tourIds.map { tourId => findTourById(tourId) } }.map { tcs => Ok(Json.obj("content" -> tcs) ++ successCode) } } case None => Future { NotFound(errMsg(s"Could not find category with ID ${id}")) } } } /** * GET /api/v1/categories/:id/tour_info * Get the basic tour info for the category, */ def getTourInfoForCategory(id: Long) = Action.async { implicit request => tourCategories.findByCategoryId(id).flatMap { tourIds => Future.sequence { tourIds.map { tourId => tours.findById(tourId) } }.map { tours => Ok { Json.obj("content" -> Json.toJson(tours)) ++ successCode } } } } /** * GET /api/v1/tours/:id/last_updated * Get the last time the selected tour was updated */ def getTimeTourLastUpdated(id: Long) = Action.async { implicit request => tours.findById(id).map { case Some(tour) => Ok(Json.obj( ("content" -> JsObject(Seq("lastUpdated" -> JsString(tour.lastUpdated)))) ) ++ successCode) case None => NotFound(errMsg(s"Could not find tour with ID ${id}")) } } /** * GET /api/v1/categories/:id/last_updated * Get the last time the selected category was updated */ def getTimeCatLastUpdated(id: Long) = Action.async { implicit request => categories.findById(id).map { case Some(cat) => Ok(Json.obj( ("content" -> JsObject(Seq("lastUpdated" -> JsString(cat.lastUpdated)))) ) ++ successCode) case None => NotFound(errMsg(s"Could not find category with ID ${id}")) } } /** * GET /api/v1/categories_last_updated * Get the time each category was last updated */ def getTimeAllCatsLastUpdated() = Action.async { implicit request => categories.list.map { allCategories => Ok { Json.obj(("content" -> Json.toJson{ allCategories.map { cat => Json.obj(("category_id" -> cat.id), ("last_updated" -> cat.lastUpdated)) } })) ++ successCode } } } }
kelleyb/RPI-Tours-Backend
app/controllers/ApiController.scala
Scala
mit
7,786
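The controller above leans heavily on Future.sequence to turn a list of ids into one Future of results. A minimal self-contained sketch of that pattern; findById is a hypothetical stand-in for the repository calls, not part of the code above.

import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
import ExecutionContext.Implicits.global

object SequenceSketch extends App {
  // hypothetical stand-in for a repository call such as photos.findById
  def findById(id: Long): Future[String] = Future(s"item-$id")

  val ids = Seq(1L, 2L, 3L)
  val all: Future[Seq[String]] = Future.sequence(ids.map(findById))
  println(Await.result(all, 5.seconds)) // List(item-1, item-2, item-3)
}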
import scala.language.{ higherKinds, existentials } object Test extends App { def get[T](x: T) = { println("get: "+ x); x } // TESTS // re-order using names, call-site evaluation order test1(1, "@") test1(b = get("$"), a = get(2)) test1(a = get(3), b = get("**")) // should not transform into a block. how to test? test3(b = get(110), a = get(11))(c = get("\\"), d = get(2.399)) test3(get(14), get(3920))(d = get("}"), c = get("[")) // mixing named and positional test1(get(4), b = get("@")) test1(a = get(10), get("flu")) test2(get(8), v = get(9))(get("%"), l = get(5)) test3(12, 13)("'", d = 16) test3(a = 1, "swine")(c = "bird", d = 10L) // anonymous functions { def doMod(f: Int => Unit) { f(20) } var var1 = 0 doMod(var1 = _) println(var1) synchronized(var1 = 30) println(var1) var var2 = 0 def delay(var2: => Int) = { var2 } println(delay(var2 = 40)) } val f1: (Int, String) => Unit = test1(_, _); f1(6, "~") test4(14) // defaults: subclass overrides, adds and inherits default val b = new Base b.test1(b = "nix")(982)(f = 0) val s = new Sub1 s.test1(a = new { override def toString = "bla" })(m = 0)() // defaults are chosen dynamically val b2: Base = new Sub1 b2.test1(b = "")(c = 93.3)(f = -1) // overloading resolution object t1 { def f(a: Int, b: String) = "first" def f(b: String, a: Int) = "second" } println(t1.f(1, "2")) // first object t2 { def f(a: Int, b: Double, c: Object) = "first" def f(a: Int, b: Double, c: String) = "second" } println(t2.f(1, c = new Base(), b = 2.2)) // first println(t2.f(28, b = 3.89, c = "ldksfj")) // second object t3 { def f(a1: Int) = "first" def f(a2: Int)(b: Int) = "second" } println(t3.f(a1 = 10)) // first println(t3.f(a2 = 20)(1)) // second object t4 { def f(a: Int, b: String = "foo") = "first" def f(a: Int) = "second" } println(t4.f(109)) // second println(t4.f(a = 20)) // second object t5 { def f(a: Object) = "first" val f: String => String = a => "second" } println(t5.f(new Sub1())) // first println(t5.f("dfklj")) // second object t6 { def f(a: String = "sdf", b: Int) = "f" def f(a: Int, b: Int) = "s" } println(t6.f(b = 289)) // f object t7 { def f(a: Int, b: String*) = "first" def f(a: Int) = "second" def g(a: Sub1, b: Int*) = "third" def g(a: Base) = "fourth" def h(a: Base, b: Int*) = "fifth" def h(a: Sub1) = "sixth" } println(t7.f(1)) // second println(t7.f(a = 19)) // second println(t7.f(b = "sl19", a = 28)) // first println(t7.g(new Sub1(), 1, 2)) // third println(t7.g(new Base())) // fourth println(t7.h(new Base())) // fifth println(t7.h(new Sub1())) // sixth object t9 { def f(a: String, b: Int = 11) = "first" def f(a: Double) = "second" } println(t9.f("bla")) // first // vararg def test5(a: Int, b: Int)(c: Int, d: String*) = a +", "+ d.toList println(test5(b = 1, a = 2)(3, "4", "4", "4")) println(test5(b = 1, a = 2)(c = 29)) // tuple conversion def foo(a: Int, b: Int)(c: (Int, String)) = a + c._1 println(foo(b = 1, a = 2)(3, "4")) // by-name parameters def bn1(a: Int, b: => Int) = a println(bn1(b = get(10), a = get(11))) // should not see get(10) def bn2(a: Int, b: => Int)(c: Int = b) = a + b println(bn2(b = get(2), a = get(1))()) // should get: 1, 2, 2 def bn3(a: => Int = get(10)) = 0 def bn4(a: => Int = get(20)) = {a; a} println(bn3()) println(bn4()) println(bn4(a = 0)) class t2929(x: => Int = 1) { def foo = x } println((new t2929()).foo) // constructors val a1 = new A(b = "dlkfj")(d = 102) println(a1.print) val a2 = new A[String, Nothing](2, "dkflj")(d = 2, c = "lskf") println(a2.print) val b1 = new B("dklfj")(e = "nixda") println(b1.printB) val c1 = 
new C(a = "dlkf", c = new { override def toString() = "struct" })(e = "???") println(c1.print) val c2 = C("dflkj", c = Some(209): Option[Int])(None, "!!") println(c2.print) // "super" qualifier val b10 = new B1 println(b10.bar()) // defaults in traits / abstract classes val mn = new MN println(mn.foo()()) println(mn.bar(10)) // anonymous class println((new M { def foo[T >: String](x: Int, y: T)(z: String = "2") = z ; def bar(x: Int, y: Double) = x }).foo()()) // copy method for case classes val fact = Factory(y = "blabla")() println(fact) println(fact.copy(x = -1)("dldl")) println(Fact2()("jyp")) println(Fact2(x = 1)()) println(Fact2(10)().copy(y = "blabla")(3)) // assignment to var <-> named argument var argName = 1 test5(argName = (argName = 2)) println(argName) // should be 2 test5({argName = 3}) println(argName) // should be 3 test5((argName = 4)) println(argName) // should be 4 test5 { argName = 5 } println(argName) // should be 5 val a: Unit = test1(a = 10, b = "2") // local values a and b exist, but it's not ambiguous since they're vals // dependent types and copy method val a11 = new A2 val b11 = a11.B2(new a11.C2)(1) println(b11.copy()(2)) // bug #2057 class O { class I(val x: Int = 1) } class U extends O { val f = new I() } val u1 = new U println(u1.f.x) // names / defaults in self constructor call new A3("lskfdjlk") new A4(1.23, ",") // names / defaults in super constructor call new B4() new B5() // no re-naming of parameters which are free in a closure of the body (lambdalift) println(test6(10)()) test7("jaa") // implicits + defaults { implicit val implInt = 10101 println(test8()) } println(test9) { implicit val implString = "blublu" println(test9) } // result type of default getters: parameter type, except if this one mentions any type // parameter, in which case the result type is inferred. examples: // result type of default getter is "String => String". 
if it were inferred, the compiler // would put "Nothing => Nothing", which is useless def transform(s: String, f: String => String = identity _) = f(s) println(transform("my text")) // a bug reported on a mailing list: see comment in Typer.typedModuleDef object TT class TT(x: Int = 1) val v = new TT() // result type of the default getter is inferred (parameter type mentions type parameter T) def test10[T](x: List[T] = List(1,2)) = x println(test10()) // some complicated type which mentions T def test11[T[P]](x: T[T[List[T[X forSome { type X }]]]] = List(1,2)) = x // (cannot call f using the default, List(1,2) doesn't match the param type) def multinest = { def bar(x: Int = 1) = { def bar(x: Int = 2) = x; bar() + x }; bar() } println(multinest) // #2290 def spawn(a: Int, b: => Unit) = { () } def t { spawn(b = { val ttt = 1; ttt }, a = 0) } // #2382 class A2382[+T](x: T => Int) { def foo(a: T => Int = x) = 0 } // #2390 case class A2390[T](x: Int) { def copy(a: Int)(b: Int = 0) = 0 } // #2489 class A2489 { def foo { def bar(a: Int = 1) = a; bar(); val u = 0 } } class A2489x2 { def foo { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } // a bug reported on the mailing lists, related to #2489 class Test2489 { def foo(): Int = { val i = 10 case class Foo(j: Int) i } } // #2784 class Test2784 { object t { def f(x: Int) = x } val one = t f (x = 1) } // #2820 class Test2820 { class A[T](f: String = "ski!") class C extends A } object t3178 { def foo(x: String) = x def foo(x: Int) = x def bar(foo: Int) = foo bar(foo = 1) } // #3207 trait P3207[T] { class Inner(val f: T => Unit = (x: T) => println(x)) } object Test3207_1 { val p = new P3207[Int] {} val q = new p.Inner() { def g = 0 } } object Test3207_2 { val p = new P3207[Int] { val inner = new Inner() { def g = 0 } } } // #3344 def m3344_1 = { case class C(x: Int); C(1).copy(2).x } m3344_1 def m3344_2 = { class C(val x: Int = 1); new C().x } m3344_2 // #3338 object t3338 { class Container { class GenericClass[T](arg: String = "") } object Container extends Container class Test { val a = new Container.GenericClass() } } (new t3338.Test).a // subclassing and defaults in both class constructors class CBLAH(val x: Int = 1) class DBLAH(val y: String = "2") extends CBLAH() (new DBLAH()) // deprecated names def deprNam1(@deprecatedName('x) a: Int, @deprecatedName('y) b: Int) = a + b deprNam1(y = 10, a = 1) deprNam1(b = 2, x = 10) object deprNam2 { def f(@deprecatedName('s) x: String) = 1 def f(s: Object) = 2 def g(@deprecatedName('x) s: Object) = 3 def g(s: String) = 4 } println(deprNam2.f(s = "dlf")) println(deprNam2.f(s = new Object)) println(deprNam2.g(x = "sljkfd")) // #3697 object t3697 { def a(x: Int*)(s: Int = 3) = s def b(a: Int, b: Int, c: Int*) = a + b } println(t3697.a(Seq(3): _*)()) println(t3697.a(3)()) println(t3697.a()()) println(t3697.a(2,3,1)()) println(t3697.b(a = 1, b = 2)) println(t3697.b(a = 1, b = 2, 3)) println(t3697.b(b = 1, a = 2, c = 3)) println(t3697.b(a = 1, b = 2, 3, 4)) println(t3697.b(a = 1, b = 2, Seq(3, 4): _*)) println(t3697.b(b = 1, a = 2, c = Seq(3, 4): _*)) // #4041 object t4041 { def _1 = (0, 0) copy (_1 = 1) def _2 = (1, 1) copy (_2 = 2) } println(""+ t4041._1 +", "+ t4041._2) // #4441 case class C4441a() case class C4441b()() C4441a().copy() C4441b()().copy()() // SI-8117 def f8177(a: Int = 0, b: Int = 0, c: Int = 0) = s"$a $b $c" println(f8177(a = 1, 1)) // DEFINITIONS def test1(a: Int, b: String) = println(a +": "+ b) def test2(u: Int, v: Int)(k: String, l: Int) = println(l +": "+ k +", "+ (u + 
v)) def test3[T1, T2](a: Int, b: T1)(c: String, d: T2) = println(a +": "+ c +", "+ b +", "+ d) def test4(a: Int) = { def inner(b: Int = a, c: String) = println(b +": "+ c) inner(c = "/") } def test5(argName: Unit) = println("test5") def test6(x: Int) = { () => x } def test7(s: String) = List(1).foreach(_ => println(s)) def test8(x: Int = 1)(implicit y: Int, z: String = "kldfj") = z + x + y def test9(implicit x: Int = 1, z: String = "klfj") = z + x } class Base { def test1[T1, T2](a: Int = 100, b: T1)(c: T2, d: String = a +": "+ b)(e: T2 = c, f: Int) = println(a +": "+ d +", "+ b +", "+ c +", "+ e +", "+ f) } class Sub1 extends Base { override def test1[U1, U2](b: Int, a: U1)(m: U2, r: String = "overridden")(o: U2, f: Int = 555) = println(b +": "+ r +", "+ a +", "+ m +", "+ o +", "+ f) } class A[T <: String, U](a: Int = 0, b: T)(c: String = b, d: Int) { def print = c + a + b + d } class B[T](a: T, b: Int = 1)(c: T = a, e: String = "dklsf") extends A(5, e)("dlkd", 10) { def printB = super.print + e + a + b + c } case class C[U](a: String, b: Int = 234, c: U)(d: U = c, e: String = "dlkfj") { def print = toString + d + e } class A1 { def foo(a: Int = 10, b: String) = b + a } class B1 extends A1 { def bar(a: String = "dflk") = super.foo(b = a) } trait N { def foo[T >: String](x: Int = -1, y: T = "jupee")(z: String): Object } abstract class M extends N { // also tests #2116, specialize return type when overriding. def foo[T >: String](x: Int, y: T)(z: String = "1"): String def bar(n: Int, m: Double = 1.239): Double } class MN extends M { def foo[T >: String](x: Int, y: T)(z: String) = z + x + y def bar(n: Int, m: Double) = n*m } case class Factory(x: Int = 1, y: String)(z: String = y) case class Fact2[T, +U](x: T = "ju", y: U = 1)(z: T = 2) // dependent types and copy method class A2 { case class B2(x: C2)(y: Int) extends A2 { override def toString = "slkdfj" + y } class C2 } // using names / defaults in self constructor call. // overloading resolution: calling A3("string") picks the second, method with default is always less specific. class A3(x: String, y: Int = 10) { def this(a: Object) { this(y = 10, x = a.toString()) println(x) } } class A4(x: String, y: Int = 11) { def this(b: Double, sep: String) { this(sep + b + sep) println(y) } } // using names / defaults in super constructor call class A5(x: Int, val y: Int = 2)(z: Int = x + y) class B4 extends A5(10)() { println(y) } class B5 extends A5(y = 20, x = 2)() { println(y) } // overriding default can be less specific (but has to conform to argument type!) class A6 { def foo(a: Object = "dlkf") = 0 } class B6 extends A6 { override def foo(a: Object = new Object) = 1 }
felixmulder/scala
test/files/run/names-defaults.scala
Scala
bsd-3-clause
12,683
/* Copyright (c) 2012 Joshua Garnett Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.adverserealms.astar.basic2d import scala.collection.immutable.List import scala.collection.mutable.ListBuffer import com.adverserealms.astar.core._ import com.adverserealms.astar.basic2d._ import org.slf4j.{ Logger, LoggerFactory } class MockSquareGridMap extends AstarMap { protected lazy val log = LoggerFactory.getLogger(getClass()) private val MAP_WIDTH = 4 private val MAP_HEIGHT: Int = 4 private val tiles: List[MockSquareTile] = populateMockTiles() private val diagonalMultiplier = 1.4d private val normalMultiplier = 1.0d private val defaultCost = 1.0d /** * Map: x's are not walkable * * 0000 * 0xx0 * 0xx0 * 0000 */ def populateMockTiles(): List[MockSquareTile] = { val tiles = new ListBuffer[MockSquareTile] for (y <- 0 until MAP_HEIGHT) { for (x <- 0 until MAP_WIDTH) { val tile = new MockSquareTile(new Point(x, y)) if (x == 1 && y == 1) { tile.setWalkable(false) } if (x == 2 && y == 1) { tile.setWalkable(false) } if (x == 1 && y == 2) { tile.setWalkable(false) } if (x == 2 && y == 2) { tile.setWalkable(false) } tiles += tile } } tiles.toList } def getNeighbors(tile: AstarTile): List[AstarTile] = { val neighbors = new ListBuffer[AstarTile] val position = tile.asInstanceOf[PositionTile].getPosition() val x = position.getX val y = position.getY //up, left if (getTile(x - 1, y - 1) != null) { neighbors += getTile(x - 1, y - 1) } //up if (getTile(x, y - 1) != null) { neighbors += getTile(x, y - 1) } //up, right if (getTile(x + 1, y - 1) != null) { neighbors += getTile(x + 1, y - 1) } //left if (getTile(x - 1, y) != null) { neighbors += getTile(x - 1, y) } //right if (getTile(x + 1, y) != null) { neighbors += getTile(x + 1, y) } //down, left if (getTile(x - 1, y + 1) != null) { neighbors += getTile(x - 1, y + 1) } //down if (getTile(x, y + 1) != null) { neighbors += getTile(x, y + 1) } //down, right if (getTile(x + 1, y + 1) != null) { neighbors += getTile(x + 1, y + 1) } neighbors.toList } def getTile(x: Int, y: Int): MockSquareTile = { if (x < 0 || x >= MAP_WIDTH) { null } else if (y < 0 || y >= MAP_HEIGHT) { null } else { tiles(x + (y * MAP_WIDTH)) } } def getHeuristic(tile: AstarTile, req: AstarPathRequest): Float = { val start = tile.asInstanceOf[PositionTile].getPosition() val end = req.end.asInstanceOf[PositionTile].getPosition() //using a diagonal distance heuristic val distance: Point = getXYDistanceBetweenPoints(start, end); var h = scala.math.max(distance.getX, distance.getY) h } def getXYDistanceBetweenPoints(start: Point, end: 
Point): Point = { new Point(getAxisDistance(start.getX, end.getX), getAxisDistance(start.getY, end.getY)) } private def getAxisDistance(start: Int, end: Int): Int = { scala.math.abs(start - end) } def getDistance(start: AstarTile, end: AstarTile): Float = { val startP = start.asInstanceOf[PositionTile].getPosition() val endP = end.asInstanceOf[PositionTile].getPosition() if (startP.getX != endP.getX && startP.getY != endP.getY) { //diagonal move 1.4f } else { 1.0f } } }
joshgarnett/Astar-Scala
test/com/adverserealms/astar/basic2d/MockSquareGridMap.scala
Scala
mit
4,723
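The heuristic in getHeuristic above is the diagonal-distance (Chebyshev) estimate: take the larger of the two axis distances. A tiny REPL-style sketch mirroring that logic:

def diagonalDistance(x1: Int, y1: Int, x2: Int, y2: Int): Int =
  scala.math.max(scala.math.abs(x1 - x2), scala.math.abs(y1 - y2))

diagonalDistance(0, 0, 3, 1) // 3: the larger axis distance dominates when diagonal moves cost the same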
package ru.dgolubets

import ru.dgolubets.neo4s.util.CypherStringContext

import scala.language.implicitConversions

package object neo4s {

  // here I import types that should be used often
  implicit def cypherStringContext(sc: StringContext): CypherStringContext = new CypherStringContext(sc)
}
DGolubets/neo4s
src/main/scala/ru/dgolubets/neo4s/package.scala
Scala
apache-2.0
299
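The implicit conversion above is the standard Scala 2 way to add a custom string interpolator. The sketch below shows only the general pattern; the interpolator name and the CypherStringContext API are illustrative, not taken from neo4s.

object InterpolatorPatternSketch extends App {
  import scala.language.implicitConversions

  class QueryStringContext(sc: StringContext) {
    // hypothetical interpolator; a real one would build a parameterized query rather than a String
    def q(args: Any*): String = sc.s(args: _*)
  }
  implicit def toQueryStringContext(sc: StringContext): QueryStringContext = new QueryStringContext(sc)

  val name = "Neo"
  println(q"MATCH (n {name: $name}) RETURN n") // resolves via the implicit conversion
}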
/* __ *\\ ** ________ ___ / / ___ __ ____ Scala.js API ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-lang.org/ ** ** /____/\\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \\* */ package scala.scalajs.js import scala.language.implicitConversions import scala.scalajs.js import scala.collection._ import scala.concurrent.{ExecutionContext, Future} import scala.scalajs.runtime.genTraversableOnce2jsArray sealed abstract class JSConvertersLowPrioImplicits { this: JSConverters.type => @inline implicit def JSRichFutureNonThenable[A](f: Future[A]): JSRichFuture[A] = new JSRichFuture[A](f.asInstanceOf[Future[A | Thenable[A]]]) } /** A collection of decorators that allow converting Scala types to * corresponding JS facade types */ object JSConverters extends JSConvertersLowPrioImplicits { implicit class JSRichOption[T](val opt: Option[T]) extends AnyVal { @inline final def orUndefined: UndefOr[T] = opt.fold[UndefOr[T]](undefined)(v => v) } implicit class JSRichGenTraversableOnce[T]( val col: GenTraversableOnce[T]) extends AnyVal { @inline final def toJSArray: Array[T] = genTraversableOnce2jsArray(col) } implicit class JSRichGenMap[T](val map: GenMap[String, T]) extends AnyVal { @inline final def toJSDictionary: Dictionary[T] = { val result = Dictionary.empty[T] map.foreach { case (key, value) => result(key) = value } result } } @inline implicit def genTravConvertible2JSRichGenTrav[T, C](coll: C)( implicit ev: C => GenTraversableOnce[T]): JSRichGenTraversableOnce[T] = new JSRichGenTraversableOnce(coll) /** Special case for scala.Array of [[genTravConvertible2JSRichGenTrav]]. * Needed for the 2.10.x series. */ @inline implicit def array2JSRichGenTrav[T]( arr: scala.Array[T]): JSRichGenTraversableOnce[T] = new JSRichGenTraversableOnce(arr) @inline implicit def JSRichFutureThenable[A](f: Future[Thenable[A]]): JSRichFuture[A] = new JSRichFuture[A](f.asInstanceOf[Future[A | Thenable[A]]]) final class JSRichFuture[A](val self: Future[A | Thenable[A]]) extends AnyVal { /** Converts the Future to a JavaScript [[Promise]]. * * Attention! The nature of the [[Promise]] class, from the ECMAScript * specification, makes this method inherently un-typeable, because it is * not type parametric. * * The signature of the `toJSPromise` method is only valid * <i>provided that</i> the values of `A` do not have a `then` method. */ def toJSPromise(implicit executor: ExecutionContext): Promise[A] = { new Promise[A]({ (resolve: js.Function1[A | Thenable[A], _], reject: js.Function1[scala.Any, _]) => self onComplete { case scala.util.Success(value) => resolve(value) case scala.util.Failure(th) => reject(th match { case JavaScriptException(e) => e case _ => th }) } }) } } }
lrytz/scala-js
library/src/main/scala/scala/scalajs/js/JSConverters.scala
Scala
bsd-3-clause
3,383
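A short usage sketch of the decorators defined above. Scala.js only: this compiles against the Scala.js library, not on the JVM.

import scala.scalajs.js
import scala.scalajs.js.JSConverters._

val u: js.UndefOr[Int] = Option(42).orUndefined        // undefined when the Option is None
val arr: js.Array[Int] = Seq(1, 2, 3).toJSArray
val dict: js.Dictionary[Int] = Map("a" -> 1).toJSDictionary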
package com.github.cuzfrog.scmd.runtime

import com.github.cuzfrog.scmd.ScmdUtils._

private[runtime] trait ArgTreeUtils {

  implicit class ArgTreeOps(a: ArgTree) {

  }

  implicit class CmdNodeOps(a: CmdNode) {
    def getMandatoriesDownstream: Seq[Node] = {
      val params: Seq[Node] = a.params.filter(_.entity.isMandatory)
      val opts: Seq[Node] = a.opts.filter(_.entity.isMandatory)
      (params ++ opts ++ a.subCmdEntry.getMandatoriesDownstream) :+ a
    }

    def countMandatoryDownstream: Int = {
      val paramCnt = a.params.count(_.entity.isMandatory)
      val optCnt = a.opts.count(_.entity.isMandatory)
      paramCnt + optCnt + a.subCmdEntry.countMandatoryDownstream
    }
  }

  implicit class CmdEntryNodeOps(a: CmdEntryNode) {
    def getMandatoriesDownstream: Seq[Node] = {
      if (!a.entity.isMandatory) Nil
      else a.children.flatMap(_.getMandatoriesDownstream)
    }

    def countMandatoryDownstream: Int = {
      if (!a.entity.isMandatory) 0
      else a.children.map(_.countMandatoryDownstream).sum + 1 //1 = cmd itself
    }
  }

  implicit val nodeSeqCanFormPrettyString: CanFormPrettyString[Seq[Node]] =
    new CanFormPrettyString[Seq[Node]] {
      override def mkPrettyString(a: Seq[Node]): String = {
        a.map(_.prettyString).mkString(System.lineSeparator)
      }
    }
}
cuzfrog/simple-cmd
src/main/scala/com/github/cuzfrog/scmd/runtime/ArgTreeUtils.scala
Scala
apache-2.0
1,325
package wandou.util.pinyin import wandou.util.pinyin.format.HanyuPinyinCaseType import wandou.util.pinyin.format.HanyuPinyinOutputFormat import wandou.util.pinyin.format.HanyuPinyinToneType import wandou.util.pinyin.format.HanyuPinyinVCharType import wandou.util.pinyin.format.exception.BadHanyuPinyinOutputFormatCombination /** * Contains logic to format given Pinyin string * */ object PinyinFormatter { /** * @param pinyinStr * unformatted Hanyu Pinyin string * @param outputFormat * given format of Hanyu Pinyin * @return formatted Hanyu Pinyin string * @throws BadHanyuPinyinOutputFormatCombination */ @throws(classOf[BadHanyuPinyinOutputFormatCombination]) def formatHanyuPinyin(pinyinStr: String, outputFormat: HanyuPinyinOutputFormat): String = { if ((HanyuPinyinToneType.WITH_TONE_MARK == outputFormat.toneType) && ((HanyuPinyinVCharType.WITH_V == outputFormat.vCharType) || (HanyuPinyinVCharType.WITH_U_AND_COLON == outputFormat.vCharType))) { throw new BadHanyuPinyinOutputFormatCombination("tone marks cannot be added to v or u:") } var pinyin = pinyinStr outputFormat.toneType match { case HanyuPinyinToneType.WITHOUT_TONE => pinyin = pinyin.replaceAll("[1-5]", "") case HanyuPinyinToneType.WITH_TONE_MARK => pinyin = pinyin.replaceAll("u:", "v") pinyin = toneNumberToToneMark(pinyin) case _ => } outputFormat.vCharType match { case HanyuPinyinVCharType.WITH_V => pinyin = pinyin.replaceAll("u:", "v") case HanyuPinyinVCharType.WITH_U_UNICODE => pinyin = pinyin.replaceAll("u:", "ü") case _ => } outputFormat.caseType match { case HanyuPinyinCaseType.UPPERCASE => pinyin = pinyin.toUpperCase case _ => } pinyin } /** * Convert tone numbers to tone marks using Unicode <br/><br/> * * <b>Algorithm for determining location of tone mark</b><br/> * * A simple algorithm for determining the vowel on which the tone mark * appears is as follows:<br/> * * <ol> * <li>First, look for an "a" or an "e". If either vowel appears, it takes * the tone mark. There are no possible pinyin syllables that contain both * an "a" and an "e". * * <li>If there is no "a" or "e", look for an "ou". If "ou" appears, then * the "o" takes the tone mark. * * <li>If none of the above cases hold, then the last vowel in the syllable * takes the tone mark. 
* * </ol> * * @param pinyinStr * the ascii represention with tone numbers * @return the unicode represention with tone marks */ private def toneNumberToToneMark(pinyin: String): String = { val lowerCasePinyin = pinyin.toLowerCase if (lowerCasePinyin.matches("[a-z]*[1-5]?")) { val defautlCharValue = '$' val defautlIndexValue = -1 var unmarkedVowel = defautlCharValue var idxOfUnmarkedVowel = defautlIndexValue val charA = 'a' val charE = 'e' val ouStr = "ou" val allUnmarkedVowelStr = "aeiouv" val allMarkedVowelStr = "āáăàaēéĕèeīíĭìiōóŏòoūúŭùuǖǘǚǜü" if (lowerCasePinyin.matches("[a-z]*[1-5]")) { val tuneNumber = Character.getNumericValue(lowerCasePinyin.charAt(lowerCasePinyin.length - 1)) val idxOfA = lowerCasePinyin.indexOf(charA) val idxOfE = lowerCasePinyin.indexOf(charE) val idxOfOU = lowerCasePinyin.indexOf(ouStr) if (-1 != idxOfA) { idxOfUnmarkedVowel = idxOfA unmarkedVowel = charA } else if (-1 != idxOfE) { idxOfUnmarkedVowel = idxOfE unmarkedVowel = charE } else if (-1 != idxOfOU) { idxOfUnmarkedVowel = idxOfOU unmarkedVowel = ouStr.charAt(0) } else { var i = lowerCasePinyin.length - 1 var break = false while (i >= 0 && !break) { if (String.valueOf(lowerCasePinyin.charAt(i)).matches("[" + allUnmarkedVowelStr + "]")) { idxOfUnmarkedVowel = i unmarkedVowel = lowerCasePinyin.charAt(i) break = true } i -= 1 } } if ((defautlCharValue != unmarkedVowel) && (defautlIndexValue != idxOfUnmarkedVowel)) { val rowIdx = allUnmarkedVowelStr.indexOf(unmarkedVowel) val colIdx = tuneNumber - 1 val vowelLocation = rowIdx * 5 + colIdx val markedVowel = allMarkedVowelStr.charAt(vowelLocation) val sb = new StringBuffer sb.append(lowerCasePinyin.substring(0, idxOfUnmarkedVowel).replaceAll("v", "ü")) sb.append(markedVowel) sb.append(lowerCasePinyin.substring(idxOfUnmarkedVowel + 1, lowerCasePinyin.length - 1).replaceAll("v", "ü")) sb.toString } else { // error happens in the procedure of locating vowel lowerCasePinyin } } else { // input string has no any tune number // only replace v with ü (umlat) character lowerCasePinyin.replaceAll("v", "ü") } } else { // bad format lowerCasePinyin } } }
wandoulabs/wandou-math
wandou-util/src/main/scala/wandou/util/pinyin/PinyinFormatter.scala
Scala
apache-2.0
5,330
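A REPL-style trace of the vowel lookup inside toneNumberToToneMark above, using the file's own tables, for the input "ma3" (tone 3 lands on the 'a'):

val allUnmarkedVowelStr = "aeiouv"
val allMarkedVowelStr = "āáăàaēéĕèeīíĭìiōóŏòoūúŭùuǖǘǚǜü"

val unmarkedVowel = 'a'   // rule 1: an "a" always takes the tone mark
val tuneNumber = 3
val vowelLocation = allUnmarkedVowelStr.indexOf(unmarkedVowel) * 5 + (tuneNumber - 1) // 0*5 + 2 = 2
allMarkedVowelStr.charAt(vowelLocation) // 'ă', so "ma3" is rendered as "mă"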
package com.twitter.finagle.mux.transport import com.twitter.concurrent.{AsyncQueue, Broker, Offer} import com.twitter.finagle.mux.transport.Message.{Rdiscarded, Tdiscarded} import com.twitter.finagle.stats.StatsReceiver import com.twitter.finagle.transport.{LegacyContext, Transport, TransportContext} import com.twitter.finagle.{Failure, Status} import com.twitter.io.{Buf, BufByteWriter, ByteReader} import com.twitter.util.{Future, Promise, Return, Throw, Time} import java.net.SocketAddress import java.security.cert.Certificate import java.util.concurrent.atomic.AtomicInteger import scala.util.control.NonFatal /** * Defines a [[com.twitter.finagle.transport.Transport]] which allows a * mux session to be shared between multiple tag streams. The transport splits * mux messages into fragments with a size defined by a parameter. Writes are * then interleaved to achieve equity and goodput over the entire stream. * Fragments are aggregated into complete mux messages when read. The fragment size * is negotiated when a mux session is initialized. * * @see [[com.twitter.finagle.mux.Handshake]] for usage details. * * @note Our current implementation does not offer any mechanism to resize * the window after a session is established. However, it is possible to * compose a flow control algorithm over this which can dynamically control * the size of `window`. */ private[finagle] object MuxFramer { /** * Defines mux framer keys and values exchanged as part of a * mux session header during initialization. */ object Header { val KeyBuf: Buf = Buf.Utf8("mux-framer") /** * Returns a header value with the given frame `size` encoded. */ def encodeFrameSize(size: Int): Buf = { require(size > 0) val bw = BufByteWriter.fixed(4) bw.writeIntBE(size) bw.owned() } /** * Extracts frame size from the `buf`. */ def decodeFrameSize(buf: Buf): Int = { val br = ByteReader(buf) try { val size = ByteReader(buf).readIntBE() require(size > 0) size } finally br.close() } } /** * Represents a tag stream while writing fragments. To avoid unnecessary allocations * `FragmentStream` carries some mutable state. In particular, `fragments` is a mutable * iterator and its contents should not be written concurrently. */ case class FragmentStream(tag: Int, fragments: Iterator[Buf], writePromise: Promise[Unit]) /** * Represents an interrupt for a stream. */ case class Interrupt(tag: Int, exc: Throwable) /** * Creates a new [[Transport]] which fragments writes into `writeWindowBytes` * sized payloads and defragments reads into mux [[Message]]s. * * @param writeWindowBytes messages larger than this value are fragmented on * write. If the value is not defined, writes are proxied to the underlying * transport. However, the transport is always prepared to read fragments. 
*/ def apply( trans: Transport[Buf, Buf], writeWindowBytes: Option[Int], statsReceiver: StatsReceiver ): Transport[Message, Message] = new Transport[Message, Message] { type Context = TransportContext require( writeWindowBytes.isEmpty || writeWindowBytes.exists(_ > 0), s"writeWindowBytes must be positive: $writeWindowBytes" ) // stats for both read and write paths private[this] val pendingWriteStreams, pendingReadStreams = new AtomicInteger(0) private[this] val writeStreamBytes = statsReceiver.stat("write_stream_bytes") private[this] val readStreamBytes = statsReceiver.stat("read_stream_bytes") private[this] val gauges = Seq( statsReceiver.addGauge("pending_write_streams") { pendingWriteStreams.get }, statsReceiver.addGauge("pending_read_streams") { pendingReadStreams.get }, statsReceiver.addGauge("write_window_bytes") { writeWindowBytes match { case Some(bytes) => bytes.toFloat case None => -1F } } ) // Queues incoming streams which are dequeued and // flushed in `writeLoop`. private[this] val writeq = new Broker[FragmentStream] // Kicks off a new writeLoop with an incoming stream. private[this] val newWriteLoop: Offer[Unit] = writeq.recv.map { stream => writeLoop(Seq(stream)) } // Communicates interrupts for outstanding streams. private[this] val interrupts = new Broker[Interrupt] // This is lifted out of writeLoop to avoid a closure. Technically, we could // inline `Offer.const(writeLoop)` in the loop, but this makes it difficult to // feign concurrency over a single thread because of how the LocalScheduler is // implemented. private[this] val unitOffer = Offer.const(()) /** * Write fragments from `streams` recursively. Each iteration, a layer * from `streams` is written to the transport, effectively load balancing * across all streams to ensure a diverse and equitable session. New streams * join the `writeLoop` via the `writeq` broker and are interrupted via the * `interrupts` broker. * * @note The order in which we iterate over `streams` (and thus write to * the transport) isn't strictly guaranteed and can change in the presence * of interrupts, for example. */ private[this] def writeLoop(streams: Seq[FragmentStream]): Future[Unit] = if (streams.isEmpty) newWriteLoop.sync() else { val round = streams.foldLeft[Seq[Future[FragmentStream]]](Nil) { case (writes, s @ FragmentStream(_, fragments, writep)) if fragments.hasNext => val buf = fragments.next() writeStreamBytes.add(buf.length) val write = trans.write(buf).transform { case Return(_) => Future.value(s) case exc @ Throw(_) => // `streams` should only contain streams where the // the write promise is not complete because interrupted // streams are filtered out before entering `writeLoop`. writep.update(exc) Future.value(s) } write +: writes case (writes, FragmentStream(_, _, writep)) => // We have completed the stream. It's not possible for // `writep` to be complete since interrupted streams are // guaranteed to be filtered out of `streams`. writep.update(Return.Unit) writes } // Note, we don't need to `collectToTry` here because `round` always // completes succesfully. Failures to write per-stream are encoded in // the stream's `writePromise`. Future.collect(round).flatMap { nextStreams => // After each round, we choose between the following cases // (note, if more than one offer is available we choose the // first available w.r.t to the argument order): Offer .prioritize[Unit]( // 1. Remove a stream which has been interrupted. We interrupt first // to allow a backup in `writeq` to be drained on interrupts. 
Note that // an interrupt before an element reaches the writeq is possible and // handled in `write`. interrupts.recv.map { case Interrupt(tag, exc) => writeLoop(nextStreams.foldLeft[Seq[FragmentStream]](Nil) { case (ss, FragmentStream(`tag`, _, writep)) => writep.update(Throw(exc)) ss case (ss, s) => s +: ss }) }, // 2. Add an incoming stream. writeq.recv.map { s => writeLoop(s +: nextStreams) }, // 3. Dispatch another round of writes. unitOffer.map { _ => writeLoop(nextStreams) } ) .sync() } } // kick off the loop. newWriteLoop.sync() def write(msg: Message): Future[Unit] = if (writeWindowBytes.isEmpty) { trans.write(Message.encode(msg)) } else msg match { // The sender of a Tdispatch has indicated it is no longer // interested in the request, in which case, we need to make // sure the Tdispatch is removed from the writeLoop if it // exists. case m @ Message.Tdiscarded(tag, why) => val intr = interrupts ! Interrupt(tag, Failure(why)) intr.before { trans.write(Message.encode(m)) } case m: Message => val p = new Promise[Unit] p.setInterruptHandler { case NonFatal(exc) => // if an Rdispatch stream is interrupted, we send the // receiver an `Rdiscarded` so they can safely relinquish // any outstanding fragments and we remove the pending stream // from our `writeLoop`. Note, `Tdiscarded` is handled above. if (m.typ == Message.Types.Rdispatch) { val intr = interrupts ! Interrupt(m.tag, exc) // We make sure to interrupt before sending the Rdiscarded // so we can sequence the discard relative to fragments sitting // in the writeLoop. intr.before { trans.write(Message.encode(Message.Rdiscarded(m.tag))) } } } pendingWriteStreams.incrementAndGet() // There is no upper bound on writeq and elements can only // be removed via interrupts. However, the transport can // be bounded which is the underlying resource backing the // writeq. val nq = writeq ! FragmentStream(m.tag, Message.encodeFragments(m, writeWindowBytes.get), p) nq.before(p).ensure { pendingWriteStreams.decrementAndGet() } } /** * Stores fully aggregated mux messages that were read from `trans`. */ private[this] val readq = new AsyncQueue[Message] /** * The `readLoop` is responsible for demuxing and defragmenting tag streams. * If we read an `Rdiscarded` remove the corresponding stream from `tags`. */ private[this] def readLoop(tags: Map[Int, Buf]): Future[Unit] = trans.read().flatMap { buf => readStreamBytes.add(buf.length) val br = ByteReader(buf) val header = br.readIntBE() val typ = Message.Tags.extractType(header) val tag = Message.Tags.extractTag(header) // normalize tag by flipping the tag MSB val t = Message.Tags.setMsb(tag) val nextTags = if (Message.Types.isDiscard(typ)) { val msg = Message.decode(buf) // `Tdiscarded` has a tag of 0 since its a MarkerMessage and encodes // the discarded tag in the `which` field whereas `Rdiscarded` is // not a marker tag. (shrug) val discardTag = msg match { case Tdiscarded(which, _) => Message.Tags.setMsb(which) case Rdiscarded(_) => t case _ => unexpectedMessage("[Tdiscarded | Rdiscarded]", msg) } // We only want to intercept discards in this loop if we // are processing the stream. 
if (tags.contains(discardTag)) { // if we're the client, we need to propagate the Rdiscarded to the dispatcher // if we're the server, we need to send the client a synthesized Rdiscarded msg match { case Tdiscarded(which, _) => write(Rdiscarded(which)) case Rdiscarded(_) => readq.offer(msg) case _ => unexpectedMessage("[Tdiscarded | Rdiscarded]", msg) } tags - discardTag } else { readq.offer(msg) tags } } else if (Message.Tags.isFragment(tag)) { tags.updated(t, tags.get(t) match { case Some(buf0) => buf0.concat(br.readAll()) case None => br.readAll() }) } else { val resBuf = if (!tags.contains(t)) buf else { val head = buf.slice(0, 4) val rest = tags(t) val last = buf.slice(4, buf.length) Buf(Seq(head, rest, last)) } readq.offer(Message.decode(resBuf)) tags - t } pendingReadStreams.set(nextTags.size) readLoop(nextTags) } // failures are pushed to the readq which are propagated to // the layers above. readLoop(Map.empty).onFailure { exc => readq.fail(exc) } def read(): Future[Message] = readq.poll() def status: Status = trans.status val onClose: Future[Throwable] = trans.onClose def localAddress: SocketAddress = trans.localAddress def remoteAddress: SocketAddress = trans.remoteAddress def peerCertificate: Option[Certificate] = trans.peerCertificate def close(deadline: Time): Future[Unit] = trans.close(deadline) val context: TransportContext = new LegacyContext(this) } private[this] def unexpectedMessage(expected: String, found: Message): Nothing = { throw new IllegalStateException(s"Unexpected message. Expected $expected, observed $found.") } }
mkhq/finagle
finagle-mux/src/main/scala/com/twitter/finagle/mux/transport/MuxFramer.scala
Scala
apache-2.0
13,424
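A round-trip sketch for the frame-size header helpers above. MuxFramer is private[finagle], so this only compiles from inside the com.twitter.finagle namespace and with finagle-mux on the classpath; it is shown purely for illustration.

package com.twitter.finagle.mux.transport

object HeaderRoundTrip extends App {
  val buf = MuxFramer.Header.encodeFrameSize(32 * 1024) // 4-byte big-endian frame size
  println(MuxFramer.Header.decodeFrameSize(buf))        // prints 32768
}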
/*
 * # Trove
 *
 * This file is part of Trove - A FREE desktop budgeting application that
 * helps you track your finances, FREES you from complex budgeting, and
 * enables you to build your TROVE of savings!
 *
 * Copyright © 2016-2021 Eric John Fredericks.
 *
 * Trove is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Trove is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Trove. If not, see <http://www.gnu.org/licenses/>.
 */

import java.io.File
import java.text.DecimalFormat

import grizzled.slf4j.Logging

package object trove extends Logging {

  object constants {
    val ApplicationName = "Trove"
    val ApplicationVersion = "0.1.0"

    val UserHomeDir = new File(System.getProperty("user.home"))
    val ApplicationHomeDir = new File(UserHomeDir, ".trove")
    val ProjectsHomeDir = new File(ApplicationHomeDir, "projects")
    ProjectsHomeDir.mkdirs()
  }

  val monetaryValueFormatter: DecimalFormat = new DecimalFormat() {
    setMinimumFractionDigits(2)
    setMaximumFractionDigits(2)
  }
}
emanchgo/trove
src/main/scala/trove/package.scala
Scala
gpl-3.0
1,490
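The formatter above pins the fraction digits to exactly two; the grouping separator still comes from the JVM's default locale. A REPL-style sketch, with output shown for an en-US locale:

trove.monetaryValueFormatter.format(1234.5) // "1,234.50"
trove.monetaryValueFormatter.format(0.1)    // "0.10"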
package skuber.apps import skuber.ResourceSpecification.{Names, Scope} import skuber._ import play.api.libs.functional.syntax._ import play.api.libs.json.{Format, JsPath, Json} import skuber.json.format._ // reuse some core skuber json formatters /** * Created by hollinwilkins on 4/5/17. * The api version of this StatefulSet type is v1beta2, which is for use with k8s 1.8+. * For earlier versions of k8s, use skuber.apps.v1beta1.StatefulSet */ case class StatefulSet(override val kind: String ="StatefulSet", override val apiVersion: String = "apps/v1beta2", // correct at k8s 1.8 metadata: ObjectMeta, spec: Option[StatefulSet.Spec] = None, status: Option[StatefulSet.Status] = None) extends ObjectResource { def withResourceVersion(version: String) = this.copy(metadata = metadata.copy(resourceVersion=version)) lazy val copySpec = this.spec.getOrElse(new StatefulSet.Spec(template = Pod.Template.Spec())) private val rollingUpdateStrategy = StatefulSet.UpdateStrategy(`type`=StatefulSet.UpdateStrategyType.RollingUpdate, None) private def rollingUpdateStrategy(partition: Int)= StatefulSet.UpdateStrategy(`type`=StatefulSet.UpdateStrategyType.RollingUpdate,Some(StatefulSet.RollingUpdateStrategy(partition))) def withReplicas(count: Int) = this.copy(spec=Some(copySpec.copy(replicas=Some(count)))) def withServiceName(serviceName: String) = this.copy(spec=Some(copySpec.copy(serviceName=Some(serviceName)))) def withTemplate(template: Pod.Template.Spec) = this.copy(spec=Some(copySpec.copy(template=template))) def withLabelSelector(sel: LabelSelector) = this.copy(spec=Some(copySpec.copy(selector=Some(sel)))) def withRollingUpdateStrategyPartition(partition:Int) = this.copy(spec=Some(copySpec.copy(updateStrategy = Some(rollingUpdateStrategy(partition))))) def withVolumeClaimTemplate(claim: PersistentVolumeClaim) = { val spec = copySpec.withVolumeClaimTemplate(claim) this.copy(spec=Some(spec)) } } object StatefulSet { val specification=NonCoreResourceSpecification ( apiGroup="apps", version="v1beta2", // version as at k8s v1.8 scope = Scope.Namespaced, names=Names( plural = "statefulsets", singular = "statefulset", kind = "StatefulSet", shortNames = List() ) ) implicit val stsDef = new ResourceDefinition[StatefulSet] { def spec=specification } implicit val stsListDef = new ResourceDefinition[StatefulSetList] { def spec=specification } implicit val scDef = new Scale.SubresourceSpec[StatefulSet] { override def apiVersion = "apps/v1beta2"} def apply(name: String): StatefulSet = StatefulSet(metadata=ObjectMeta(name=name)) object PodManagementPolicyType extends Enumeration { type PodManagementPolicyType = Value val OrderedReady,Parallel = Value } object UpdateStrategyType extends Enumeration { type UpdateStrategyType = Value val OnDelete,RollingUpdate = Value } case class UpdateStrategy(`type`: UpdateStrategyType.UpdateStrategyType, rollingUpdate: Option[RollingUpdateStrategy]=None) case class RollingUpdateStrategy(partition: Int) case class Spec(replicas: Option[Int] = Some(1), serviceName: Option[String] = None, selector: Option[LabelSelector] = None, template: Pod.Template.Spec, volumeClaimTemplates: List[PersistentVolumeClaim] = Nil, podManagmentPolicy: Option[PodManagementPolicyType.PodManagementPolicyType] = None, updateStrategy: Option[UpdateStrategy] = None, revisionHistoryLimit: Option[Int] = None) { def withVolumeClaimTemplate(claim: PersistentVolumeClaim) = copy(volumeClaimTemplates = claim :: volumeClaimTemplates) } case class 
Condition(`type`:String,status:String,lastTransitionTime:Option[Timestamp],reason:Option[String],message:Option[String]) case class Status(observedGeneration: Option[Int], replicas: Int, readyReplicas: Option[Int], updatedReplicas: Option[Int], currentRevision: Option[String], updateRevision: Option[String], collisionCount: Option[Int], conditions: Option[List[Condition]]) // json formatters implicit val statefulSetPodPcyMgmtFmt: Format[StatefulSet.PodManagementPolicyType.PodManagementPolicyType] = Format(enumReads(StatefulSet.PodManagementPolicyType, StatefulSet.PodManagementPolicyType.OrderedReady), enumWrites) implicit val statefulSetRollUp: Format[StatefulSet.RollingUpdateStrategy] = Json.format[StatefulSet.RollingUpdateStrategy] implicit val statefulSetUpdStrFmt: Format[StatefulSet.UpdateStrategy] = ( (JsPath \ "type").formatEnum(StatefulSet.UpdateStrategyType, Some(StatefulSet.UpdateStrategyType.RollingUpdate)) and (JsPath \ "rollingUpdate").formatNullable[StatefulSet.RollingUpdateStrategy] )(StatefulSet.UpdateStrategy.apply _,unlift(StatefulSet.UpdateStrategy.unapply)) implicit val statefulSetSpecFmt: Format[StatefulSet.Spec] = ( (JsPath \ "replicas").formatNullable[Int] and (JsPath \ "serviceName").formatNullable[String] and (JsPath \ "selector").formatNullableLabelSelector and (JsPath \ "template").format[Pod.Template.Spec] and (JsPath \ "volumeClaimTemplates").formatMaybeEmptyList[PersistentVolumeClaim] and (JsPath \ "podManagmentPolicy").formatNullableEnum(StatefulSet.PodManagementPolicyType) and (JsPath \ "updateStrategy").formatNullable[StatefulSet.UpdateStrategy] and (JsPath \ "revisionHistoryLimit").formatNullable[Int] )(StatefulSet.Spec.apply _, unlift(StatefulSet.Spec.unapply)) implicit val statefulSetCondFmt: Format[StatefulSet.Condition] = Json.format[StatefulSet.Condition] implicit val statefulSetStatusFmt: Format[StatefulSet.Status] = Json.format[StatefulSet.Status] implicit lazy val statefulSetFormat: Format[StatefulSet] = ( objFormat and (JsPath \ "spec").formatNullable[StatefulSet.Spec] and (JsPath \ "status").formatNullable[StatefulSet.Status] )(StatefulSet.apply _, unlift(StatefulSet.unapply)) implicit val statefulSetListFormat: Format[StatefulSetList] = ListResourceFormat[StatefulSet] }
doriordan/skuber
client/src/main/scala/skuber/apps/StatefulSet.scala
Scala
apache-2.0
6,325
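A usage sketch of the fluent copy-helpers defined above, assuming the skuber client artifacts are on the classpath. The empty Pod.Template.Spec() is just a placeholder; a real spec would carry containers.

import skuber._
import skuber.apps.StatefulSet

val sts = StatefulSet("web")
  .withReplicas(3)
  .withServiceName("web")
  .withTemplate(Pod.Template.Spec())
  .withRollingUpdateStrategyPartition(1)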
object exercise2_3 {
  def curry[A, B, C](f: (A, B) => C): A => (B => C) =
    (a: A) => ((b: B) => f(a, b))

  def add(a: Int, b: Int): Int = a + b

  def main(args: Array[String]): Unit = {
    val c1 = curry(add)(3)
    println(c1)
    val c2 = c1(4)
    println(c2)
  }
}
joonjeong/shadow-boxing
fp-in-scala/exercise2/currying.scala
Scala
mit
277
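The two prints above show a function value and then 7. The property being exercised is that partial application through curry agrees with the uncurried call, as in this REPL-style check:

import exercise2_3._

val partiallyApplied = curry(add)(3)      // Int => Int
assert(partiallyApplied(4) == add(3, 4))  // both are 7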
package org.jetbrains.plugins.scala package compiler import java.io.{File, IOException} import javax.swing.event.HyperlinkEvent import com.intellij.notification.{Notification, NotificationListener, NotificationType, Notifications} import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ApplicationComponent import com.intellij.openapi.project.Project import com.intellij.openapi.projectRoots.{JavaSdk, ProjectJdkTable} import com.intellij.openapi.roots.ProjectRootManager import com.intellij.openapi.util.io.FileUtil import com.intellij.util.PathUtil import com.intellij.util.net.NetUtils import gnu.trove.TByteArrayList import org.jetbrains.jps.incremental.BuilderService import org.jetbrains.plugins.scala.compiler.CompileServerLauncher._ import org.jetbrains.plugins.scala.extensions._ import scala.collection.JavaConverters._ import scala.util.control.Exception._ /** * @author Pavel Fatin */ class CompileServerLauncher extends ApplicationComponent { private var serverInstance: Option[ServerInstance] = None def initComponent() {} def disposeComponent() { if (running) stop() } def tryToStart(project: Project): Boolean = { if (!running) { val started = start(project) if (started) { try new RemoteServerRunner(project).send("addDisconnectListener", Seq.empty, null) catch { case e: Exception => } } started } else true } private def start(project: Project): Boolean = { val applicationSettings = ScalaCompileServerSettings.getInstance if (applicationSettings.COMPILE_SERVER_SDK == null) { // Try to find a suitable JDK val choice = Option(ProjectRootManager.getInstance(project).getProjectSdk).orElse { val all = ProjectJdkTable.getInstance.getSdksOfType(JavaSdk.getInstance()).asScala all.headOption } choice.foreach(sdk => applicationSettings.COMPILE_SERVER_SDK = sdk.getName) // val message = "JVM SDK is automatically selected: " + name + // "\n(can be changed in Application Settings / Scala)" // Notifications.Bus.notify(new Notification("scala", "Scala compile server", // message, NotificationType.INFORMATION)) } findJdkByName(applicationSettings.COMPILE_SERVER_SDK) .left.map(_ + "\nPlease either disable Scala compile server or configure a valid JVM SDK for it.") .right.flatMap(start(project, _)) match { case Left(error) => val title = "Cannot start Scala compile server" val content = s"<html><body>${error.replace("\n", "<br>")} <a href=''>Configure</a></body></html>" Notifications.Bus.notify(new Notification("scala", title, content, NotificationType.ERROR, ConfigureLinkListener)) false case Right(_) => ApplicationManager.getApplication invokeLater new Runnable { override def run() { CompileServerManager.instance(project).configureWidget() } } true } } private def start(project: Project, jdk: JDK): Either[String, Process] = { import org.jetbrains.plugins.scala.compiler.CompileServerLauncher.{compilerJars, jvmParameters} compilerJars.partition(_.exists) match { case (presentFiles, Seq()) => val classpath = (jdk.tools +: presentFiles).map(_.canonicalPath).mkString(File.pathSeparator) val settings = ScalaCompileServerSettings.getInstance val freePort = CompileServerLauncher.findFreePort if (settings.COMPILE_SERVER_PORT != freePort) { new RemoteServerStopper(settings.COMPILE_SERVER_PORT).sendStop() settings.COMPILE_SERVER_PORT = freePort ApplicationManager.getApplication.saveSettings() } val ngRunnerFqn = "org.jetbrains.plugins.scala.nailgun.NailgunRunner" val id = settings.COMPILE_SERVER_ID val shutdownDelay = settings.COMPILE_SERVER_SHUTDOWN_DELAY val shutdownDelayArg 
= if (settings.COMPILE_SERVER_SHUTDOWN_IDLE && shutdownDelay >= 0) { Seq(s"-Dshutdown.delay=$shutdownDelay") } else Nil val commands = jdk.executable.canonicalPath +: "-cp" +: classpath +: jvmParameters ++: shutdownDelayArg ++: ngRunnerFqn +: freePort.toString +: id.toString +: Nil val builder = new ProcessBuilder(commands.asJava) if (settings.USE_PROJECT_HOME_AS_WORKING_DIR) { projectHome(project).foreach(dir => builder.directory(dir)) } catching(classOf[IOException]).either(builder.start()) .left.map(_.getMessage) .right.map { process => val watcher = new ProcessWatcher(process, "scalaCompileServer") serverInstance = Some(ServerInstance(watcher, freePort, builder.directory())) watcher.startNotify() process } case (_, absentFiles) => val paths = absentFiles.map(_.getPath).mkString(", ") Left("Required file(s) not found: " + paths) } } // TODO stop server more gracefully def stop() { serverInstance.foreach { it => it.destroyProcess() } } def stop(project: Project) { stop() ApplicationManager.getApplication invokeLater new Runnable { override def run() { CompileServerManager.instance(project).configureWidget() } } } def running: Boolean = serverInstance.exists(_.running) def errors(): Seq[String] = serverInstance.map(_.errors()).getOrElse(Seq.empty) def port: Option[Int] = serverInstance.map(_.port) def getComponentName = getClass.getSimpleName } object CompileServerLauncher { def instance = ApplicationManager.getApplication.getComponent(classOf[CompileServerLauncher]) def compilerJars = { val jpsBuildersJar = new File(PathUtil.getJarPathForClass(classOf[BuilderService])) val utilJar = new File(PathUtil.getJarPathForClass(classOf[FileUtil])) val trove4jJar = new File(PathUtil.getJarPathForClass(classOf[TByteArrayList])) val pluginRoot = if (ApplicationManager.getApplication.isUnitTestMode) new File(System.getProperty("plugin.path"), "lib").getCanonicalPath else new File(PathUtil.getJarPathForClass(getClass)).getParent val jpsRoot = new File(pluginRoot, "jps") Seq( jpsBuildersJar, utilJar, trove4jJar, new File(pluginRoot, "scala-library.jar"), new File(pluginRoot, "scala-nailgun-runner.jar"), new File(pluginRoot, "compiler-settings.jar"), new File(jpsRoot, "nailgun.jar"), new File(jpsRoot, "sbt-interface.jar"), new File(jpsRoot, "incremental-compiler.jar"), new File(jpsRoot, "jline.jar"), new File(jpsRoot, "scala-jps-plugin.jar")) } def jvmParameters: Seq[String] = { val settings = ScalaCompileServerSettings.getInstance val xmx = settings.COMPILE_SERVER_MAXIMUM_HEAP_SIZE |> { size => if (size.isEmpty) Nil else List("-Xmx%sm".format(size)) } val (userMaxPermSize, otherParams) = settings.COMPILE_SERVER_JVM_PARAMETERS.split(" ").partition(_.contains("-XX:MaxPermSize")) val defaultMaxPermSize = Some("-XX:MaxPermSize=256m") val needMaxPermSize = settings.COMPILE_SERVER_SDK < "1.8" val maxPermSize = if (needMaxPermSize) userMaxPermSize.headOption.orElse(defaultMaxPermSize) else None xmx ++ otherParams ++ maxPermSize } def ensureServerRunning(project: Project) { val launcher = CompileServerLauncher.instance if (needRestart(project)) launcher.stop() if (!launcher.running) launcher.tryToStart(project) } def needRestart(project: Project): Boolean = { val launcher = CompileServerLauncher.instance ScalaCompileServerSettings.getInstance().USE_PROJECT_HOME_AS_WORKING_DIR && projectHome(project) != launcher.serverInstance.map(_.workingDir) } def ensureNotRunning(project: Project) { val launcher = CompileServerLauncher.instance if (launcher.running) launcher.stop(project) } def findFreePort: Int = { val 
port = ScalaCompileServerSettings.getInstance().COMPILE_SERVER_PORT if (NetUtils.canConnectToSocket("localhost", port)) NetUtils.findAvailableSocketPort() else port } private def projectHome(project: Project): Option[File] = { for { dir <- Option(project.getBaseDir) path <- Option(dir.getCanonicalPath) file = new File(path) if file.exists() } yield file } } private case class ServerInstance(watcher: ProcessWatcher, port: Int, workingDir: File) { private var stopped = false def running: Boolean = !stopped && watcher.running def errors(): Seq[String] = watcher.errors() def destroyProcess() { stopped = true watcher.destroyProcess() } } private object ConfigureLinkListener extends NotificationListener.Adapter { def hyperlinkActivated(notification: Notification, event: HyperlinkEvent) { CompileServerManager.showCompileServerSettingsDialog() notification.expire() } }
igrocki/intellij-scala
src/org/jetbrains/plugins/scala/compiler/CompileServerLauncher.scala
Scala
apache-2.0
9,010
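// A minimal, self-contained sketch (not from the repository above) of the ProcessBuilder pattern
// the launcher uses: assemble a java command line, start it, and capture IO failures as Either.
// The jar names, main class and port below are illustrative placeholders, not real plugin values.
import java.io.{File, IOException}
import scala.collection.JavaConverters._
import scala.util.control.Exception.catching

object LaunchSketch extends App {
  val javaExe   = new File(new File(System.getProperty("java.home"), "bin"), "java").getCanonicalPath
  val classpath = Seq("nailgun.jar", "compiler-settings.jar").mkString(File.pathSeparator) // placeholder jars
  val commands  = Seq(javaExe, "-cp", classpath, "-Xmx1024m", "example.ServerMain", "3200") // placeholder main class and port

  val started = catching(classOf[IOException]).either(new ProcessBuilder(commands.asJava).start())

  started.fold(
    error   => println("Could not start process: " + error.getMessage),
    process => println("Started process, alive = " + process.isAlive)
  )
}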
package scala.pickling package tags private[pickling] final case class SimpleFastTypeTag[T]( typeConstructor: String, typeArgs: List[FastTypeTag[_]]) extends FastTypeTag[T] { override def isSimpleType = typeArgs.isEmpty override val key = if (typeArgs.isEmpty) typeConstructor else s"$typeConstructor[${typeArgs.map(_.key).mkString(",")}]" }
scala/pickling
core/src/main/scala/scala/pickling/tags/SimpleFastTypeTag.scala
Scala
bsd-3-clause
366
package com.example.db import slick.driver.H2Driver.api._ import scala.concurrent.Await import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.Duration // Demonstrates various ways of reading data object QueryActions extends App { // A simple dictionary table with keys and values class Dict(tag: Tag) extends Table[(Int, String)](tag, "INT_DICT") { def key = column[Int]("KEY", O.PrimaryKey) def value = column[String]("VALUE") def * = (key, value) } val dict = TableQuery[Dict] val db = Database.forConfig("h2mem1") try { // Define a pre-compiled parameterized query for reading all key/value // pairs up to a given key. val upTo = Compiled { k: Column[Int] => dict.filter(_.key <= k).sortBy(_.key) } // A second pre-compiled query which returns a Set[String] val upToSet = upTo.map(_.andThen(_.to[Set])) Await.result(db.run(DBIO.seq( // Create the dictionary table and insert some data dict.ddl.create, dict ++= Seq(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d", 5 -> "e"), upTo(3).result.map { r => println("Seq (Vector) of k/v pairs up to 3") println("- " + r) }, upToSet(3).result.map { r => println("Set of k/v pairs up to 3") println("- " + r) }, dict.map(_.key).to[Array].result.map { r => println("All keys in an unboxed Array[Int]") println("- " + r) }, upTo(3).result.head.map { r => println("Only get the first result, failing if there is none") println("- " + r) }, upTo(3).result.headOption.map { r => println("Get the first result as an Option, or None") println("- " + r) } )), Duration.Inf) // The Publisher captures a Database plus a DBIO action. // The action does not run until you consume the stream. val p = db.stream(upTo(3).result) println("Stream k/v pairs up to 3 via Reactive Streams") Await.result(p.foreach { v => println("- " + v) }, Duration.Inf) } finally db.close }
waxmittmann/finatra-sandbox
old/QueryActions.scala
Scala
apache-2.0
2,099
package delta.testing import java.util.concurrent.CountDownLatch import java.util.concurrent.TimeUnit.{ MILLISECONDS, SECONDS } import scala.collection.concurrent.TrieMap import scala.collection.immutable.Seq import scala.concurrent.{ Future, Promise } import scala.concurrent.Await import scala.concurrent.duration.{ Duration, DurationInt } import scala.util.{ Failure, Success, Try } import org.junit._ import org.junit.Assert._ import scuff._, concurrent._ import delta.write.{ Repository, UnknownIdException } import scuff.reflect.Surgeon import delta._ import delta.write._ import delta.util._ import scala.{ SerialVersionUID => version } import delta.process.ConcurrentMapStore import scuff.json.JsVal trait AggrEventHandler { type Return def dispatch(evt: AggrEvent): Return = evt.dispatch(this) def on(evt: AggrCreated): Return def on(evt: NewNumberWasAdded): Return def on(evt: StatusChanged): Return } case class AddNewNumber(n: Int) case class ChangeStatus(newStatus: String) case class AggrState(status: String, numbers: Set[Int]) object AggrStateProjector extends Projector[AggrState, AggrEvent] { def init(evt: AggrEvent) = new EvtHandler().dispatch(evt) def next(s: AggrState, evt: AggrEvent) = new EvtHandler(s).dispatch(evt) } private class EvtHandler(state: AggrState = null) extends AggrEventHandler { type Return = AggrState def on(evt: AggrCreated) = { require(state == null) new AggrState(evt.status, Set.empty) } def on(evt: NewNumberWasAdded) = { val numbers = state.numbers + evt.n state.copy(numbers = numbers) } def on(evt: StatusChanged) = { state.copy(status = evt.newStatus) } } sealed abstract class AggrEvent extends DoubleDispatch { type Callback = AggrEventHandler } @version(1) case class NewNumberWasAdded(n: Int) extends AggrEvent { def dispatch(cb: AggrEventHandler): cb.Return = cb.on(this) } @version(1) case class AggrCreated(status: String) extends AggrEvent { def dispatch(cb: AggrEventHandler): cb.Return = cb.on(this) } @version(1) case class StatusChanged(newStatus: String) extends AggrEvent { def dispatch(cb: AggrEventHandler): cb.Return = cb.on(this) } abstract class AbstractEventStoreRepositoryTest { implicit def ec = RandomDelayExecutionContext def ticker = SysClockTicker class TimestampCodec(name: String) extends Codec[Timestamp, Map[String, String]] { def encode(ts: Timestamp): Map[String, String] = Map(name -> ts.toString) def decode(map: Map[String, String]): Timestamp = Timestamp.parseISO(map(name)).get } @volatile var es: EventStore[String, AggrEvent] = _ @volatile var repo: Repository[String, Aggr] with MutableEntity = _ private def doAsync(f: Promise[Any] => Unit): Unit = { val something = Promise[Any]() f(something) Await.result(something.future, 222.seconds) match { case th: Throwable => throw th case Failure(th) => throw th case _ => } } implicit def metadata: Metadata = Metadata( "timestamp" -> new Timestamp().toString, "random" -> math.random().toString) @Test def loadUnknownId() = doAsync { done => repo.load("Foo").onComplete { case Success(_) => done.complete(Try(fail("Should have failed as unknown"))) case Failure(e: UnknownIdException) => assertEquals("Foo", e.id) done.success(()) case Failure(other) => done.failure(other) } } @Test def failedInvariants() = doAsync { done => val id = "Foo" val newFoo = TheOneAggr.create() newFoo apply AddNewNumber(-1) repo.insert(id, newFoo).onComplete { case Failure(_) => repo.exists(id).onComplete { case Failure(t) => done.failure(t) case Success(None) => done.success("Fail on negative number") case Success(Some(rev)) => 
fail(s"Should not exist: $rev") } case Success(_) => fail("Should not accept negative numbers") } } @Test def saveNewThenUpdate() = doAsync { done => val id = "Foo" val newFoo = TheOneAggr.create() repo.insert(id, newFoo).onComplete { case Failure(t) => done.failure(t) case Success(_) => repo.update("Foo", Some(0)) { case (foo, rev) => assertEquals(0, rev) assertEquals("New", foo.aggr.status) }.onComplete { case Failure(t) => done.failure(t) case Success((_, rev)) => assertEquals(0, rev) repo.update(id, Some(0)) { case (foo, rev) if rev == 0 => assertEquals(0, rev) assertEquals("New", foo.aggr.status) foo(AddNewNumber(44)) }.onComplete { case Failure(t) => done.failure(t) case Success((_, rev)) => assertEquals(1, rev) repo.update("Foo", Some(0)) { case (foo, rev) => assertEquals(1, rev) assertTrue(foo.concurrentUpdates contains NewNumberWasAdded(44)) assertEquals("New", foo.aggr.status) foo(AddNewNumber(44)) }.onComplete { case Failure(t) => done.failure(t) case Success((_, rev)) => assertEquals(1, rev) repo.update("Foo", Some(1)) { case (foo, rev) => assertEquals(1, rev) assertTrue(foo.concurrentUpdates.isEmpty) assertEquals("New", foo.aggr.status) foo(ChangeStatus("NotNew")) }.onComplete { case Failure(t) => done.failure(t) case Success((_, rev)) => assertEquals(2, rev) repo.load("Foo").onComplete { case Failure(t) => done.failure(t) case Success((foo, rev)) => assertEquals(2, rev) assertEquals("NotNew", foo.aggr.status) done.success(()) } } } } } } } @Test def update() = doAsync { done => val id = "Foo" val newFoo = TheOneAggr.create() newFoo(AddNewNumber(42)) newFoo.appliedEvents match { case Seq(AggrCreated(_), NewNumberWasAdded(n)) => assertEquals(42, n) case _ => fail("Event sequence incorrect: " + newFoo.appliedEvents) } val update1 = repo.insert(id, newFoo).flatMap { case _ => repo.update(id, Some(0)) { case (foo, rev) => assertEquals(0, rev) foo(AddNewNumber(42)) assertEquals(0, foo.appliedEvents.size) foo(AddNewNumber(99)) assertEquals(1, foo.appliedEvents.size) } } update1.onComplete { case Failure(t) => done.failure(t) case Success((_, revision)) => assertEquals(1, revision) repo.load(id).onComplete { case Failure(t) => done.failure(t) case Success((foo, rev)) => assertEquals(1, rev) assertTrue(foo.numbers.contains(42)) assertTrue(foo.numbers.contains(99)) assertEquals(2, foo.numbers.size) done.success(()) } } } @Test def `idempotent insert`() = doAsync { done => val id = "Baz" val baz = TheOneAggr.create() repo.insert(id, baz).onComplete { case Failure(t) => done.failure(t) case Success(idAgain) => assertEquals(id, idAgain) repo.insert(id, baz).onComplete { case Failure(t) => done.failure(t) case Success(idAgain) => assertEquals(id, idAgain) done.success(()) } } } @Test def `concurrent update`() = doAsync { done => val executor = java.util.concurrent.Executors.newScheduledThreadPool(16) val id = "Foo" val foo = TheOneAggr.create() val insFut = repo.insert(id, foo) val updateRevisions = new TrieMap[Int, Future[Int]] val range = 0 to 75 val latch = new CountDownLatch(range.size) insFut.onComplete { case f: Failure[_] => done.complete(f) case Success(_) => for (i <- range) { val runThis = new Runnable { def run: Unit = { val fut = repo.update(id, Some(0)) { case (foo, _) => foo(AddNewNumber(i)) Future successful foo } updateRevisions += i -> fut.map(_._2) latch.countDown() } } executor.schedule(runThis, 500, MILLISECONDS) } if (!latch.await(30, SECONDS)) { done.complete(Try(fail("Timed out waiting for concurrent updates to finish"))) } else { assertEquals(range.size, 
updateRevisions.size) val revisions = updateRevisions.map { case (_, f) => Await.result(f, Duration.Inf) }.toList.sorted done.complete(Try(assertEquals((1 to range.size).toList, revisions))) } } } @Test def `noop update`() = doAsync { done => val id = "Foo" val foo = TheOneAggr.create() repo.insert(id, foo).onComplete { case Failure(t) => done.failure(t) case Success(_) => repo.update(id, Some(0)) { case (foo, rev) => assertEquals(0, rev) Future successful foo }.onComplete { case Failure(t) => done.failure(t) case Success((_, newRevision)) => assertEquals(0, newRevision) done.success(()) } } } } class Aggr(val state: TheOneAggr.State, val concurrentUpdates: Seq[AggrEvent]) { def appliedEvents: List[AggrEvent] = { val surgeon = new Surgeon(state) surgeon.getAll[List[AggrEvent]].head._2.reverse } private[delta] def aggr = state.get def apply(cmd: AddNewNumber): Unit = { if (!aggr.numbers.contains(cmd.n)) { state(NewNumberWasAdded(cmd.n)) } } def apply(cmd: ChangeStatus): Unit = { if (aggr.status != cmd.newStatus) { state(StatusChanged(cmd.newStatus)) } } def numbers = aggr.numbers } object TheOneAggr extends Entity("", AggrStateProjector) { type Id = String type Type = Aggr def init(state: State, concurrentUpdates: List[Transaction]): Aggr = new Aggr(state, concurrentUpdates.flatMap(_.events.collectAs[AggrEvent])) def state(entity: Aggr) = entity.state override def validate(state: AggrState): Unit = { require(state.numbers.filter(_ < 0).isEmpty, "Cannot contain negative numbers") } def create(): Aggr = { val mutator = TheOneAggr.newState() mutator(new AggrCreated("New")) new Aggr(mutator, Nil) } } class TestEventStoreRepositoryNoSnapshots extends AbstractEventStoreRepositoryTest { object EvtFmt extends EventFormat[AggrEvent, String] { def getVersion(cls: EventClass) = NoVersion def getName(cls: EventClass): String = cls.getSimpleName def encode(evt: AggrEvent): String = evt match { case AggrCreated(status) => s""" "$status" """ case NewNumberWasAdded(num) => num.toString case StatusChanged(newStatus) => s""" "$newStatus" """ } def decode(encoded: Encoded): AggrEvent = { val json = encoded.data encoded.name match { case "AggrCreated" => AggrCreated(status = (JsVal parse json).asStr) case "NewNumberWasAdded" => NewNumberWasAdded(n = json.toInt) case "StatusChanged" => StatusChanged(newStatus = (JsVal parse json).asStr) } } } @Before def setup(): Unit = { es = new TransientEventStore[String, AggrEvent, String]( RandomDelayExecutionContext, EvtFmt)(_ => ticker) with MessageTransportPublishing[String, AggrEvent] { def toTopic(ch: Channel) = Topic(s"transactions/$ch") def toTopic(tx: Transaction): Topic = toTopic(tx.channel) val txTransport = new LocalTransport[Transaction](toTopic, ec) val txChannels = Set(TheOneAggr.channel) val txCodec = Codec.noop[Transaction] } repo = new EntityRepository(TheOneAggr)(es) } } class TestEventStoreRepositoryWithSnapshots extends AbstractEventStoreRepositoryTest { import ReflectiveDecoder._ object EvtFmt extends ReflectiveDecoder[AggrEvent, String](MatchOnMethodName) with AggrEventHandler with EventFormat[AggrEvent, String] { type Return = String def getVersion(cls: EventClass) = NoVersion def getName(cls: EventClass): String = { val lastDot = cls.getName.lastIndexOf('.') val nextDot = cls.getName.lastIndexOf('.', lastDot - 1) cls.getName.substring(nextDot + 1) } def encode(evt: AggrEvent): String = evt.dispatch(this) def on(evt: AggrCreated): Return = s""" "${evt.status}" """ def `testing.AggrCreated`(encoded: Encoded): AggrCreated = new AggrCreated(status = 
JsVal.parse(encoded.data).asStr) def on(evt: NewNumberWasAdded): Return = evt.n.toString def `testing.NewNumberWasAdded`(encoded: Encoded): NewNumberWasAdded = new NewNumberWasAdded(n = encoded.data.toInt) def on(evt: StatusChanged): Return = s""" "${evt.newStatus}" """ def `testing.StatusChanged`(encoded: Encoded): StatusChanged = new StatusChanged(newStatus = JsVal.parse(encoded.data).asStr) } case class Metrics(id: String, duration: Long, timestamp: Long) var metrics: List[Metrics] = _ @Before def setup(): Unit = { metrics = Nil es = new TransientEventStore[String, AggrEvent, String]( RandomDelayExecutionContext, EvtFmt)(_ => ticker) with MessageTransportPublishing[String, AggrEvent] { def toTopic(ch: Channel) = Topic(s"transactions/$ch") def toTopic(tx: Transaction): Topic = toTopic(tx.channel) val txTransport = new LocalTransport[Transaction](toTopic, RandomDelayExecutionContext) val txChannels = Set(TheOneAggr.channel) val txCodec = Codec.noop[Transaction] } type State = ConcurrentMapStore.State[AggrState] val snapshotMap = new TrieMap[String, State] val snapshotStore = ConcurrentMapStore[String, AggrState, AggrState](snapshotMap, "aggr", None)(_ => Future.none) repo = new EntityRepository(TheOneAggr)(es, snapshotStore) } }
nilskp/delta
src/test/scala/delta/testing/TestEventStoreRepository.scala
Scala
mit
14,346
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.scala package dsl import builder.RouteBuilder import test.{Adult, Toddler, Envelope} class RecipientListRouteTest extends ScalaTestSupport { def testRecipientList = { "mock:a" expect {_.count = 1} "direct:a" ! ("send this message to mock:a", "send this message to mock:z") "mock:a" assert() } def testRecipientListWithPatternMatching = { "mock:playgarden" expect {_.count = 1} "direct:b" ! (new Adult("Gert"), new Toddler("Ewan")) "mock:playgarden" assert() } def testRecipientListWithJXPath = { "mock:c" expect {_.count = 2} "mock:d" expect {_.count = 1} "direct:c" ! (new Envelope("mock:d"), new Envelope("mock:y")) "mock:c" assert() "mock:d" assert() } val builder = new RouteBuilder { //START SNIPPET: simple "direct:a" recipients(_.in[String].substring(21)) //END SNIPPET: simple //START SNIPPET: pattern "direct:b" recipients(_.getIn().getBody() match { case Toddler(_) => "mock:playgarden" case _ => "mock:work" }) //END SNIPPET: pattern //START SNIPPET: block "direct:c" ==> { to("mock:c") recipients(jxpath("./in/body/destination")) } //END SNIPPET: block } }
chicagozer/rheosoft
components/camel-scala/src/test/scala/org/apache/camel/scala/dsl/RecipientListRouteTest.scala
Scala
apache-2.0
2,066
package org.mitre.mandolin.mx import ml.dmlc.mxnet._ import com.typesafe.config.{Config, ConfigValue} import net.ceedubs.ficus.Ficus._ /** * Provides functionality for building up MXNet network/symbol objects from JSON-syntax * specification. * Structure, example: * * {"type":"variable", "name": "input"} // data refers to the name of the input for this symbol * {"type":"convBN", "name": "conv1", "data":"input", * "spec":{"numFilter": 64, "kernel":[1,1], "stride":[1,1], "pad":[0,0]}} * * * {"type":"pooling", "name": "pool1", "data": "conv1", * "spec":{"kernel":[3,3], "stride":[2,2], "pool_type": "max"} } * * Could also handle composite definitions here by allowing for user-specified "types" * */ class SymbolBuilder { def convFactory(data: Symbol, numFilter: Int, kernel: String, stride: String = "(1,1)", pad: String = "(0,0)", name: scala.Option[String] = None) = { val conv = Symbol.Convolution()()(Map("data" -> data, "num_filter" -> numFilter, "kernel" -> kernel, "stride" -> stride, "pad" -> pad)) val act = Symbol.Activation()()(Map("data" -> conv, "act_type" -> "relu")) act } def convFactoryBN(data: Symbol, numFilter: Int, kernel: String, stride: String = "(1,1)", pad: String = "(0,0)", name: scala.Option[String] = None) = { val conv = Symbol.Convolution()()(Map("data" -> data, "num_filter" -> numFilter, "kernel" -> kernel, "stride" -> stride, "pad" -> pad)) val bn = Symbol.BatchNorm()()(Map("data" -> conv)) val act = Symbol.Activation()()(Map("data" -> bn, "act_type" -> "relu")) act } def inceptionFactory(data: Symbol, num1x1: Int, num3x3red: Int, num3x3: Int, numd5x5red: Int, numd5x5: Int, pool: String, proj: Int, name: scala.Option[String] = None) = { val c1x1 = convFactory(data, num1x1, "(1,1)") val c3x3r = convFactory(data, num3x3red, "(1,1)") val c3x3 = convFactory(c3x3r, num3x3, "(3,3)", pad = "(1,1)") val cd5x5r = convFactory(data, numd5x5red, "(1,1)") val cd5x5 = convFactory(cd5x5r, numd5x5, "(5,5)", pad = "(2,2)") val pooling = Symbol.Pooling()()(Map("data" -> data, "kernel" -> "(3,3)", "stride" -> "(1,1)", "pad" -> "(1,1)", "pool_type" -> pool)) val cproj = convFactory(pooling, proj, "(1,1)") val concat = Symbol.Concat()(c1x1,c3x3,cd5x5,cproj)() concat } def inceptionFactoryA(data: Symbol, num1x1: Int, num3x3red: Int, num3x3: Int, numd3x3red: Int, numd3x3: Int, pool: String, proj: Int, name: scala.Option[String] = None) = { val c1x1 = convFactoryBN(data, num1x1, "(1,1)") val c3x3r = convFactoryBN(data, num3x3red, "(1,1)") val c3x3 = convFactoryBN(c3x3r, num3x3, "(3,3)", pad = "(1,1)") val cd3x3r = convFactoryBN(data, num3x3red, "(1,1)") val cd3x3_1 = convFactoryBN(cd3x3r, numd3x3, "(3,3)", pad = "(1,1)") val cd3x3_2 = convFactoryBN(cd3x3_1, numd3x3, "(3,3)", pad = "(1,1)") val pooling = Symbol.Pooling()()(Map("data" -> data, "kernel" -> "(3,3)", "stride" -> "(1,1)", "pad" -> "(1,1)", "pool_type" -> pool)) val cproj = convFactoryBN(pooling, proj, "(1,1)") val concat = Symbol.Concat()(c1x1, c3x3, cd3x3_2, cproj)() concat } def inceptionFactoryB(data: Symbol, num3x3red: Int, num3x3: Int, numd3x3red: Int, numd3x3: Int, name: scala.Option[String] = None) = { val c3x3r = convFactoryBN(data, num3x3red, "(1,1)") val c3x3 = convFactoryBN(c3x3r, num3x3, "(3,3)", pad = "(1,1)", stride = "(2,2)") val cd3x3r = convFactoryBN(data, numd3x3red, "(1,1)") val cd3x3_1 = convFactoryBN(cd3x3r, numd3x3, "(3,3)", pad = "(1,1)") val cd3x3_2 = convFactoryBN(cd3x3_1, numd3x3, "(3,3)", pad = "(1,1)", stride = "(2,2)") val pooling = Symbol.Pooling()()(Map("data" -> data, "kernel" -> "(3,3)", "stride" -> 
"(2,2)", "pad" -> "(1,1)", "pool_type" -> "max")) val concat = Symbol.Concat()(c3x3, cd3x3_2, pooling)() concat } private def getAsTuple2String(spec: Config, key: String, default: String = "(1,1)") : String = { try { val li = spec.as[List[String]](key) "("+li(0)+","+li(1)+")" } catch {case _: Throwable => default} } /** * This constructs a "Pooling" symbol from the config specification */ def mapFromPoolSpec(spec: Config, in: Symbol) : Map[String, AnyRef] = { val kernel = getAsTuple2String(spec, "kernel") val stride = getAsTuple2String(spec, "stride") val pad = getAsTuple2String(spec, "pad", "(0,0)") val poolType = spec.as[String]("pool_type") Map("data" -> in, "kernel" -> kernel, "stride" -> stride, "pad" -> pad, "pool_type" -> poolType) } def convSymbolFromSpec(spec: Config, in: Symbol) : Symbol = { val nf = spec.as[Int]("num_filter") val kernel = getAsTuple2String(spec, "kernel") val stride = getAsTuple2String(spec, "stride") val pad = getAsTuple2String(spec, "pad", "(0,0)") convFactory(in, nf, kernel, stride, pad) } def convBNSymbolFromSpec(spec: Config, in: Symbol) : Symbol = { val nf = spec.as[Int]("num_filter") val kernel = getAsTuple2String(spec, "kernel") val stride = getAsTuple2String(spec, "stride") val pad = getAsTuple2String(spec, "pad", "(0,0)") convFactoryBN(in, nf, kernel, stride, pad) } def inceptionSymbolFromSpec(spec: Config, in: Symbol) : Symbol = { val num1x1 = spec.as[Int]("n1x1") val num3x3r = spec.as[Int]("n3x3r") val num3x3 = spec.as[Int]("n3x3") val num5x5r = spec.as[Int]("n5x5r") val num5x5 = spec.as[Int]("n5x5") val pt = spec.as[String]("pool_type") val proj = spec.as[Int]("projection") inceptionFactory(in, num1x1, num3x3r, num3x3, num5x5r, num5x5, pt, proj) } def inceptionASymbolFromSpec(spec: Config, in: Symbol) : Symbol = { val num1x1 = spec.as[Int]("n1x1") val num3x3r = spec.as[Int]("n3x3r") val num3x3 = spec.as[Int]("n3x3") val num5x5r = spec.as[Int]("n5x5r") val num5x5 = spec.as[Int]("n5x5") val pt = spec.as[String]("pool_type") val proj = spec.as[Int]("projection") inceptionFactoryA(in, num1x1, num3x3r, num3x3, num5x5r, num5x5, pt, proj) } def inceptionBSymbolFromSpec(spec: Config, in: Symbol) : Symbol = { val num3x3r = spec.as[Int]("n3x3r") val num3x3 = spec.as[Int]("n3x3") val numd3x3r = spec.as[Int]("nd3x3r") val numd3x3 = spec.as[Int]("nd3x3") inceptionFactoryB(in, num3x3r, num3x3, numd3x3r, numd3x3) } def fullyConnectedFromSpec(spec: Config, in: Symbol) : Symbol = { val dim = spec.as[Int]("num_hidden") Symbol.FullyConnected()()(Map("data"-> in, "num_hidden" -> dim)) } def flattenFromSpec(spec: Config, in: Symbol) : Symbol = { Symbol.Flatten()()(Map("data" -> in)) } def activationFromSpec(spec: Config, in: Symbol) : Symbol = { Symbol.Activation()()(Map("data" -> in, "act_type" -> spec.as[String]("act_type"))) } def dropOutFromSpec(spec: Config, in: Symbol) : Symbol = { Symbol.Dropout()()(Map("data" -> in, "p" -> spec.as[String]("p"))) } def softMaxFromSpec(spec: Config, in: Symbol) : Symbol = { Symbol.SoftmaxOutput(name = "softmax")()(Map("data" -> in)) } def batchNormFromSpec(spec: Config, in: Symbol) : Symbol = { val eps = try spec.as[String]("eps") catch {case _: Throwable => "0.001"} val momentum = try spec.as[String]("momentum") catch {case _: Throwable => "0.9"} val fixGamma = try spec.as[String]("fix_gamma") catch {case _: Throwable => "True"} Symbol.BatchNorm()()(Map("data" -> in, "eps" -> eps, "momentum" -> momentum, "fix_gamma" -> fixGamma)) } def resNetV2CoreFromSpec(spec: Config, in: Symbol) : Symbol = { val units = 
spec.as[List[Int]]("units") val filterList = spec.as[List[Int]]("filter_list") val bottleNeck = spec.as[Boolean]("bottle_neck") WideResNetSymbols.resnetV2(in, units, filterList, bottleNeck, 512) } def mxConvolutionFromSpec(spec: Config, in: Symbol) : Symbol = { val numFilters = spec.as[Int]("num_filter") val kernel = getAsTuple2String(spec, "kernel") val stride = getAsTuple2String(spec, "stride") val pad = getAsTuple2String(spec, "pad", "(0,0)") val noBias = try spec.as[Boolean]("no_bias") catch {case _:Throwable => false} val workSpace = try spec.as[Int]("workspace") catch {case _:Throwable => 512} val noBiasStr = if (noBias) "True" else "False" Symbol.Convolution()()(Map("data" -> in, "kernel" -> kernel, "stride" -> stride, "pad" -> pad, "no_bias" -> noBiasStr, "num_filter" -> numFilters, "workspace" -> workSpace.toString)) } def getSymbol(sp: Config, inSymbol: Symbol, spType: String) : Symbol = { spType match { case "mx_conv" => mxConvolutionFromSpec(sp, inSymbol) case "pooling" => Symbol.Pooling()()(mapFromPoolSpec(sp, inSymbol)) case "conv" => convSymbolFromSpec(sp,inSymbol) case "convBN" => convBNSymbolFromSpec(sp, inSymbol) case "inception" => inceptionSymbolFromSpec(sp, inSymbol) case "inceptionA" => inceptionASymbolFromSpec(sp, inSymbol) case "inceptionB" => inceptionBSymbolFromSpec(sp, inSymbol) case "fc" => fullyConnectedFromSpec(sp, inSymbol) case "flatten" => flattenFromSpec(sp, inSymbol) case "activation" => activationFromSpec(sp, inSymbol) case "dropout" => dropOutFromSpec(sp, inSymbol) case "softmax" => softMaxFromSpec(sp, inSymbol) case "batch_norm" => batchNormFromSpec(sp, inSymbol) case "resnetV2core" => resNetV2CoreFromSpec(sp, inSymbol) case a => throw new RuntimeException("Invalid network symbol type: " + a) } } /** * Simple 'interpreter' that maps configuration specification into MxNet symbol DAG */ def symbolFromSpec(spec: Config, inName: String = "data", outName: String = "softmax") : Symbol = { import scala.collection.JavaConversions._ val data = Symbol.Variable("data") var lastName = "" try { val specList = spec.as[List[Config]]("mandolin.mx.specification") val finalMapping = specList.foldLeft(Map[String,Symbol]("input" -> data)){case (curMap, sp) => val spType = sp.as[String]("type") val spName = sp.as[String]("name") val inData = sp.as[String]("data") // input data val inSymbol = curMap(inData) val newSymbol = getSymbol(sp, inSymbol, spType) lastName = spName curMap + (spName -> newSymbol) } finalMapping(lastName) } catch {case e:Throwable => // if the specification isn't a list, then assume it's in the "NEW" format val specObj = spec.as[Config]("mandolin.mx.specification") val layerNames = specObj.entrySet().toVector.map{x => x.getKey.split('.')(0)} val nextMap = layerNames.toSet.foldLeft(Map():Map[String,String]){case (ac,v) => val inLayer = specObj.getConfig(v).getString("data") ac + (inLayer -> v) } var building = true var prevName = "input" val buf = new collection.mutable.ArrayBuffer[(String,String)] while (building) { val current = nextMap.get(prevName) current match {case Some(c) => buf append ((prevName, c)); prevName = c case None => building = false} } val subSeqPairs = buf.toVector val lastName = subSeqPairs.last._2 val finalMapping = subSeqPairs.foldLeft(Map[String,Symbol]("input" -> data)){case (curMap, sPair) => val (prev,cur) = sPair val sp = specObj.getConfig(cur) val spType = sp.getString("type") val inSymbol = curMap(prev) val newSymbol = getSymbol(sp, inSymbol, spType) curMap + (cur -> newSymbol) } finalMapping(lastName) } } }
project-mandolin/mandolin
mandolin-mx/src/main/scala/org/mitre/mandolin/mx/SymbolBuilder.scala
Scala
apache-2.0
11,988
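// An illustrative sketch (not from the repository above) of the kind of HOCON configuration that
// symbolFromSpec can interpret, written inline via typesafe-config. The layer names, sizes and
// field spellings follow the parser code above ("type", "name", "data", "num_hidden", "act_type")
// and are assumptions made for this example only.
import com.typesafe.config.{Config, ConfigFactory}

object SpecSketch {
  val conf: Config = ConfigFactory.parseString(
    """
    mandolin.mx.specification = [
      { type = "fc",         name = "fc1",  data = "input", num_hidden = 128 },
      { type = "activation", name = "act1", data = "fc1",   act_type = "relu" },
      { type = "fc",         name = "fc2",  data = "act1",  num_hidden = 10 },
      { type = "softmax",    name = "out",  data = "fc2" }
    ]
    """)

  // new SymbolBuilder().symbolFromSpec(conf) folds this list into an MXNet Symbol DAG, starting
  // from the implicit "input" variable and returning the symbol registered under the last entry.
}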
package reactivemongo.core import java.nio.ByteOrder import akka.util.{ByteStringBuilder, ByteString} import reactivemongo.bson.buffer.{WritableBuffer, ReadableBuffer} /** * Created by sh1ng on 29/04/15. */ class AkkaReadableBuffer(buffer: ByteString) extends ReadableBuffer { val byteBuffer = buffer.toByteBuffer.order(ByteOrder.LITTLE_ENDIAN) /** Returns the current read index of this buffer. */ override def index: Int = byteBuffer.position() /** Reads a `Long` from this buffer. */ override def readLong(): Long = byteBuffer.getLong() override def index_=(i: Int): Unit = byteBuffer.position(i) /** Reads a `Byte` from this buffer. */ override def readByte(): Byte = byteBuffer.get() /** Returns the number of readable remaining bytes of this buffer. */ override def readable(): Int = byteBuffer.remaining() override def size: Int = buffer.size /** Sets the read index to `index + n` (in other words, skips `n` bytes). */ override def discard(n: Int): Unit = byteBuffer.position(byteBuffer.position() + n) /** Reads an `Int` from this buffer. */ override def readInt(): Int = byteBuffer.getInt() /** Fills the given array with the bytes read from this buffer. */ override def readBytes(bytes: Array[Byte]): Unit = byteBuffer.get(bytes) /** * Returns a new instance of ReadableBuffer which starts at the current index and contains `n` bytes. * * This method does not update the read index of the original buffer. */ override def slice(n: Int): ReadableBuffer = new AkkaReadableBuffer(buffer.slice(index, index + n)) /** Reads a `Double` from this buffer. */ override def readDouble(): Double = byteBuffer.getDouble() }
sh1ng/ReactiveMongo
driver/src/main/scala/core/akka.scala
Scala
apache-2.0
1,695
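// An illustrative sketch (not from the repository above): writing little-endian values into an
// akka ByteString and reading them back through the adapter defined above.
import java.nio.ByteOrder
import akka.util.ByteString
import reactivemongo.core.AkkaReadableBuffer

object BufferSketch extends App {
  implicit val order: ByteOrder = ByteOrder.LITTLE_ENDIAN

  val bytes = ByteString.newBuilder.putInt(42).putLong(7L).result()
  val buf   = new AkkaReadableBuffer(bytes)

  println(buf.readInt())  // 42
  println(buf.readLong()) // 7
}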
package org.jetbrains.plugins.scala package lang package psi package impl package expr import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl import com.intellij.lang.ASTNode import org.jetbrains.plugins.scala.lang.psi.api.expr._ import com.intellij.psi.PsiElementVisitor import api.ScalaElementVisitor /** * @author Alexander Podkhalyuzin */ class ScGuardImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScGuard { override def accept(visitor: PsiElementVisitor): Unit = { visitor match { case visitor: ScalaElementVisitor => super.accept(visitor) case _ => super.accept(visitor) } } override def toString: String = "Guard" def expr = findChild(classOf[ScExpression]) }
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScGuardImpl.scala
Scala
apache-2.0
722
package com.arcusys.valamis.persistence.impl.scorm.model case class ActivityStateNodeModel(id: Option[Long], parentId: Option[Long], treeId: Option[Long], availableChildrenIds: Option[String])
igor-borisov/valamis
valamis-slick-persistence/src/main/scala/com/arcusys/valamis/persistence/impl/scorm/model/ActivityStateNodeModel.scala
Scala
gpl-3.0
257
class C(val x: Int, val next: C|Null) def f = { var xs: C|Null = C(1, C(2, null)) while (xs != null) { val xsx: Int = xs.x val xscpy: C = xs xs = xscpy // Since xscpy is non-nullable, after the assign, xs is still non-nullable val xscpyx: Int = xscpy.x xs = xs.next // xs.next is nullable, after the assign, xs becomes nullable val xsnx: Int = xs.x // error } }
dotty-staging/dotty
tests/explicit-nulls/neg/flow-after-assign.scala
Scala
apache-2.0
392
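// An illustrative companion sketch (not from the repository above): the same traversal written so
// that the nullable field is only read inside the null-checked loop body, which type-checks under
// explicit nulls. Assumes the class C defined above is in scope.
def g(start: C | Null): Int = {
  var xs: C | Null = start
  var sum = 0
  while (xs != null) {
    sum += xs.x   // xs is flow-narrowed to C inside the guarded body
    xs = xs.next  // becomes nullable again; the loop condition re-checks it before the next read
  }
  sum
}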
/** * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.pso.kafka2avro import com.google.cloud.pso.kafka2avro.demo.MyDemoType import com.google.common.io.Files import com.spotify.scio.ScioContext import com.spotify.scio.testing.PipelineSpec import com.spotify.scio.values.SCollection import java.io.File import org.apache.beam.sdk.transforms.windowing.IntervalWindow /** Test the Kafka2Avro pipeline. * * The pipeline has three steps: extract, transformation and load. * Here we don't test the extract step, that would require having an input * similar to Kafka, probably by mocking the Kafka server. * * The other two steps are tested as follows: * - The transform step should deserialize some objects * - The load step should produce local Avro files * * We also test the windowing function used in the pipeline. */ class Kafka2AvroSpec extends PipelineSpec { // Let's generate 7 objects for this test private val NumDemoObjects: Int = 7 "The transform step" should "deserialize some objs" in { val testObjs: List[MyDemoType] = Object2Kafka.createDemoObjects(NumDemoObjects) val strings: List[String] = testObjs.map(utils.Kafka2AvroUtils.object2String) runWithContext { implicit sc: ScioContext => val coll: SCollection[String] = sc.parallelize(strings) val transformed: SCollection[(IntervalWindow, Iterable[MyDemoType])] = Kafka2Avro.transform(coll) transformed should haveSize(1) // Just one window transformed.flatMap(_._2) should haveSize(strings.length) transformed.flatMap(_._2) should containInAnyOrder(testObjs) } } "The load step" should "write some objs to disk" in { // Create temp location for output testing val tmpDir: File = Files.createTempDir tmpDir.exists shouldEqual true // Check tmp dir is empty tmpDir.list shouldEqual Nil val testObjs: List[MyDemoType] = Object2Kafka.createDemoObjects(NumDemoObjects) runWithContext { implicit sc: ScioContext => val coll: SCollection[MyDemoType] = sc.parallelize(testObjs) val windowed: SCollection[(IntervalWindow, Iterable[MyDemoType])] = Kafka2Avro.windowIn(coll) Kafka2Avro.load(windowed, tmpDir.getPath) } // These checks must be done once the pipeline has finished // Check if there is at least 1 avro file val avroList: Array[String] = tmpDir.list.filter(_.endsWith(".avro")) avroList.length should be > 0 // Remove all files and check val allFilesRemoved: Boolean = tmpDir.listFiles.map(_.delete).forall(d => d) allFilesRemoved shouldEqual true // Delete tmp dir tmpDir.delete shouldEqual true } "The windowIn method" should "group objects together" in { val testObjs: List[MyDemoType] = Object2Kafka.createDemoObjects(NumDemoObjects) runWithContext { implicit sc: ScioContext => val coll = sc.parallelize(testObjs) val windowed = Kafka2Avro.windowIn(coll) windowed should haveSize(1) // Just one window windowed.flatMap(_._2) should haveSize(testObjs.length) windowed.flatMap(_._2) should containInAnyOrder(testObjs) } } }
CloudVLab/professional-services
examples/dataflow-scala-kafka2avro/src/test/scala/com/google/cloud/pso/kafka2avro/Kafka2AvroSpec.scala
Scala
apache-2.0
3,724
package com.twitter.algebird package benchmark import org.openjdk.jmh.annotations._ object BloomFilterQueryBenchmark { @State(Scope.Benchmark) class BloomFilterState { @Param(Array("100", "1000", "10000")) var nbrOfElements: Int = 0 @Param(Array("0.001", "0.01")) var falsePositiveRate: Double = 0 var bf: BF[String] = _ @Setup(Level.Trial) def setup(): Unit = { val randomStrings = BloomFilterCreateBenchmark.createRandomString(nbrOfElements, 10) bf = BloomFilter[String](nbrOfElements, falsePositiveRate) .create(randomStrings: _*) } } } class BloomFilterQueryBenchmark { import BloomFilterQueryBenchmark._ @Benchmark def queryBloomFilter(bloomFilterState: BloomFilterState): ApproximateBoolean = bloomFilterState.bf.contains("1") }
nevillelyh/algebird
algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterQueryBenchmark.scala
Scala
apache-2.0
820
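// An illustrative sketch (not from the repository above): creating and querying an Algebird Bloom
// filter directly, outside the JMH harness, using the same factory call as the benchmark state.
import com.twitter.algebird.BloomFilter

object BloomSketch extends App {
  val bf = BloomFilter[String](1000, 0.01).create("a", "b", "c")

  println(bf.contains("a"))   // ApproximateBoolean: definitely or probably true
  println(bf.contains("zzz")) // false, except with the configured false-positive probability
}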
/* * Copyright 2017 Nicolas Rinaudo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kantan.mongodb package options import com.mongodb.client.model.{ValidationAction, ValidationLevel, ValidationOptions} import java.io.Serializable final case class ValidationOpts(action: ValidationOpts.Action, level: ValidationOpts.Level, validator: Option[BsonDocument]) { def action(a: ValidationOpts.Action): ValidationOpts = copy(action = a) def level(l: ValidationOpts.Level): ValidationOpts = copy(level = l) def validator[V: BsonDocumentEncoder](v: V): ValidationOpts = copy(validator = Some(BsonDocumentEncoder[V].encode(v))) private[mongodb] lazy val legacy: ValidationOptions = { val opts = new ValidationOptions().validationAction(action.legacy).validationLevel(level.legacy) validator.foreach(opts.validator) opts } } object ValidationOpts { sealed abstract class Action(private[mongodb] val legacy: ValidationAction) extends Product with Serializable object Action { case object Error extends Action(ValidationAction.ERROR) case object Warn extends Action(ValidationAction.WARN) val default: Action = Error } sealed abstract class Level(private[mongodb] val legacy: ValidationLevel) extends Product with Serializable object Level { case object Moderate extends Level(ValidationLevel.MODERATE) case object Off extends Level(ValidationLevel.OFF) case object Strict extends Level(ValidationLevel.STRICT) val defaut: Level = Strict } val default: ValidationOpts = ValidationOpts(Action.default, Level.defaut, None) }
nrinaudo/kantan.mongodb
core/src/main/scala/kantan/mongodb/options/ValidationOpts.scala
Scala
apache-2.0
2,193
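// An illustrative sketch (not from the repository above): since ValidationOpts is a plain case
// class with copy-style builder methods, configuring it is simple method chaining.
import kantan.mongodb.options.ValidationOpts
import kantan.mongodb.options.ValidationOpts.{Action, Level}

object ValidationOptsSketch {
  val relaxed: ValidationOpts =
    ValidationOpts.default
      .action(Action.Warn)    // log a warning instead of rejecting invalid documents
      .level(Level.Moderate)  // validate inserts and updates to documents that already pass validation
}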
package object CDS { // import nasa.nccs.cds2.kernels.kernelManager // import nasa.nccs.cds2.modules.CDS._ val name = "CDS" val version = "1.0-SNAPSHOT" val organization = "nasa.nccs" val author = "Thomas Maxwell" val contact = "[email protected]" // kernelManager.addKernel( average ) }
nasa-nccs-cds/CDS2
src/main/scala/nasa/nccs/cds2/modules/CDS/package.scala
Scala
gpl-2.0
310
package io.relayr.amqp

/**
 * The message and any routing information that is included when it is received from a queue.
 * @param exchange   the exchange the message was published to
 * @param routingKey the routing key the message was published with
 * @param message    the message payload and its properties
 */
case class Envelope(exchange: String, routingKey: String, message: Message)
relayr/rabbitmq-scala-client
src/main/scala/io/relayr/amqp/Envelope.scala
Scala
mit
260
package fpinscala.laziness import Stream._ trait Stream[+A] { def foldRight[B](z: => B)(f: (A, => B) => B): B = // The arrow `=>` in front of the argument type `B` means that the function `f` takes its second argument by name and may choose not to evaluate it. this match { case Cons(h,t) => f(h(), t().foldRight(z)(f)) // If `f` doesn't evaluate its second argument, the recursion never occurs. case _ => z } def exists(p: A => Boolean): Boolean = foldRight(false)((a, b) => p(a) || b) // Here `b` is the unevaluated recursive step that folds the tail of the stream. If `p(a)` returns `true`, `b` will never be evaluated and the computation terminates early. @annotation.tailrec final def find(f: A => Boolean): Option[A] = this match { case Empty => None case Cons(h, t) => if (f(h())) Some(h()) else t().find(f) } def toList:List[A] = this match { case Cons(h, t) => h()::(t().toList) case _ => Nil } def take(n: Int): Stream[A] = this match { case Cons(h, t) => if (n > 0) Cons(h, () => t().take(n-1)) else empty case _ => empty } def drop(n: Int): Stream[A] = this match { case Cons(h, t) => if (n > 0) t().drop(n-1) else Cons(h,t) case _ => this } def takeWhile(p: A => Boolean): Stream[A] = this match { case Cons(h, t) => if (p(h())) Cons(h, () => t().takeWhile(p)) else empty case _ => empty } def takeWhileUsingFoldRight(p: A => Boolean): Stream[A] = foldRight(empty[A])((a,b) => if (p(a)) cons(a, b) else empty[A]) def forAll(p: A => Boolean): Boolean = foldRight(true)((a,b) => p(a) && b) def headOption: Option[A] = foldRight(None:Option[A])((a,b) => Some(a)) // 5.7 map, filter, append, flatmap using foldRight. Part of the exercise is // writing your own function signatures. def map[B](f: A => B):Stream[B] = foldRight(empty[B])((a,b) => cons(f(a), b)) def filter(f: A => Boolean):Stream[A] = foldRight(empty[A])((a,b) => if (f(a)) cons(a, b) else b) // def append(s2: => Stream[A]):Stream[A] = foldRight(s2)((a,b]) => cons(a,b)) def startsWith[B](s: Stream[B]): Boolean = sys.error("todo") } case object Empty extends Stream[Nothing] case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A] object Stream { def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = { lazy val head = hd lazy val tail = tl Cons(() => head, () => tail) } def empty[A]: Stream[A] = Empty def apply[A](as: A*): Stream[A] = if (as.isEmpty) empty else cons(as.head, apply(as.tail: _*)) val ones: Stream[Int] = Stream.cons(1, ones) def constant[A](a: A): Stream[A] = Stream.cons(a, constant(a)) def from(n: Int): Stream[Int] = Stream.cons(n, from(n + 1)) def fibsn(a:Int, b:Int):Stream[Int] = Stream.cons(a+b, fibsn(b, a+b)) val fibs:Stream[Int] = Stream.cons(0, Stream.cons(1, fibsn(0,1))) def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] = f(z) match { case Some((value,state)) => Stream.cons(value, unfold(state)(f)) case None => empty } val onesUsingUnfold:Stream[Int] = unfold(1)(_ => Some(1, 1)) def constantUsingUnfold[A](a:A): Stream[A] = unfold(a)(s => Some(s, s)) def fromUsingUnfold(n: Int): Stream[Int] = unfold(n)(s => Some(s, s+1)) def fibsnUsingUnfold(a:Int, b:Int):Stream[Int] = unfold((a,b))(xy => Some(xy._1 + xy._2, (xy._2, xy._1+xy._2))) val fibsUsingUnfold:Stream[Int] = Stream.cons(0, Stream.cons(1, fibsnUsingUnfold(0, 1))) }
danielgrigg/fpinscala
exercises/src/main/scala/fpinscala/laziness/Stream.scala
Scala
mit
3,514
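// An illustrative sketch (not from the repository above): a few REPL-style uses of the Stream
// defined in this file, showing that evaluation is demand-driven and short-circuiting.
object StreamSketch extends App {
  import fpinscala.laziness.Stream

  println(Stream.from(1).take(5).toList)              // List(1, 2, 3, 4, 5): only five elements are forced
  println(Stream.ones.map(_ + 1).exists(_ % 2 == 0))  // true: exists stops at the first match
  println(Stream.from(1).forAll(_ < 3))               // false: forAll stops at the first counter-example
}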
package com.arcusys.valamis.lesson.storage import com.arcusys.valamis.lesson.model.PackageScopeRule import com.arcusys.valamis.model.ScopeType trait PackageScopeRuleStorage { def get(packageID: Long, scope: ScopeType.Value, scopeID: Option[String]): Option[PackageScopeRule] def getAll(packageID: Long, scope: ScopeType.Value, scopeID: Option[String]): Seq[PackageScopeRule] def getAllVisible(scope: ScopeType.Value, scopeID: Option[String]): Seq[PackageScopeRule] def create(packageID: Long, scope: ScopeType.Value, scopeID: Option[String], visibility: Boolean, isDefault: Boolean): PackageScopeRule def update(packageID: Long, scope: ScopeType.Value, scopeID: Option[String], visibility: Boolean, isDefault: Boolean): PackageScopeRule //def updateIsDefaultProperty(packageID: Int, scope: ScopeType.Value, scopeID: Option[String], isDefault: Boolean) def getDefaultPackageID(scope: ScopeType.Value, scopeID: Option[String]): Option[Int] def delete(packageID: Long) def renew() }
ViLPy/Valamis
valamis-core/src/main/scala/com/arcusys/valamis/lesson/storage/PackageScopeRuleStorage.scala
Scala
lgpl-3.0
1,000
package org.bitcoins.core.crypto import org.bitcoins.core.number.UInt32 import org.bitcoins.testkitcore.gen.CryptoGenerators import org.bitcoins.testkitcore.util.BitcoinSUnitTest import org.scalacheck.{Gen, Prop} import scala.util.Success class ExtKeySpec extends BitcoinSUnitTest { private val nonHardened: Gen[UInt32] = Gen.choose(0L, ((1L << 31) - 1)).map(UInt32(_)) private val hardened: Gen[UInt32] = Gen.choose(1L << 31, (1L << 32) - 1).map(UInt32(_)) it must "have serialization symmetry" in { Prop.forAll(CryptoGenerators.extKey) { extKey => ExtKey.fromStringT(extKey.toString) == Success(extKey) && ExtKey(extKey.bytes) == extKey } } it must "have derivation identity 1" in { Prop.forAllNoShrink(CryptoGenerators.extPrivateKey, nonHardened, nonHardened, nonHardened) { (m, a, b, c) => //https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#the-key-tree //N(m/a/b/c) = N(m/a/b)/c = N(m/a)/b/c = N(m)/a/b/c = M/a/b/c val path1 = m .deriveChildPrivKey(a) .deriveChildPrivKey(b) .deriveChildPrivKey(c) .extPublicKey val path2 = m .deriveChildPrivKey(a) .deriveChildPrivKey(b) .extPublicKey .deriveChildPubKey(c) .get val path3 = m .deriveChildPrivKey(a) .extPublicKey .deriveChildPubKey(b) .get .deriveChildPubKey(c) .get val path4 = m.extPublicKey .deriveChildPubKey(a) .get .deriveChildPubKey(b) .get .deriveChildPubKey(c) .get path1 == path2 && path2 == path3 && path3 == path4 } } it must "derivation identity 2" in { Prop.forAllNoShrink(CryptoGenerators.extPrivateKey, hardened, nonHardened, nonHardened) { (m, aH, b, c) => //https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#the-key-tree //N(m/aH/b/c) = N(m/aH/b)/c = N(m/aH)/b/c val path1 = m .deriveChildPrivKey(aH) .deriveChildPrivKey(b) .deriveChildPrivKey(c) .extPublicKey val path2 = m .deriveChildPrivKey(aH) .deriveChildPrivKey(b) .extPublicKey .deriveChildPubKey(c) .get val path3 = m .deriveChildPrivKey(aH) .extPublicKey .deriveChildPubKey(b) .get .deriveChildPubKey(c) .get path1 == path2 && path2 == path3 } } }
bitcoin-s/bitcoin-s
core-test/src/test/scala/org/bitcoins/core/crypto/ExtKeySpec.scala
Scala
mit
2,587
package quizleague.web.service.chat import java.time.LocalDateTime import quizleague.web.service.EntityService import quizleague.web.model._ import quizleague.domain.{ChatMessage => Dom, Key => DomKey} import quizleague.domain.Ref import quizleague.web.names.ComponentNames import scala.scalajs.js.JSConverters._ import quizleague.web.service._ import quizleague.web.service.venue._ import quizleague.web.service.text._ import quizleague.web.service.user._ import quizleague.web.names._ import io.circe.parser._ import io.circe.syntax._ import quizleague.util.json.codecs.DomainCodecs._ import quizleague.web.util.Logging import rxscalajs.Observable import scalajs.js trait ChatMessageGetService extends GetService[ChatMessage] with ChatMessageNames { override type U = Dom val userService: SiteUserGetService override protected def mapOutSparse(message: Dom) = new ChatMessage( message.id, userService.refObs(message.user.id), message.message, message.date.toString) protected def dec(json:js.Any) = decodeJson[U](json) } trait ChatMessagePutService extends PutService[ChatMessage] with ChatMessageGetService { val chatService:ChatGetService override protected def mapIn(message: ChatMessage) = Dom( message.id, userService.ref(message.user), message.message, LocalDateTime.parse(message.date) ) override protected def make() = Dom(newId(), null,"", LocalDateTime.now()) override def enc(item: Dom) = item.asJson def saveMessage(text:String, siteUserID:String, chatKey:Key) = { val msg = make(DomKey(chatKey.key)) save(msg.copy(user=userService.ref(siteUserID), message=text).withKey(msg.key.get)) } }
gumdrop/quizleague-maintain
js/src/main/scala/quizleague/web/service/chat/ChatMessageService.scala
Scala
mit
1,667
package views.html import play.templates._ import play.templates.TemplateMagic._ import play.api.templates._ import play.api.templates.PlayMagic._ import models._ import controllers._ import play.api.i18n._ import play.api.mvc._ import play.api.data._ import views.html._ /* * This template takes a single argument, a String containing a * message to display. */ object index extends BaseScalaTemplate[play.api.templates.HtmlFormat.Appendable,Format[play.api.templates.HtmlFormat.Appendable]](play.api.templates.HtmlFormat) with play.api.templates.Template1[String,play.api.templates.HtmlFormat.Appendable] { /* * This template takes a single argument, a String containing a * message to display. */ def apply/*5.2*/(message: String):play.api.templates.HtmlFormat.Appendable = { _display_ { Seq[Any](format.raw/*5.19*/(""" """),format.raw/*11.4*/(""" """),_display_(Seq[Any](/*12.2*/main("Welcome to Play")/*12.25*/ {_display_(Seq[Any](format.raw/*12.27*/(""" """),format.raw/*17.8*/(""" """),_display_(Seq[Any](/*18.6*/play20/*18.12*/.welcome(message, style = "Scala"))),format.raw/*18.46*/(""" """)))})),format.raw/*20.2*/(""" """))} } def render(message:String): play.api.templates.HtmlFormat.Appendable = apply(message) def f:((String) => play.api.templates.HtmlFormat.Appendable) = (message) => apply(message) def ref: this.type = this } /* -- GENERATED -- DATE: Sat Apr 09 16:50:36 PDT 2016 SOURCE: D:/git/trask/glowroot/agent-parent/plugins/play-plugin/tmp-router-files/app/views/index.scala.html HASH: 5ab829a9da6246199d7a5d7e2eb4d198465a6a1f MATRIX: 734->95|845->112|874->312|911->314|943->337|983->339|1016->468|1057->474|1072->480|1128->514|1162->517 LINES: 25->5|28->5|30->11|31->12|31->12|31->12|33->17|34->18|34->18|34->18|36->20 -- GENERATED -- */
trask/glowroot
agent/plugins/play-plugin/src/test/app-2.2.x-scala/scala/views/html/index.template.scala
Scala
apache-2.0
2,017
package com.peterpotts.common.sample class SampleIdentity[A](sampleA: Sample[A]) extends Sample[A] { def next(): A = sampleA.next() }
peterpotts/mobius
src/main/scala/com/peterpotts/common/sample/SampleIdentity.scala
Scala
mit
137
/* * MOIS: Reaction Network * Copyright (C) 2014 University of Edinburgh School of Informatics * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package uk.ac.ed.inf.mois.reaction import scala.language.implicitConversions import scala.collection.mutable import scala.reflect.ClassTag import spire.algebra.{Rig, Ring} import uk.ac.ed.inf.mois.math.Multiset import uk.ac.ed.inf.mois.{Process, Var, VarMeta} /** A base trait for reaction networks. These reaction network use * species as variables and let you define reactions using them. * This is the base class for DeterministicReactionNetwork, * StochasticReactionNetwork and FBA. */ trait ReactionNetwork[T] extends Process { override def stringPrefix = "ReactionNetwork" // All species in a ReactionNetwork must be copy numbers // (Int or Long) or concentrations (Float or Double). // This doesn't allow for hybrid models. type Base = T // for counting number of ocurrences of species type Species = Var[T] type Reaction <: BaseReaction val species = mutable.ArrayBuffer.empty[Species] // -- Reactions -- abstract class BaseReaction { val lhs: Multiset[Species] val rhs: Multiset[Species] def stringPrefix = "Reaction" override def toString = stringPrefix + "(" + lhs + ", " + rhs + ")" override def equals(that: Any) = that match { case that: BaseReaction => (this.lhs == that.lhs) && (this.rhs == that.rhs) case _ => false } @inline final def apply(s: Species) = rhs(s) - lhs(s) } // -- Factories -- def Species(ident: String)(implicit rig: Rig[T], ct: ClassTag[T]): Species = { val s = addVar[T](ident) species += s s } abstract class ReactionFactory { def apply(lhs: Multiset[Species], rhs: Multiset[Species]): Reaction } val Reaction: ReactionFactory // -- Conversions -- // -- Species -- final class SpeciesSyntax(val v: Species)(implicit ring: Ring[T]) { // -- Multiset creation methods -- def + (p: Multiset[Species]) = p + v def + (s: Species) = Multiset(v) + s def * (m: Int) = Multiset(v -> m) // -- Reaction creation methods -- def --> (p: Multiset[Species]) = Reaction(Multiset(v), p) def --> (s: Species) = Reaction(Multiset(v), Multiset(s)) def --> () = Reaction(Multiset(v), Multiset()) // -- Arithmetic methods def += (x: T) = v.update(ring.plus(v.value, x)) def -= (x: T) = v.update(ring.minus(v.value, x)) } final class MultisetMaker(n: Int) { def * (s: Species) = Multiset(s -> n) def * (p: Multiset[Species]) = p * n def apply(s: Species) = Multiset(s -> n) def apply(p: Multiset[Species]) = p * n } final class ReactionMaker(u: Unit) { def --> (m: Multiset[Species]) = Reaction(Multiset.empty, m) def --> (s: Species) = Reaction(Multiset.empty, Multiset(s)) } final class MultisetReactionMaker(l: Multiset[Species]) { def --> (m: Multiset[Species]) = Reaction(l, m) def --> (s: Species) = Reaction(l, Multiset(s)) def --> () = Reaction(l, Multiset()) } final class ReactionSyntax(r: Reaction) { // -- Append species to the right-hand side -- def + (m: Multiset[Species]) = Reaction(r.lhs, 
r.rhs + m) def + (s: Species) = Reaction(r.lhs, r.rhs + s) def * (n: Int) = Reaction(r.lhs, r.rhs * n) } implicit def multisetToBase(m: Multiset[Species])( implicit num: Numeric[Base]): Base = { (for ((s, n) <- m) yield num.times(num.fromInt(n), s.value)).sum(num) } implicit def SpeciesSyntax(s: Species)(implicit r: Ring[T]) = new SpeciesSyntax(s)(r) implicit def MultisetMaker(n: Int) = new MultisetMaker(n) implicit def ReactionMaker(u: Unit) = new ReactionMaker(u) implicit def MultisetReactionMaker(l: Multiset[Species]) = new MultisetReactionMaker(l) implicit def ReactionSyntax(r: Reaction) = new ReactionSyntax(r) }
edinburgh-rbm/mois
src/main/scala/uk/ac/ed/inf/mois/reaction/ReactionNetwork.scala
Scala
gpl-3.0
4,485
/* * Copyright (C) 2016-2020 Lightbend Inc. <https://www.lightbend.com> */ package akka.persistence.cassandra.snapshot import scala.compat.java8.FutureConverters._ import scala.concurrent.ExecutionContext import scala.concurrent.Future import akka.Done import akka.annotation.InternalApi import akka.event.LoggingAdapter import akka.persistence.cassandra.indent import com.datastax.oss.driver.api.core.CqlSession import akka.persistence.cassandra.FutureDone /** * INTERNAL API */ @InternalApi private[akka] class CassandraSnapshotStatements(snapshotSettings: SnapshotSettings) { def createKeyspace = s""" | CREATE KEYSPACE IF NOT EXISTS ${snapshotSettings.keyspace} | WITH REPLICATION = { 'class' : ${snapshotSettings.replicationStrategy} } """.stripMargin.trim // snapshot_data is the serialized snapshot payload // snapshot is for backwards compatibility and not used for new rows def createTable = s""" |CREATE TABLE IF NOT EXISTS ${tableName} ( | persistence_id text, | sequence_nr bigint, | timestamp bigint, | ser_id int, | ser_manifest text, | snapshot_data blob, | snapshot blob, | meta_ser_id int, | meta_ser_manifest text, | meta blob, | PRIMARY KEY (persistence_id, sequence_nr)) | WITH CLUSTERING ORDER BY (sequence_nr DESC) AND gc_grace_seconds =${snapshotSettings.gcGraceSeconds} | AND compaction = ${indent(snapshotSettings.tableCompactionStrategy.asCQL, " ")} """.stripMargin.trim def writeSnapshot(withMeta: Boolean): String = s""" INSERT INTO ${tableName} (persistence_id, sequence_nr, timestamp, ser_manifest, ser_id, snapshot_data ${if (withMeta) ", meta_ser_id, meta_ser_manifest, meta" else ""}) VALUES (?, ?, ?, ?, ?, ? ${if (withMeta) ", ?, ?, ?" else ""}) """ def deleteSnapshot = s""" DELETE FROM ${tableName} WHERE persistence_id = ? AND sequence_nr = ? """ def deleteAllSnapshotForPersistenceIdAndSequenceNrBetween = s""" DELETE FROM ${tableName} WHERE persistence_id = ? AND sequence_nr >= ? AND sequence_nr <= ? """ def deleteSnapshotsBefore = s""" DELETE FROM ${tableName} WHERE persistence_id = ? AND sequence_nr < ? """ def selectSnapshot = s""" SELECT * FROM ${tableName} WHERE persistence_id = ? AND sequence_nr = ? """ def selectSnapshotMetadata(limit: Option[Int] = None) = s""" SELECT persistence_id, sequence_nr, timestamp FROM ${tableName} WHERE persistence_id = ? AND sequence_nr <= ? AND sequence_nr >= ? ${limit.map(l => s"LIMIT ${l}").getOrElse("")} """ def selectLatestSnapshotMeta = s"""SELECT persistence_id, sequence_nr, timestamp FROM ${tableName} WHERE persistence_id = ? ORDER BY sequence_nr DESC LIMIT ? """ def selectAllSnapshotMeta = s"""SELECT sequence_nr, timestamp FROM ${tableName} WHERE persistence_id = ? ORDER BY sequence_nr DESC """ private def tableName = s"${snapshotSettings.keyspace}.${snapshotSettings.table}" /** * Execute creation of keyspace and tables if that is enabled in config. * Avoid calling this from several threads at the same time to * reduce the risk of (annoying) "Column family ID mismatch" exception. * * Exceptions will be logged but will not fail the returned Future. 
*/ def executeCreateKeyspaceAndTables(session: CqlSession, log: LoggingAdapter)( implicit ec: ExecutionContext): Future[Done] = { def keyspace: Future[Done] = if (snapshotSettings.keyspaceAutoCreate) session.executeAsync(createKeyspace).toScala.map(_ => Done) else FutureDone if (snapshotSettings.tablesAutoCreate) { // reason for setSchemaMetadataEnabled is that it speed up tests by multiple factors session.setSchemaMetadataEnabled(false) val result = for { _ <- keyspace _ <- session.executeAsync(createTable).toScala } yield { session.setSchemaMetadataEnabled(null) Done } result.recoverWith { case e => log.warning("Failed to create snapshot keyspace and tables: {}", e) session.setSchemaMetadataEnabled(null) FutureDone } } else { keyspace.recoverWith { case e => log.warning("Failed to create snapshot keyspace: {}", e) FutureDone } } } }
chbatey/akka-persistence-cassandra
core/src/main/scala/akka/persistence/cassandra/snapshot/CassandraSnapshotStatements.scala
Scala
apache-2.0
4,470
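The class above only assembles CQL strings and a best-effort schema-bootstrap future; the plugin wires it up elsewhere. Below is a minimal sketch of how `executeCreateKeyspaceAndTables` might be driven at startup. It is illustrative only: the `SnapshotSettings` value is assumed to come from the plugin's configuration (its constructor is not part of this excerpt), and the session and logger wiring is an assumption, not the plugin's actual bootstrap code.

import akka.Done
import akka.actor.ActorSystem
import akka.event.Logging
import com.datastax.oss.driver.api.core.CqlSession

import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._

object SnapshotSchemaBootstrap {

  // Hypothetical wiring: `settings` would normally be built from the plugin config.
  def createSchema(system: ActorSystem, settings: SnapshotSettings): Done = {
    implicit val ec: ExecutionContext = system.dispatcher
    val session = CqlSession.builder().build() // connects to localhost:9042 by default
    val log = Logging(system, "snapshot-schema-bootstrap")
    val statements = new CassandraSnapshotStatements(settings)
    // Failures are logged inside executeCreateKeyspaceAndTables and never fail the future,
    // so this either completes with Done or times out.
    val done: Future[Done] = statements.executeCreateKeyspaceAndTables(session, log)
    try Await.result(done, 30.seconds)
    finally session.close()
  }
}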
/* * Copyright 2014–2020 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.qsu package minimizers import slamdata.Predef._ import cats.{~>, Eq, Foldable, Monad, Monoid, MonoidK} import cats.data.Ior import cats.implicits._ import matryoshka.{Hole => _, _} import matryoshka.data.free._ import matryoshka.implicits._ import matryoshka.patterns.interpret import quasar.RenderTreeT //import quasar.RenderTree.ops._ import quasar.common.effect.NameGenerator import quasar.contrib.iota._ import quasar.contrib.scalaz.free._ import quasar.contrib.std.errorImpossible import quasar.fp.ski.κ2 import quasar.qscript._ import quasar.qsu.{QScriptUniform => QSU}, QSU.{Retain, Rotation} import scalaz.{Forall, Foldable1, IList, NonEmptyList} // these instances don't exist in cats (for good reason), but we depend on them here import scalaz.std.set.{setMonoid => _, _} import scalaz.std.map._ import scala.collection.immutable.{Map => SMap} // An order of magnitude faster to compile than import shims.{plusEmptyToCats => _, _} import shims.{plusEmptyToCats => _, equalToCats, eqToScalaz, foldableToCats, foldableToScalaz, monadToCats, monadToScalaz, showToCats, showToScalaz, traverseToCats} sealed abstract class MergeCartoix[T[_[_]]: BirecursiveT: EqualT: RenderTreeT: ShowT] extends Minimizer[T] with MraPhase[T] { import MinimizeAutoJoins.MinStateM import QSUGraph.Extractors._ import RecFreeS.RecOps import MapFuncsCore.{IntLit, ProjectIndex, ProjectKey, StrLit} type Cartouche0 = quasar.qsu.minimizers.Cartouche[T, Index, Hole, Retain] type Cartouche1 = quasar.qsu.minimizers.Cartouche[T, Nothing, FreeMap, Output] type CStage0 = quasar.qsu.minimizers.CStage[T, Index, Hole, Retain] type CStage1 = quasar.qsu.minimizers.CStage[T, Nothing, FreeMap, Output] type PrjPath = NonEmptyList[Index] type ∀[P[_]] = Forall[P] implicit def PEqual: Eq[P] private val FM = Foldable[FreeMapA] private implicit val MM: Monoid[SMap[Symbol, Cartouche0]] = MonoidK[SMap[Symbol, ?]].algebra[Cartouche0] private val SourceKey = "source" private val func = construction.Func[T] def couldApplyTo(candidates: List[QSUGraph]): Boolean = candidates exists { case Transpose(_, _, _) => true; case _ => false } @SuppressWarnings(Array("org.wartremover.warts.Recursion")) def extract[ G[_]: scalaz.Monad: NameGenerator: MonadPlannerErr: RevIdxM: MinStateM[T, P, ?[_]]]( candidate: QSUGraph) : Option[(QSUGraph, (QSUGraph, FreeMap) => G[QSUGraph])] = { candidate match { // handle a shift candidate case Transpose(parent, retain, rotations) => extract[G](parent) map { _ map { _ andThen { _ flatMap { parent2 => updateGraph[G](parent2)(QSU.Transpose(_, retain, rotations)) } } } } // handle a transformed shift candidate case MappableRegion.MaximalUnary(Transpose(parent, retain, rotations), fm) => extract[G](parent) map { _ map { _ andThen { p2 => for { parent2 <- p2 tpose <- updateGraph[G](parent2)(QSU.Transpose(_, retain, rotations)) res <- updateGraph[G](tpose)(QSU.Map(_, fm.asRec)) } yield res } } } // handle a FreeMap case 
MappableRegion.MaximalUnary(parent, fm) => Some((parent, (parent2, fm2) => { val f = MapFuncCore.normalized(fm >> fm2) if (f === func.Hole) parent2.pure[G] else updateGraph[G](parent2)(QSU.Map(_, f.asRec)) })) // handle the rest case qgraph => Some((qgraph, (src, fm) => { if (fm === func.Hole) src.pure[G] else updateGraph[G](src)(QSU.Map(_, fm.asRec)) })) } } def apply[ G[_]: scalaz.Monad: NameGenerator: MonadPlannerErr: RevIdxM: MinStateM[T, P, ?[_]]]( original: QSUGraph, singleSource: QSUGraph, candidates0: List[QSUGraph], fm0: FreeMapA[Int]) : G[Option[(QSUGraph, QSUGraph)]] = { val (candidates, fm) = Some(candidates0) .filter(_.exists(_.root === singleSource.root)) .flatMap(readSourceProjections(singleSource, _, fm0)) .getOrElse((candidates0.map(Right(_)), fm0)) val exprProjections: CStage0 => List[CStage0] = { case CStage.Expr(f) => quotientProjection(f).toList case other => List(other) } val maybeJoin: Option[CStage.Join[T, Index, Hole, Retain]] = candidates .traverse(_.fold( p => Some(p.map(CStage.Project[T, Index](_)).toList), readCandidate(singleSource, _).map(_.reverse))) .map { cartoix => val cs = cartoix.zipWithIndex map { case (c, i) => Symbol(s"cart$i") -> Cartouche.fromFoldable(c.flatMap(exprProjections)) } CStage.Join(cs.toMap, fm.map(i => Symbol(s"cart$i"))) } // maybeJoin.fold(println("NO JOIN"))(j => println(s"MAYBE_JOIN\\n${(j: CStage0).render.show}")) maybeJoin traverse { j => simplifyJoin[G](j) // .map { smpl => println(s"SIMPLIFIED_JOIN\\n${(smpl: CStage[T, Index, Hole, Output]).render.show}"); smpl } .map(finalizeStages(_)) // .map { fnl => println(s"FINALIZED_JOIN\\n${fnl.asInstanceOf[CStage[T, Unit, FreeMap, Output]].render.show}"); fnl } .flatMap(j => reifyJoin[G](j, singleSource, StructLens.init(j.cartoix.size > 1), false)) .map(x => (x, x)) } } private final class FuncOf(src: Symbol) { def unapply(qgraph: QSUGraph): Option[FreeMap] = MappableRegion.unaryOf(src, qgraph).filterNot(_ === func.Hole) } // returns the list of stages in reverse order, Nil represents the source // ignores projection @SuppressWarnings(Array("org.wartremover.warts.Recursion")) private def readCandidate(singleSource: QSUGraph, qgraph: QSUGraph): Option[List[CStage0]] = { val MapSrc = new FuncOf(singleSource.root) qgraph match { case Transpose(src, retain, rot) => // struct val above: FreeMapA[QSUGraph] = MappableRegion[T](singleSource.root === _, src) val maybeCartoix: Option[FreeMapA[(Symbol, List[CStage0])]] = above.traverse(g => readCandidate(singleSource, g).tupleLeft(g.root)) maybeCartoix map { case FreeA((_, parent)) => CStage.Shift[T, Hole, Retain](Hole(), retain, rot) :: parent case cartoix => val roots = FM.foldMap(cartoix) { case (root, _) => Set(root) } if (roots.size > 1) { val cmap = FM.foldMap(cartoix) { case (root, stages) => SMap(root -> Cartouche.fromFoldable(stages.reverse)) } val joiner = cartoix.map(_._1) val join = CStage.Join(cmap, joiner) val current = CStage.Shift[T, Hole, Retain](Hole(), retain, rot) // this is an interesting discovery: joins are fully subsuming during read // it's justified because we haven't (yet) collapsed above, so there can be // no common parents yet. 
in other words, we read as a tree and collapse to // a dag current :: join :: Nil } else { val parent = cartoix.toList.take(1).flatMap { case (_, s) => s } val struct = CStage.Expr(cartoix.as(Hole())) val current = CStage.Shift[T, Hole, Retain](Hole(), retain, rot) current :: struct :: parent } } case MapSrc(f) => Some(List(CStage.Expr(f))) case qgraph if qgraph.root === singleSource.root => Some(Nil) case _ => None } } // Extracts projections of `singleSource` from `expr`, returning the remainder. private def readSourceProjections( singleSource: QSUGraph, candidates: List[QSUGraph], expr: FreeMapA[Int]) : Option[(List[Either[PrjPath, QSUGraph]], FreeMapA[Int])] = { type T = Either[PrjPath, QSUGraph] val len = candidates.length val indices = candidates .zipWithIndex .flatMap { case (c, i) if c.root === singleSource.root => List(i) case _ => Nil } val read = Some(attemptProject(expr)) .filter(_ exists { case (i, p) => indices.contains(i) && p.nonEmpty }) .map(_ flatMap { case (i, h :: t) if indices.contains(i) => FreeA(Left(NonEmptyList.nels(h, t: _*)): T) case (i, p) => reifyPath(FreeA(Right(candidates(i)): T), p) }) read map { r => val reindexed = r.zipWithIndex (reindexed.toList.sortBy(_._2).map(_._1), reindexed.map(_._2)) } } private def attemptProject[A](fm: FreeMapA[A]): FreeMapA[(A, List[Index])] = { val fa: A => FreeMapA[(A, List[Index])] = a => FreeA((a, Nil)) val ff: Algebra[MapFunc, FreeMapA[(A, List[Index])]] = { case MFC(ProjectKey(FreeA((a, p)), StrLit(key))) => FreeA((a, Index.field(key) :: p)) case MFC(ProjectIndex(FreeA((a, p)), IntLit(i))) if i.isValidInt => FreeA((a, Index.position(i.toInt) :: p)) case other => FreeF(other) } fm.cata(interpret(fa, ff)).map(_.map(_.reverse)) } // Returns an expression where all projections of `Hole` have been // extracted, `None` if any access of `Hole` is not a project. private def extractProject(fm: FreeMap): Option[FreeMapA[PrjPath]] = attemptProject(fm) traverse { case (_, h :: t) => Some(NonEmptyList.nels(h, t: _*)) case _ => None } private def commonPrjPrefix(xs: PrjPath, ys: PrjPath): Option[PrjPath] = { val prefix = (xs zip ys).list .takeWhile { case (x, y) => x === y } .map(_._1) prefix.headOption.map(NonEmptyList.nel(_, prefix.drop(1))) } // Extracts the longest common project prefix of `Hole`, turning it into // a `Project` stage, followed by an `Expr` stage containing the remainder // of the `FreeMap`. 
private def quotientProjection(fm: FreeMap): NonEmptyList[CStage0] = { val factored = for { extracted <- extractProject(fm) prefix <- extracted.reduceLeftToOption(_.some)((x, y) => x.flatMap(commonPrjPrefix(_, y))).flatten len = prefix.size expr = extracted.flatMap(p => reifyPath(func.Hole, p.list.drop(len))) } yield { if (expr === func.Hole) prefix.map[CStage0](CStage.Project(_)) else prefix.map[CStage0](CStage.Project(_)) :::> IList(CStage.Expr[T](expr)) } factored getOrElse NonEmptyList[CStage0](CStage.Expr[T](fm)) } private case class Buckets[J, S]( joins: SMap[Symbol, CStage.Join[T, Index, Hole, J]], shifts: SMap[Rotation, SMap[Symbol, CStage.Shift[T, Hole, S]]], projects: SMap[Index, Set[Symbol]], exprs: SMap[Symbol, CStage.Expr[T]]) private object Buckets { val Empty = Buckets[Retain, Retain](SMap(), SMap(), SMap(), SMap()) /** Bucket each cartouche based on potential compatibility, distinguishing * between kind of stage and subgrouping based on attributes (like `Rotation`) * * Note that being in the same bucket doesn't guarantee compatible, just possible */ def fromCartoix(cartoix: SMap[Symbol, Cartouche0]): Buckets[Retain, Retain] = { import CStage._ cartoix.foldLeft(Empty) { case (buckets, (ref, Cartouche.Stages(NonEmptyList(head, _)))) => head match { case j @ Join(_, _) => buckets.copy(joins = buckets.joins + (ref -> j)) case s @ Shift(_, _, rot) => val shiftsMap = buckets.shifts.getOrElse(rot, SMap()) + (ref -> s) buckets.copy(shifts = buckets.shifts + (rot -> shiftsMap)) case Project(p) => val prjsSet = buckets.projects.getOrElse(p, Set()) + ref buckets.copy(projects = buckets.projects + (p -> prjsSet)) case e @ Expr(_) => buckets.copy(exprs = buckets.exprs + (ref -> e)) case Cartesian(_) => buckets } case (buckets, _) => buckets } } } /** Attempts to simplify a CStage.Join by coalescing compatible stages. */ @SuppressWarnings(Array( "org.wartremover.warts.Recursion", "org.wartremover.warts.TraversableOps")) private def simplifyJoin[F[_]: Monad: NameGenerator](join: CStage.Join[T, Index, Hole, Retain]) : F[CStage.Join[T, Index, Hole, Output]] = { type Remap = SMap[Symbol, Symbol] type OutStage = CStage[T, Index, Hole, Output] type OutCart = Cartouche[T, Index, Hole, Output] val simplName = freshSymbol[F]("simpl") def convertUnchanged(s: CStage0): OutStage = s match { case CStage.Join(cx, jn) => CStage.Join(convertCartoix(cx), jn) case CStage.Cartesian(cx) => CStage.Cartesian(convertCartoix(cx)) case CStage.Shift(s, Retain.Identities, rot) => CStage.Shift(s, Output.id, rot) case CStage.Shift(s, Retain.Values, rot) => CStage.Shift(s, Output.value, rot) case CStage.Project(p) => CStage.Project(p) case CStage.Expr(f) => CStage.Expr(f) } def convertCartoix(cx: SMap[Symbol, Cartouche0]): SMap[Symbol, OutCart] = cx map { case (s, Cartouche.Source()) => (s, Cartouche.Source(): OutCart) case (s, Cartouche.Stages(ss)) => (s, Cartouche.Stages(ss.map(convertUnchanged))) } def step(cartoix: SMap[Symbol, Cartouche0]): F[(SMap[Symbol, OutCart], Remap)] = { import CStage._ /** The tuple represents the resolution of a closure: an optional * collapsee and the set of cartouche identifiers that collapsed. 
*/ type Resolved = List[(OutStage, Set[Symbol])] val buckets = Buckets.fromCartoix(cartoix) for { simplifiedJoins <- buckets.joins.traverse(simplifyJoin[F](_)) simplifiedBuckets = buckets.copy(joins = simplifiedJoins) // within subgroups, compare all possible pairs and build final subsubgroups // this process is O(n^3) in the size of each bucket, since computing the set // of disjoint reflexive transitive closures is O(n) with memoization joinRelation = Relation.allPairs(simplifiedBuckets.joins.toList)(_._1) { case ((_, l), (_, r)) => // can't merge joins at all, so they have to be fully equal (l: OutStage) === r } shiftRelations = simplifiedBuckets.shifts mapValues { syms => Relation.allPairs(syms.toList)(_._1)(κ2(true)) } projectRelations = simplifiedBuckets.projects mapValues { syms => Relation.allPairs(syms)(s => s)(κ2(true)) } exprRelation = Relation.allPairs(simplifiedBuckets.exprs.toList)(_._1) { case ((_, l), (_, r)) => l.f === r.f } resolvedJoins = joinRelation.closures.toList map { cl => // just arbitrarily pick a join; we know by construction they're all equal (simplifiedBuckets.joins(cl.head), cl) } resolvedProjects = projectRelations.toList flatMap { case (idx, rel) => rel.closures.toList.map((Project[T, Index](idx), _)) } resolvedExprs = exprRelation.closures.toList map { cl => (simplifiedBuckets.exprs(cl.head), cl) } shiftClosures = shiftRelations.toList flatMap { case (rot, rel) => rel.closures.toList.tupleLeft(rot) } // resolve shifts, generating fresh names for identity references (resolvedShifts, idsRemap) <- shiftClosures.foldLeftM((Nil: Resolved, SMap(): Remap)) { case ((res, rm), (rot, cl)) => val shifts = cl.map(ref => ref -> simplifiedBuckets.shifts(rot)(ref)).toMap val (ids, vals) = shifts.foldLeft((Set[Symbol](), Set[Symbol]())) { case ((ids, vals), (ref, Shift(_, Retain.Identities, _))) => (ids + ref, vals) case ((ids, vals), (ref, Shift(_, Retain.Values, _))) => (ids, vals + ref) } val outAndRm = if (ids.isEmpty) (Output.value, rm).pure[F] else if (vals.isEmpty) (Output.id, rm).pure[F] else simplName map { n => (Output.idAndValue(n), ids.foldLeft(rm)((m, i) => m + (i -> n))) } outAndRm map { case (o, remap) => ((Shift[T, Hole, Output](Hole(), o, rot), cl) :: res, remap) } } allResolved = resolvedJoins ::: resolvedShifts ::: resolvedProjects ::: resolvedExprs unchanged = convertCartoix(cartoix -- (allResolved.foldMap(_._2) ++ idsRemap.keySet)) back <- allResolved.foldLeftM((unchanged, SMap(): Remap)) { case ((cx, remap0), (stage, syms)) => val nestedCartoix = cartoix.filterKeys(syms) map { case (s, cart) => (s, cart.dropHead) } if (nestedCartoix forall { case (_, c) => c.isEmpty }) { // Pick a name to use for the cartouche val h = syms.head // Everything merged, so remap all references val rm1 = syms.map(fm => (fm, idsRemap.getOrElse(fm, h))) (cx.updated(h, Cartouche.stages(NonEmptyList(stage))), remap0 ++ rm1).pure[F] } else { // Extract any source references of identities, removing their cartouche val (cx1, ids) = nestedCartoix.toList.foldLeft((SMap[Symbol, Cartouche0](), Set[Symbol]())) { // Cartouche terminated with shift identities, exclude the cartouche and record the reference case ((c, i), (ref, Cartouche.Source())) if idsRemap.contains(ref) => (c, i + ref) // Cartouche doesn't terminate here, retain // // TODO: This isn't quite correct, see // https://app.clubhouse.io/data/story/9672/compatible-operations-to-id-and-value-components-of-a-shift-are-incorrectly-coalesced // // Ideally when the cartouche did reference ids and didn't terminate, // we'd 
indicate that it should project the upstream id, not the value, // but we don't have a way to express this currently. case ((c, i), kv) => (c + kv, i) } for { (lower, lowerRemap) <- step(cx1) finalRemap = remap0 ++ lowerRemap ++ idsRemap.filterKeys(ids) back <- // lower coalesced into a single cartouche, prepend stage if (lower.size === 1) { val (lowerName, lowerCart) = lower.head (cx.updated(lowerName, stage :: lowerCart), finalRemap).pure[F] } else { // lower has multiple cartoix, turn it into a cartesian and make it the tail of a new cartouche simplName map { newName => val currentCart = Cartouche.stages(NonEmptyList(stage, CStage.Cartesian(lower))) (cx.updated(newName, currentCart), finalRemap) } } } yield back } } } yield back } step(join.cartoix) map { case (simplified, remap) => CStage.Join( simplified, join.joiner.map(s => remap.getOrElse(s, s))) } } // Eliminates Project stages, collapsing them into Shift structs or Expr nodes. private def finalizeStages[O](init: CStage.Join[T, Index, Hole, O]): CStage.Join[T, Nothing, FreeMap, O] = { type InStage = CStage[T, Index, Hole, O] type OutStage = CStage[T, Nothing, FreeMap, O] type InCart = Cartouche[T, Index, Hole, O] type OutCart = Cartouche[T, Nothing, FreeMap, O] def z(c0: InStage): Either[NonEmptyList[OutStage], FreeMap] = c0 match { case j @ CStage.Join(_, _) => Left(NonEmptyList(finalizeStages(j): OutStage)) case CStage.Cartesian(cx) => Left(NonEmptyList(CStage.Cartesian(finalizeCartoix(cx)))) case CStage.Shift(_, idStatus, rot) => Left(NonEmptyList(CStage.Shift(func.Hole, idStatus, rot))) case CStage.Project(idx) => Right(reifyIndex(func.Hole, idx)) case CStage.Expr(f) => Right(f) } def acc(a: NonEmptyList[OutStage] Ior FreeMap, s: InStage): NonEmptyList[OutStage] Ior FreeMap = (a, s) match { case (Ior.Right(f), CStage.Project(idx)) => Ior.Right(reifyIndex(f, idx)) case (Ior.Left(ss), CStage.Project(idx)) => Ior.Both(ss, reifyIndex(func.Hole, idx)) case (Ior.Both(ss, f), CStage.Project(idx)) => Ior.Both(ss, reifyIndex(f, idx)) case (Ior.Right(f), CStage.Shift(_, i, r)) => Ior.Left(NonEmptyList(CStage.Shift(f, i, r))) case (Ior.Left(ss), CStage.Shift(_, i, r)) => Ior.Left((CStage.Shift(func.Hole, i, r): OutStage) <:: ss) case (Ior.Both(ss, f), CStage.Shift(_, i, r)) => Ior.Left((CStage.Shift(f, i, r): OutStage) <:: ss) case (Ior.Right(f), CStage.Expr(g)) => Ior.Right(g >> f) case (Ior.Left(ss), CStage.Expr(g)) => Ior.Both(ss, g) case (Ior.Both(ss, f), CStage.Expr(g)) => Ior.Both(ss, g >> f) case (Ior.Right(f), CStage.Cartesian(cx)) => Ior.Left(NonEmptyList(CStage.Cartesian(finalizeCartoix(cx)), CStage.Expr(f))) case (Ior.Left(ss), CStage.Cartesian(cx)) => Ior.Left((CStage.Cartesian(finalizeCartoix(cx)): OutStage) <:: ss) case (Ior.Both(ss, f), CStage.Cartesian(cx)) => Ior.Left((CStage.Cartesian(finalizeCartoix(cx)): OutStage) <:: (CStage.Expr(f): OutStage) <:: ss) case (Ior.Right(f), j @ CStage.Join(_, _)) => Ior.Left(NonEmptyList(finalizeStages(j): OutStage, CStage.Expr(f))) case (Ior.Left(ss), j @ CStage.Join(_, _)) => Ior.Left((finalizeStages(j): OutStage) <:: ss) case (Ior.Both(ss, f), j @ CStage.Join(_, _)) => Ior.Left((finalizeStages(j): OutStage) <:: (CStage.Expr(f): OutStage) <:: ss) } def finalizeCartouche(c0: InCart): OutCart = c0 match { case Cartouche.Stages(ss0) => Foldable1[NonEmptyList].foldMapLeft1(ss0)(s => Ior.fromEither(z(s)))(acc) match { case Ior.Left(ss1) => Cartouche.stages[T, Nothing, FreeMap, O](ss1.reverse) case Ior.Right(fm) => Cartouche.stages[T, Nothing, FreeMap, O](NonEmptyList(CStage.Expr(fm): 
OutStage)) case Ior.Both(ss1, fm) => Cartouche.stages[T, Nothing, FreeMap, O]((CStage.Expr(fm) <:: ss1).reverse) } case Cartouche.Source() => Cartouche.source } def finalizeCartoix(cx: SMap[Symbol, InCart]): SMap[Symbol, OutCart] = cx map { case (k, v) => (k, finalizeCartouche(v)) } CStage.Join(finalizeCartoix(init.cartoix), init.joiner) } private def reifyJoin[ G[_]: Monad: MonadPlannerErr: NameGenerator: RevIdxM: MinStateM[T, P, ?[_]]]( join: CStage.Join[T, Nothing, FreeMap, Output], parent: QSUGraph, lens: StructLens, isNested: Boolean) : G[QSUGraph] = { val (srcRefs, rest) = join.cartoix.toList.foldRight((Set[Symbol](), List[(Symbol, NonEmptyList[CStage1])]())) { case ((s, Cartouche.Source()), (as, r)) => (as + s, r) case ((s, Cartouche.Stages(ss)), (as, r)) => (as, (s, ss) :: r) } for { results <- reifyCartoix[G](parent, rest, lens, isNested) f = join.joiner flatMap { s => if (srcRefs(s)) func.ProjectKeyS(func.Hole, SourceKey) else func.ProjectKeyS(func.Hole, s.name) } mapped <- results match { case LeftShift(src, struct, idStatus, onUndef, repair, rot) => val joinRep = MapFuncCore.normalized(f >> repair) updateGraph[G](src)(QSU.LeftShift(_, struct, idStatus, onUndef, joinRep, rot)) case other => updateGraph[G](other)(QSU.Map(_, f.asRec)) } } yield mapped } @SuppressWarnings(Array("org.wartremover.warts.Recursion")) private def reifyCartoix[ G[_]: Monad: MonadPlannerErr: NameGenerator: RevIdxM: MinStateM[T, P, ?[_]]]( parent: QSUGraph, cartoix: List[(Symbol, NonEmptyList[CStage1])], lens0: StructLens, isNested: Boolean) : G[QSUGraph] = cartoix.sortBy(kv => (reifyPrecedence(kv._2.head), kv._2.size)) match { case (s, NonEmptyList(hd, tail)) :: rest => val prj = if (isNested) lens0.project else func.ProjectKeyS(func.Hole, SourceKey) val inj = new ∀[λ[α => (Symbol, FreeMapA[α], FreeMapA[α], Option[(Symbol, FreeMapA[α])]) => FreeMapA[α]]] { def apply[α] = { (id, results, above, maybeIds) => val core = func.ConcatMaps( above, func.MakeMapS(id.name, results)) maybeIds.fold(core) { case (sym, ids) => func.ConcatMaps( core, func.MakeMapS(sym.name, ids)) } } } val resultsM = reifyStage[G](s, hd, parent, lens0, true) flatMap { parent => val projectPrev = func.ProjectKeyS(func.Hole, s.name) tail.foldLeftM(parent) { (parent, stage) => reifyStage[G](s, stage, parent, StructLens(projectPrev, inj, true), true) } } resultsM.flatMap(reifyCartoix[G](_, rest, StructLens(prj, inj, true), isNested)) case Nil => parent.pure[G] } private def reifyIndex[A](src: FreeMapA[A], idx: Index): FreeMapA[A] = idx.toEither.fold(func.ProjectIndexI(src, _), func.ProjectKeyS(src, _)) private def reifyPath[A, F[_]: Foldable](z: FreeMapA[A], path: F[Index]) : FreeMapA[A] = path.foldLeft(z)(reifyIndex) @SuppressWarnings(Array("org.wartremover.warts.Recursion")) private def reifyStage[ G[_]: Monad: MonadPlannerErr: NameGenerator: RevIdxM: MinStateM[T, P, ?[_]]]( id: Symbol, stage: CStage1, parent: QSUGraph, lens: StructLens, isNested: Boolean) : G[QSUGraph] = { stage match { case CStage.Expr(f) => val ap = λ[FreeMapA ~> FreeMapA](f >> lens.project >> _) parent match { case LeftShift(src, struct, idStatus, onUndef, repair, rot) => updateGraph[G](src)( QSU.LeftShift( _, struct, idStatus, onUndef, lens.inject[JoinSide]( id, MapFuncCore.normalized(ap(repair)), repair, None), rot)) case Map(src, rfm) => updateGraph[G](src)( QSU.Map( _, lens.inject[Hole]( id, MapFuncCore.normalized(ap(rfm.linearize)), rfm.linearize, None).asRec)) case src => updateGraph[G](src)( QSU.Map( _, lens.inject[Hole]( id, 
MapFuncCore.normalized(ap(func.Hole)), func.Hole, None).asRec)) } case CStage.Shift(struct, output, rot) => updateGraph[G](parent)( QSU.LeftShift( _, (struct >> lens.project).asRec, output.toIdStatus, if (lens.outer) OnUndefined.Emit else OnUndefined.Omit, lens.inject[JoinSide]( id, output match { case Output.IdAndValue(_) => func.ProjectIndexI(func.RightSide, 1) case _ => func.RightSide }, func.LeftSide, output match { case Output.IdAndValue(n) => Some((n, func.ProjectIndexI(func.RightSide, 0))) case _ => None }), rot)) case CStage.Cartesian(cartoix) => val cartoix2 = cartoix.toList map { case (k, Cartouche.Stages(ss)) => (k, ss) case (k, Cartouche.Source()) => (k, NonEmptyList[CStage1](CStage.Expr(func.Hole))) } reifyCartoix[G](parent, cartoix2, lens, isNested) case j @ CStage.Join(_, _) => reifyJoin[G](j, parent, lens, isNested) case CStage.Project(_) => errorImpossible } } // Order sibling cartouches when rendering to minimize // structure and provide stability for tests private val reifyPrecedence: CStage1 => Int = { case CStage.Join(_, _) => 0 case CStage.Cartesian(_) => 1 case CStage.Shift(_, _, _) => 2 case CStage.Expr(_) => 3 case CStage.Project(_) => errorImpossible } private def updateGraph[ G[_]: Monad: NameGenerator: MonadPlannerErr: RevIdxM: MinStateM[T, P, ?[_]]]( parent: QSUGraph)( nodeF: Symbol => QSU[T, Symbol]) : G[QSUGraph] = for { g <- derive[G](parent)(nodeF) _ <- MinimizeAutoJoins.updateProvenance[T, G](qprov, g) } yield g private def derive[ G[_]: Monad: NameGenerator: RevIdxM]( parent: QSUGraph)( nodeF: Symbol => QSU[T, Symbol]) : G[QSUGraph] = withName[G](nodeF(parent.root)).map(_ :++ parent) private def withName[ G[_]: Monad: NameGenerator: RevIdxM]( node: QSU[T, Symbol]) : G[QSUGraph] = QSUGraph.withName[T, G]("mcart")(node) // this isn't in the stdlib wtf? implicit class Function2Syntax[A, B, C](self: (A, B) => C) { def andThen[D](f: C => D): (A, B) => D = (a, b) => f(self(a, b)) } private case class StructLens( project: FreeMap, // (cartouche id, result access, incoming value, identity access) inject: ∀[λ[α => (Symbol, FreeMapA[α], FreeMapA[α], Option[(Symbol, FreeMapA[α])]) => FreeMapA[α]]], outer: Boolean) private object StructLens { def init(includeSource: Boolean): StructLens = { val inj = new ∀[λ[α => (Symbol, FreeMapA[α], FreeMapA[α], Option[(Symbol, FreeMapA[α])]) => FreeMapA[α]]] { def apply[α] = { (id, results, above, maybeIds) => val core = if (includeSource) func.StaticMapS( SourceKey -> above, id.name -> results) else func.MakeMapS(id.name, results) maybeIds.fold(core) { case (idsName, ids) => func.ConcatMaps( core, func.MakeMapS(idsName.name, ids)) } } } StructLens(func.Hole, inj, true) } } } object MergeCartoix { def apply[T[_[_]]: BirecursiveT: EqualT: RenderTreeT: ShowT]( qp: QProv[T])( implicit eqP: Eq[qp.P]) : Minimizer.Aux[T, qp.P] = new MergeCartoix[T] { val qprov: qp.type = qp val PEqual = eqP } }
slamdata/quasar
qsu/src/main/scala/quasar/qsu/minimizers/MergeCartoix.scala
Scala
apache-2.0
31,750
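A load-bearing step in the minimizer above is factoring the longest common projection prefix out of an expression: `commonPrjPrefix` computes the pairwise prefix and `quotientProjection` folds it across every occurrence of `Hole`. The idea is easier to see on plain lists; the snippet below only illustrates that prefix computation and is not quasar's API (which, as shown above, works on `NonEmptyList[Index]` and `FreeMap`).

object CommonPrefixSketch {

  // Longest common prefix of two paths.
  def commonPrefix[A](xs: List[A], ys: List[A]): List[A] =
    xs.zip(ys).takeWhile { case (x, y) => x == y }.map(_._1)

  // Fold the pairwise prefix across all extracted paths; empty input yields None.
  def commonPrefixAll[A](paths: List[List[A]]): Option[List[A]] =
    paths.reduceLeftOption((acc, p) => commonPrefix(acc, p))

  def main(args: Array[String]): Unit = {
    val paths = List(
      List("a", "b", "c"),
      List("a", "b", "d"),
      List("a", "b"))
    // Prints: Some(List(a, b)) -- the shared prefix that can be hoisted into a Project stage
    println(commonPrefixAll(paths))
  }
}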
package io.hydrosphere.mist.master.execution.status

import akka.actor.ActorSystem
import io.hydrosphere.mist.core.logging.LogEvent
import io.hydrosphere.mist.master.{EventsStreamer, JobDetails}
import io.hydrosphere.mist.master.Messages.StatusMessages._
import io.hydrosphere.mist.master.logging.LogService
import io.hydrosphere.mist.master.store.JobRepository

import scala.concurrent.Future

trait StatusReporter {

  def report(ev: ReportedEvent): Unit

  def reportPlain(e: UpdateStatusEvent): Unit = report(ReportedEvent.plain(e))

  def reportWithFlushCallback(e: UpdateStatusEvent): Future[JobDetails] = {
    val ev = ReportedEvent.withCallback(e)
    report(ev)
    ev.callback.future
  }
}

object StatusReporter {

  val NOOP = new StatusReporter {
    override def report(ev: ReportedEvent): Unit = ()
  }

  /**
    * Send status updates to store + async interfaces
    */
  def reporter(
    repo: JobRepository,
    streamer: EventsStreamer,
    logService: LogService
  )(implicit sys: ActorSystem): StatusReporter = {
    val flusher = sys.actorOf(StoreFlusher.props(repo, logService))
    new StatusReporter {
      override def report(ev: ReportedEvent): Unit = {
        flusher ! ev
        streamer.push(ev.e)
      }
    }
  }
}
Hydrospheredata/mist
mist/master/src/main/scala/io/hydrosphere/mist/master/execution/status/StatusReporter.scala
Scala
apache-2.0
1,251
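`reportWithFlushCallback` above hands the caller a `Future[JobDetails]` that the `StoreFlusher` actor presumably completes once the update has actually been persisted; `ReportedEvent.withCallback` itself is not part of this excerpt. The snippet below is a self-contained sketch of that promise-based flush-callback pattern with stand-in names (`CallbackEvent` and `persist` are hypothetical, not mist's types).

import scala.concurrent.{ExecutionContext, Future, Promise}

object CallbackFlushSketch {

  // Stand-in for ReportedEvent.withCallback: the event plus a promise the flusher completes.
  final case class CallbackEvent[E, R](event: E, callback: Promise[R])

  // Stand-in flusher: persists the event, then completes the callback with the stored result.
  def flush[E, R](ev: CallbackEvent[E, R])(persist: E => Future[R])(
      implicit ec: ExecutionContext): Unit =
    persist(ev.event).onComplete(r => ev.callback.tryComplete(r))

  // Analogue of reportWithFlushCallback: callers get a Future that resolves after the flush.
  def reportWithFlushCallback[E, R](e: E)(persist: E => Future[R])(
      implicit ec: ExecutionContext): Future[R] = {
    val ev = CallbackEvent(e, Promise[R]())
    flush(ev)(persist)
    ev.callback.future
  }
}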
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive import java.lang.reflect.{ParameterizedType, Type, WildcardType} import java.time.Duration import scala.collection.JavaConverters._ import org.apache.hadoop.{io => hadoopIo} import org.apache.hadoop.hive.common.`type`.{HiveChar, HiveDecimal, HiveIntervalDayTime, HiveIntervalYearMonth, HiveVarchar} import org.apache.hadoop.hive.serde2.{io => hiveIo} import org.apache.hadoop.hive.serde2.objectinspector.{StructField => HiveStructField, _} import org.apache.hadoop.hive.serde2.objectinspector.primitive._ import org.apache.hadoop.hive.serde2.typeinfo.{DecimalTypeInfo, TypeInfoFactory} import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.util._ import org.apache.spark.sql.execution.datasources.DaysWritable import org.apache.spark.sql.types import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.UTF8String /** * 1. The Underlying data type in catalyst and in Hive * In catalyst: * Primitive => * UTF8String * int / scala.Int * boolean / scala.Boolean * float / scala.Float * double / scala.Double * long / scala.Long * short / scala.Short * byte / scala.Byte * [[org.apache.spark.sql.types.Decimal]] * Array[Byte] * java.sql.Date * java.sql.Timestamp * Complex Types => * Map: `MapData` * List: `ArrayData` * Struct: [[org.apache.spark.sql.catalyst.InternalRow]] * Union: NOT SUPPORTED YET * The Complex types plays as a container, which can hold arbitrary data types. 
* * In Hive, the native data types are various, in UDF/UDAF/UDTF, and associated with * Object Inspectors, in Hive expression evaluation framework, the underlying data are * Primitive Type * Java Boxed Primitives: * org.apache.hadoop.hive.common.type.HiveVarchar * org.apache.hadoop.hive.common.type.HiveChar * java.lang.String * java.lang.Integer * java.lang.Boolean * java.lang.Float * java.lang.Double * java.lang.Long * java.lang.Short * java.lang.Byte * org.apache.hadoop.hive.common.`type`.HiveDecimal * byte[] * java.sql.Date * java.sql.Timestamp * Writables: * org.apache.hadoop.hive.serde2.io.HiveVarcharWritable * org.apache.hadoop.hive.serde2.io.HiveCharWritable * org.apache.hadoop.io.Text * org.apache.hadoop.io.IntWritable * org.apache.hadoop.hive.serde2.io.DoubleWritable * org.apache.hadoop.io.BooleanWritable * org.apache.hadoop.io.LongWritable * org.apache.hadoop.io.FloatWritable * org.apache.hadoop.hive.serde2.io.ShortWritable * org.apache.hadoop.hive.serde2.io.ByteWritable * org.apache.hadoop.io.BytesWritable * org.apache.hadoop.hive.serde2.io.DateWritable * org.apache.hadoop.hive.serde2.io.TimestampWritable * org.apache.hadoop.hive.serde2.io.HiveDecimalWritable * Complex Type * List: Object[] / java.util.List * Map: java.util.Map * Struct: Object[] / java.util.List / java POJO * Union: class StandardUnion { byte tag; Object object } * * NOTICE: HiveVarchar/HiveChar is not supported by catalyst, it will be simply considered as * String type. * * * 2. Hive ObjectInspector is a group of flexible APIs to inspect value in different data * representation, and developers can extend those API as needed, so technically, * object inspector supports arbitrary data type in java. * * Fortunately, only few built-in Hive Object Inspectors are used in generic udf/udaf/udtf * evaluation. * 1) Primitive Types (PrimitiveObjectInspector & its sub classes) {{{ public interface PrimitiveObjectInspector { // Java Primitives (java.lang.Integer, java.lang.String etc.) Object getPrimitiveJavaObject(Object o); // Writables (hadoop.io.IntWritable, hadoop.io.Text etc.) Object getPrimitiveWritableObject(Object o); // ObjectInspector only inspect the `writable` always return true, we need to check it // before invoking the methods above. boolean preferWritable(); ... } }}} * 2) Complex Types: * ListObjectInspector: inspects java array or [[java.util.List]] * MapObjectInspector: inspects [[java.util.Map]] * Struct.StructObjectInspector: inspects java array, [[java.util.List]] and * even a normal java object (POJO) * UnionObjectInspector: (tag: Int, object data) (TODO: not supported by SparkSQL yet) * * 3) ConstantObjectInspector: * Constant object inspector can be either primitive type or Complex type, and it bundles a * constant value as its property, usually the value is created when the constant object inspector * constructed. * {{{ public interface ConstantObjectInspector extends ObjectInspector { Object getWritableConstantValue(); ... 
} }}} * Hive provides 3 built-in constant object inspectors: * Primitive Object Inspectors: * WritableConstantStringObjectInspector * WritableConstantHiveVarcharObjectInspector * WritableConstantHiveCharObjectInspector * WritableConstantHiveDecimalObjectInspector * WritableConstantTimestampObjectInspector * WritableConstantIntObjectInspector * WritableConstantDoubleObjectInspector * WritableConstantBooleanObjectInspector * WritableConstantLongObjectInspector * WritableConstantFloatObjectInspector * WritableConstantShortObjectInspector * WritableConstantByteObjectInspector * WritableConstantBinaryObjectInspector * WritableConstantDateObjectInspector * Map Object Inspector: * StandardConstantMapObjectInspector * List Object Inspector: * StandardConstantListObjectInspector]] * Struct Object Inspector: Hive doesn't provide the built-in constant object inspector for Struct * Union Object Inspector: Hive doesn't provide the built-in constant object inspector for Union * * * 3. This trait facilitates: * Data Unwrapping: Hive Data => Catalyst Data (unwrap) * Data Wrapping: Catalyst Data => Hive Data (wrap) * Binding the Object Inspector for Catalyst Data (toInspector) * Retrieving the Catalyst Data Type from Object Inspector (inspectorToDataType) * * * 4. Future Improvement (TODO) * This implementation is quite ugly and inefficient: * a. Pattern matching in runtime * b. Small objects creation in catalyst data => writable * c. Unnecessary unwrap / wrap for nested UDF invoking: * e.g. date_add(printf("%s-%s-%s", a,b,c), 3) * We don't need to unwrap the data for printf and wrap it again and passes in data_add */ private[hive] trait HiveInspectors { def javaTypeToDataType(clz: Type): DataType = clz match { // writable case c: Class[_] if c == classOf[hadoopIo.DoubleWritable] => DoubleType case c: Class[_] if c == classOf[hiveIo.DoubleWritable] => DoubleType case c: Class[_] if c == classOf[hiveIo.HiveDecimalWritable] => DecimalType.SYSTEM_DEFAULT case c: Class[_] if c == classOf[hiveIo.ByteWritable] => ByteType case c: Class[_] if c == classOf[hiveIo.ShortWritable] => ShortType case c: Class[_] if c == classOf[hiveIo.DateWritable] => DateType case c: Class[_] if c == classOf[hiveIo.TimestampWritable] => TimestampType case c: Class[_] if c == classOf[hadoopIo.Text] => StringType case c: Class[_] if c == classOf[hadoopIo.IntWritable] => IntegerType case c: Class[_] if c == classOf[hadoopIo.LongWritable] => LongType case c: Class[_] if c == classOf[hadoopIo.FloatWritable] => FloatType case c: Class[_] if c == classOf[hadoopIo.BooleanWritable] => BooleanType case c: Class[_] if c == classOf[hadoopIo.BytesWritable] => BinaryType // java class case c: Class[_] if c == classOf[java.lang.String] => StringType case c: Class[_] if c == classOf[java.sql.Date] => DateType case c: Class[_] if c == classOf[java.sql.Timestamp] => TimestampType case c: Class[_] if c == classOf[HiveDecimal] => DecimalType.SYSTEM_DEFAULT case c: Class[_] if c == classOf[java.math.BigDecimal] => DecimalType.SYSTEM_DEFAULT case c: Class[_] if c == classOf[Array[Byte]] => BinaryType case c: Class[_] if c == classOf[java.lang.Short] => ShortType case c: Class[_] if c == classOf[java.lang.Integer] => IntegerType case c: Class[_] if c == classOf[java.lang.Long] => LongType case c: Class[_] if c == classOf[java.lang.Double] => DoubleType case c: Class[_] if c == classOf[java.lang.Byte] => ByteType case c: Class[_] if c == classOf[java.lang.Float] => FloatType case c: Class[_] if c == classOf[java.lang.Boolean] => BooleanType // primitive type 
case c: Class[_] if c == java.lang.Short.TYPE => ShortType case c: Class[_] if c == java.lang.Integer.TYPE => IntegerType case c: Class[_] if c == java.lang.Long.TYPE => LongType case c: Class[_] if c == java.lang.Double.TYPE => DoubleType case c: Class[_] if c == java.lang.Byte.TYPE => ByteType case c: Class[_] if c == java.lang.Float.TYPE => FloatType case c: Class[_] if c == java.lang.Boolean.TYPE => BooleanType case c: Class[_] if c.isArray => ArrayType(javaTypeToDataType(c.getComponentType)) // Hive seems to return this for struct types? case c: Class[_] if c == classOf[java.lang.Object] => NullType case p: ParameterizedType if isSubClassOf(p.getRawType, classOf[java.util.List[_]]) => val Array(elementType) = p.getActualTypeArguments ArrayType(javaTypeToDataType(elementType)) case p: ParameterizedType if isSubClassOf(p.getRawType, classOf[java.util.Map[_, _]]) => val Array(keyType, valueType) = p.getActualTypeArguments MapType(javaTypeToDataType(keyType), javaTypeToDataType(valueType)) // raw java list type unsupported case c: Class[_] if isSubClassOf(c, classOf[java.util.List[_]]) => throw new AnalysisException( "Raw list type in java is unsupported because Spark cannot infer the element type.") // raw java map type unsupported case c: Class[_] if isSubClassOf(c, classOf[java.util.Map[_, _]]) => throw new AnalysisException( "Raw map type in java is unsupported because Spark cannot infer key and value types.") case _: WildcardType => throw new AnalysisException( "Collection types with wildcards (e.g. List<?> or Map<?, ?>) are unsupported because " + "Spark cannot infer the data type for these type parameters.") case c => throw new AnalysisException(s"Unsupported java type $c") } private def isSubClassOf(t: Type, parent: Class[_]): Boolean = t match { case cls: Class[_] => parent.isAssignableFrom(cls) case _ => false } private def withNullSafe(f: Any => Any): Any => Any = { input => if (input == null) null else f(input) } /** * Wraps with Hive types based on object inspector. */ protected def wrapperFor(oi: ObjectInspector, dataType: DataType): Any => Any = oi match { case _ if dataType.isInstanceOf[UserDefinedType[_]] => val sqlType = dataType.asInstanceOf[UserDefinedType[_]].sqlType wrapperFor(oi, sqlType) case x: ConstantObjectInspector => (o: Any) => x.getWritableConstantValue case x: PrimitiveObjectInspector => x match { // TODO we don't support the HiveVarcharObjectInspector yet. 
case _: StringObjectInspector if x.preferWritable() => withNullSafe(o => getStringWritable(o)) case _: StringObjectInspector => withNullSafe(o => o.asInstanceOf[UTF8String].toString()) case _: IntObjectInspector if x.preferWritable() => withNullSafe(o => getIntWritable(o)) case _: IntObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Integer]) case _: BooleanObjectInspector if x.preferWritable() => withNullSafe(o => getBooleanWritable(o)) case _: BooleanObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Boolean]) case _: FloatObjectInspector if x.preferWritable() => withNullSafe(o => getFloatWritable(o)) case _: FloatObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Float]) case _: DoubleObjectInspector if x.preferWritable() => withNullSafe(o => getDoubleWritable(o)) case _: DoubleObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Double]) case _: LongObjectInspector if x.preferWritable() => withNullSafe(o => getLongWritable(o)) case _: LongObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Long]) case _: ShortObjectInspector if x.preferWritable() => withNullSafe(o => getShortWritable(o)) case _: ShortObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Short]) case _: ByteObjectInspector if x.preferWritable() => withNullSafe(o => getByteWritable(o)) case _: ByteObjectInspector => withNullSafe(o => o.asInstanceOf[java.lang.Byte]) // To spark HiveVarchar and HiveChar are same as string case _: HiveVarcharObjectInspector if x.preferWritable() => withNullSafe(o => getStringWritable(o)) case _: HiveVarcharObjectInspector => withNullSafe { o => val s = o.asInstanceOf[UTF8String].toString new HiveVarchar(s, s.length) } case _: HiveCharObjectInspector if x.preferWritable() => withNullSafe(o => getStringWritable(o)) case _: HiveCharObjectInspector => withNullSafe { o => val s = o.asInstanceOf[UTF8String].toString new HiveChar(s, s.length) } case _: JavaHiveDecimalObjectInspector => withNullSafe(o => HiveDecimal.create(o.asInstanceOf[Decimal].toJavaBigDecimal)) case _: JavaDateObjectInspector => withNullSafe(o => DateTimeUtils.toJavaDate(o.asInstanceOf[Int])) case _: JavaTimestampObjectInspector => withNullSafe(o => DateTimeUtils.toJavaTimestamp(o.asInstanceOf[Long])) case _: HiveDecimalObjectInspector if x.preferWritable() => withNullSafe(o => getDecimalWritable(o.asInstanceOf[Decimal])) case _: HiveDecimalObjectInspector => withNullSafe(o => HiveDecimal.create(o.asInstanceOf[Decimal].toJavaBigDecimal)) case _: BinaryObjectInspector if x.preferWritable() => withNullSafe(o => getBinaryWritable(o)) case _: BinaryObjectInspector => withNullSafe(o => o.asInstanceOf[Array[Byte]]) case _: DateObjectInspector if x.preferWritable() => withNullSafe(o => getDateWritable(o)) case _: DateObjectInspector => withNullSafe(o => DateTimeUtils.toJavaDate(o.asInstanceOf[Int])) case _: TimestampObjectInspector if x.preferWritable() => withNullSafe(o => getTimestampWritable(o)) case _: TimestampObjectInspector => withNullSafe(o => DateTimeUtils.toJavaTimestamp(o.asInstanceOf[Long])) case _: HiveIntervalDayTimeObjectInspector if x.preferWritable() => withNullSafe(o => getHiveIntervalDayTimeWritable(o)) case _: HiveIntervalDayTimeObjectInspector => withNullSafe(o => { val duration = IntervalUtils.microsToDuration(o.asInstanceOf[Long]) new HiveIntervalDayTime(duration.getSeconds, duration.getNano) }) case _: HiveIntervalYearMonthObjectInspector if x.preferWritable() => withNullSafe(o => getHiveIntervalYearMonthWritable(o)) case _: 
HiveIntervalYearMonthObjectInspector => withNullSafe(o => new HiveIntervalYearMonth(o.asInstanceOf[Int])) case _: VoidObjectInspector => (_: Any) => null // always be null for void object inspector } case soi: StandardStructObjectInspector => val schema = dataType.asInstanceOf[StructType] val wrappers = soi.getAllStructFieldRefs.asScala.zip(schema.fields).map { case (ref, field) => wrapperFor(ref.getFieldObjectInspector, field.dataType) } withNullSafe { o => val struct = soi.create() val row = o.asInstanceOf[InternalRow] soi.getAllStructFieldRefs.asScala.zip(wrappers).zipWithIndex.foreach { case ((field, wrapper), i) => soi.setStructFieldData(struct, field, wrapper(row.get(i, schema(i).dataType))) } struct } case ssoi: SettableStructObjectInspector => val structType = dataType.asInstanceOf[StructType] val wrappers = ssoi.getAllStructFieldRefs.asScala.zip(structType).map { case (ref, tpe) => wrapperFor(ref.getFieldObjectInspector, tpe.dataType) } withNullSafe { o => val row = o.asInstanceOf[InternalRow] // 1. create the pojo (most likely) object val result = ssoi.create() ssoi.getAllStructFieldRefs.asScala.zip(wrappers).zipWithIndex.foreach { case ((field, wrapper), i) => val tpe = structType(i).dataType ssoi.setStructFieldData( result, field, wrapper(row.get(i, tpe)).asInstanceOf[AnyRef]) } result } case soi: StructObjectInspector => val structType = dataType.asInstanceOf[StructType] val wrappers = soi.getAllStructFieldRefs.asScala.zip(structType).map { case (ref, tpe) => wrapperFor(ref.getFieldObjectInspector, tpe.dataType) } withNullSafe { o => val row = o.asInstanceOf[InternalRow] val result = new java.util.ArrayList[AnyRef](wrappers.size) soi.getAllStructFieldRefs.asScala.zip(wrappers).zipWithIndex.foreach { case ((field, wrapper), i) => val tpe = structType(i).dataType result.add(wrapper(row.get(i, tpe)).asInstanceOf[AnyRef]) } result } case loi: ListObjectInspector => val elementType = dataType.asInstanceOf[ArrayType].elementType val wrapper = wrapperFor(loi.getListElementObjectInspector, elementType) withNullSafe { o => val array = o.asInstanceOf[ArrayData] val values = new java.util.ArrayList[Any](array.numElements()) array.foreach(elementType, (_, e) => values.add(wrapper(e))) values } case moi: MapObjectInspector => val mt = dataType.asInstanceOf[MapType] val keyWrapper = wrapperFor(moi.getMapKeyObjectInspector, mt.keyType) val valueWrapper = wrapperFor(moi.getMapValueObjectInspector, mt.valueType) withNullSafe { o => val map = o.asInstanceOf[MapData] val jmap = new java.util.HashMap[Any, Any](map.numElements()) map.foreach(mt.keyType, mt.valueType, (k, v) => jmap.put(keyWrapper(k), valueWrapper(v))) jmap } case _ => identity[Any] } /** * Builds unwrappers ahead of time according to object inspector * types to avoid pattern matching and branching costs per row. * * Strictly follows the following order in unwrapping (constant OI has the higher priority): * Constant Null object inspector => * return null * Constant object inspector => * extract the value from constant object inspector * If object inspector prefers writable => * extract writable from `data` and then get the catalyst type from the writable * Extract the java object directly from the object inspector * * NOTICE: the complex data type requires recursive unwrapping. * * @param objectInspector the ObjectInspector used to create an unwrapper. * @return A function that unwraps data objects. * Use the overloaded HiveStructField version for in-place updating of a MutableRow. 
*/ def unwrapperFor(objectInspector: ObjectInspector): Any => Any = objectInspector match { case coi: ConstantObjectInspector if coi.getWritableConstantValue == null => _ => null case poi: WritableConstantStringObjectInspector => val constant = UTF8String.fromString(poi.getWritableConstantValue.toString) _ => constant case poi: WritableConstantHiveVarcharObjectInspector => val constant = UTF8String.fromString(poi.getWritableConstantValue.getHiveVarchar.getValue) _ => constant case poi: WritableConstantHiveCharObjectInspector => val constant = UTF8String.fromString(poi.getWritableConstantValue.getHiveChar.getValue) _ => constant case poi: WritableConstantHiveDecimalObjectInspector => val constant = HiveShim.toCatalystDecimal( PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector, poi.getWritableConstantValue.getHiveDecimal) _ => constant case poi: WritableConstantTimestampObjectInspector => val t = poi.getWritableConstantValue val constant = DateTimeUtils.fromJavaTimestamp(t.getTimestamp) _ => constant case poi: WritableConstantIntObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantDoubleObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantBooleanObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantLongObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantFloatObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantShortObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantByteObjectInspector => val constant = poi.getWritableConstantValue.get() _ => constant case poi: WritableConstantBinaryObjectInspector => val writable = poi.getWritableConstantValue val constant = new Array[Byte](writable.getLength) System.arraycopy(writable.getBytes, 0, constant, 0, constant.length) _ => constant case poi: WritableConstantDateObjectInspector => val constant = DateTimeUtils.fromJavaDate(poi.getWritableConstantValue.get()) _ => constant case mi: StandardConstantMapObjectInspector => val keyUnwrapper = unwrapperFor(mi.getMapKeyObjectInspector) val valueUnwrapper = unwrapperFor(mi.getMapValueObjectInspector) val keyValues = mi.getWritableConstantValue val constant = ArrayBasedMapData(keyValues, keyUnwrapper, valueUnwrapper) _ => constant case li: StandardConstantListObjectInspector => val unwrapper = unwrapperFor(li.getListElementObjectInspector) val values = li.getWritableConstantValue.asScala .map(unwrapper) .toArray val constant = new GenericArrayData(values) _ => constant case poi: VoidObjectInspector => _ => null // always be null for void object inspector case dt: WritableConstantHiveIntervalDayTimeObjectInspector => val constant = dt.getWritableConstantValue.asInstanceOf[HiveIntervalDayTime] _ => IntervalUtils.durationToMicros( Duration.ofSeconds(constant.getTotalSeconds).plusNanos(constant.getNanos.toLong)) case ym: WritableConstantHiveIntervalYearMonthObjectInspector => val constant = ym.getWritableConstantValue.asInstanceOf[HiveIntervalYearMonth] _ => constant.getTotalMonths case pi: PrimitiveObjectInspector => pi match { // We think HiveVarchar/HiveChar is also a String case hvoi: HiveVarcharObjectInspector if hvoi.preferWritable() => data: Any => { if (data != null) { UTF8String.fromString(hvoi.getPrimitiveWritableObject(data).getHiveVarchar.getValue) } else { 
null } } case hvoi: HiveVarcharObjectInspector => data: Any => { if (data != null) { UTF8String.fromString(hvoi.getPrimitiveJavaObject(data).getValue) } else { null } } case hvoi: HiveCharObjectInspector if hvoi.preferWritable() => data: Any => { if (data != null) { UTF8String.fromString(hvoi.getPrimitiveWritableObject(data).getHiveChar.getValue) } else { null } } case hvoi: HiveCharObjectInspector => data: Any => { if (data != null) { UTF8String.fromString(hvoi.getPrimitiveJavaObject(data).getValue) } else { null } } case x: StringObjectInspector if x.preferWritable() => data: Any => { if (data != null) { // Text is in UTF-8 already. No need to convert again via fromString. Copy bytes val wObj = x.getPrimitiveWritableObject(data) val result = wObj.copyBytes() UTF8String.fromBytes(result, 0, result.length) } else { null } } case x: StringObjectInspector => data: Any => { if (data != null) { UTF8String.fromString(x.getPrimitiveJavaObject(data)) } else { null } } case x: IntObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: BooleanObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: FloatObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: DoubleObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: LongObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: ShortObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: ByteObjectInspector if x.preferWritable() => data: Any => { if (data != null) x.get(data) else null } case x: HiveDecimalObjectInspector => data: Any => { if (data != null) { HiveShim.toCatalystDecimal(x, data) } else { null } } case x: BinaryObjectInspector if x.preferWritable() => data: Any => { if (data != null) { // BytesWritable.copyBytes() only available since Hadoop2 // In order to keep backward-compatible, we have to copy the // bytes with old apis val bw = x.getPrimitiveWritableObject(data) val result = new Array[Byte](bw.getLength()) System.arraycopy(bw.getBytes(), 0, result, 0, bw.getLength()) result } else { null } } case x: DateObjectInspector if x.preferWritable() => data: Any => { if (data != null) { new DaysWritable(x.getPrimitiveWritableObject(data)).gregorianDays } else { null } } case x: DateObjectInspector => data: Any => { if (data != null) { DateTimeUtils.fromJavaDate(x.getPrimitiveJavaObject(data)) } else { null } } case x: TimestampObjectInspector if x.preferWritable() => data: Any => { if (data != null) { DateTimeUtils.fromJavaTimestamp(x.getPrimitiveWritableObject(data).getTimestamp) } else { null } } case ti: TimestampObjectInspector => data: Any => { if (data != null) { DateTimeUtils.fromJavaTimestamp(ti.getPrimitiveJavaObject(data)) } else { null } } case dt: HiveIntervalDayTimeObjectInspector if dt.preferWritable() => data: Any => { if (data != null) { val dayTime = dt.getPrimitiveWritableObject(data).getHiveIntervalDayTime IntervalUtils.durationToMicros( Duration.ofSeconds(dayTime.getTotalSeconds).plusNanos(dayTime.getNanos.toLong)) } else { null } } case dt: HiveIntervalDayTimeObjectInspector => data: Any => { if (data != null) { val dayTime = dt.getPrimitiveJavaObject(data) IntervalUtils.durationToMicros( Duration.ofSeconds(dayTime.getTotalSeconds).plusNanos(dayTime.getNanos.toLong)) } else { null } } case ym: 
HiveIntervalYearMonthObjectInspector if ym.preferWritable() => data: Any => { if (data != null) { ym.getPrimitiveWritableObject(data).getHiveIntervalYearMonth.getTotalMonths } else { null } } case ym: HiveIntervalYearMonthObjectInspector => data: Any => { if (data != null) { ym.getPrimitiveJavaObject(data).getTotalMonths } else { null } } case _ => data: Any => { if (data != null) { pi.getPrimitiveJavaObject(data) } else { null } } } case li: ListObjectInspector => val unwrapper = unwrapperFor(li.getListElementObjectInspector) data: Any => { if (data != null) { Option(li.getList(data)) .map { l => val values = l.asScala.map(unwrapper).toArray new GenericArrayData(values) } .orNull } else { null } } case mi: MapObjectInspector => val keyUnwrapper = unwrapperFor(mi.getMapKeyObjectInspector) val valueUnwrapper = unwrapperFor(mi.getMapValueObjectInspector) data: Any => { if (data != null) { val map = mi.getMap(data) if (map == null) { null } else { ArrayBasedMapData(map, keyUnwrapper, valueUnwrapper) } } else { null } } // currently, hive doesn't provide the ConstantStructObjectInspector case si: StructObjectInspector => val fields = si.getAllStructFieldRefs.asScala val unwrappers = fields.map { field => val unwrapper = unwrapperFor(field.getFieldObjectInspector) data: Any => unwrapper(si.getStructFieldData(data, field)) } data: Any => { if (data != null) { InternalRow.fromSeq(unwrappers.map(_(data)).toSeq) } else { null } } } /** * Builds unwrappers ahead of time according to object inspector * types to avoid pattern matching and branching costs per row. * * @param field The HiveStructField to create an unwrapper for. * @return A function that performs in-place updating of a MutableRow. * Use the overloaded ObjectInspector version for assignments. */ def unwrapperFor(field: HiveStructField): (Any, InternalRow, Int) => Unit = field.getFieldObjectInspector match { case oi: BooleanObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setBoolean(ordinal, oi.get(value)) case oi: ByteObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setByte(ordinal, oi.get(value)) case oi: ShortObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setShort(ordinal, oi.get(value)) case oi: IntObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setInt(ordinal, oi.get(value)) case oi: LongObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setLong(ordinal, oi.get(value)) case oi: FloatObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setFloat(ordinal, oi.get(value)) case oi: DoubleObjectInspector => (value: Any, row: InternalRow, ordinal: Int) => row.setDouble(ordinal, oi.get(value)) case oi => val unwrapper = unwrapperFor(oi) (value: Any, row: InternalRow, ordinal: Int) => row(ordinal) = unwrapper(value) } def wrap(a: Any, oi: ObjectInspector, dataType: DataType): AnyRef = { wrapperFor(oi, dataType)(a).asInstanceOf[AnyRef] } def wrap( row: InternalRow, wrappers: Array[(Any) => Any], cache: Array[AnyRef], dataTypes: Array[DataType]): Array[AnyRef] = { var i = 0 val length = wrappers.length while (i < length) { cache(i) = wrappers(i)(row.get(i, dataTypes(i))).asInstanceOf[AnyRef] i += 1 } cache } def wrap( row: Seq[Any], wrappers: Array[(Any) => Any], cache: Array[AnyRef], dataTypes: Array[DataType]): Array[AnyRef] = { var i = 0 val length = wrappers.length while (i < length) { cache(i) = wrappers(i)(row(i)).asInstanceOf[AnyRef] i += 1 } cache } /** * @param dataType Catalyst data type * @return Hive java 
object inspector (recursively), not the Writable ObjectInspector * We can easily map to the Hive built-in object inspector according to the data type. */ def toInspector(dataType: DataType): ObjectInspector = dataType match { case ArrayType(tpe, _) => ObjectInspectorFactory.getStandardListObjectInspector(toInspector(tpe)) case MapType(keyType, valueType, _) => ObjectInspectorFactory.getStandardMapObjectInspector( toInspector(keyType), toInspector(valueType)) case StringType => PrimitiveObjectInspectorFactory.javaStringObjectInspector case IntegerType => PrimitiveObjectInspectorFactory.javaIntObjectInspector case DoubleType => PrimitiveObjectInspectorFactory.javaDoubleObjectInspector case BooleanType => PrimitiveObjectInspectorFactory.javaBooleanObjectInspector case LongType => PrimitiveObjectInspectorFactory.javaLongObjectInspector case FloatType => PrimitiveObjectInspectorFactory.javaFloatObjectInspector case ShortType => PrimitiveObjectInspectorFactory.javaShortObjectInspector case ByteType => PrimitiveObjectInspectorFactory.javaByteObjectInspector case NullType => PrimitiveObjectInspectorFactory.javaVoidObjectInspector case BinaryType => PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector case DateType => PrimitiveObjectInspectorFactory.javaDateObjectInspector case TimestampType => PrimitiveObjectInspectorFactory.javaTimestampObjectInspector case _: DayTimeIntervalType => PrimitiveObjectInspectorFactory.javaHiveIntervalDayTimeObjectInspector case _: YearMonthIntervalType => PrimitiveObjectInspectorFactory.javaHiveIntervalYearMonthObjectInspector // TODO decimal precision? case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector case StructType(fields) => ObjectInspectorFactory.getStandardStructObjectInspector( java.util.Arrays.asList(fields.map(f => f.name) : _*), java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*)) case _: UserDefinedType[_] => val sqlType = dataType.asInstanceOf[UserDefinedType[_]].sqlType toInspector(sqlType) } /** * Map the catalyst expression to ObjectInspector, however, * if the expression is `Literal` or foldable, a constant writable object inspector returns; * Otherwise, we always get the object inspector according to its data type(in catalyst) * @param expr Catalyst expression to be mapped * @return Hive java objectinspector (recursively). 
*/ def toInspector(expr: Expression): ObjectInspector = expr match { case Literal(value, StringType) => getStringWritableConstantObjectInspector(value) case Literal(value, IntegerType) => getIntWritableConstantObjectInspector(value) case Literal(value, DoubleType) => getDoubleWritableConstantObjectInspector(value) case Literal(value, BooleanType) => getBooleanWritableConstantObjectInspector(value) case Literal(value, LongType) => getLongWritableConstantObjectInspector(value) case Literal(value, FloatType) => getFloatWritableConstantObjectInspector(value) case Literal(value, ShortType) => getShortWritableConstantObjectInspector(value) case Literal(value, ByteType) => getByteWritableConstantObjectInspector(value) case Literal(value, BinaryType) => getBinaryWritableConstantObjectInspector(value) case Literal(value, DateType) => getDateWritableConstantObjectInspector(value) case Literal(value, TimestampType) => getTimestampWritableConstantObjectInspector(value) case Literal(value, DecimalType()) => getDecimalWritableConstantObjectInspector(value) case Literal(_, NullType) => getPrimitiveNullWritableConstantObjectInspector case Literal(_, _: DayTimeIntervalType) => getHiveIntervalDayTimeWritableConstantObjectInspector case Literal(_, _: YearMonthIntervalType) => getHiveIntervalYearMonthWritableConstantObjectInspector case Literal(value, ArrayType(dt, _)) => val listObjectInspector = toInspector(dt) if (value == null) { ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, null) } else { val list = new java.util.ArrayList[Object]() value.asInstanceOf[ArrayData].foreach(dt, (_, e) => list.add(wrap(e, listObjectInspector, dt))) ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, list) } case Literal(value, MapType(keyType, valueType, _)) => val keyOI = toInspector(keyType) val valueOI = toInspector(valueType) if (value == null) { ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, null) } else { val map = value.asInstanceOf[MapData] val jmap = new java.util.HashMap[Any, Any](map.numElements()) map.foreach(keyType, valueType, (k, v) => jmap.put(wrap(k, keyOI, keyType), wrap(v, valueOI, valueType))) ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, jmap) } case Literal(_, dt: StructType) => toInspector(dt) case Literal(_, dt: UserDefinedType[_]) => toInspector(dt.sqlType) // We will enumerate all of the possible constant expressions, throw exception if we missed case Literal(_, dt) => sys.error(s"Hive doesn't support the constant type [$dt].") // ideally, we don't test the foldable here(but in optimizer), however, some of the // Hive UDF / UDAF requires its argument to be constant objectinspector, we do it eagerly. 
case _ if expr.foldable => toInspector(Literal.create(expr.eval(), expr.dataType)) // For those non constant expression, map to object inspector according to its data type case _ => toInspector(expr.dataType) } def inspectorToDataType(inspector: ObjectInspector): DataType = inspector match { case s: StructObjectInspector => StructType(s.getAllStructFieldRefs.asScala.map(f => types.StructField( f.getFieldName, inspectorToDataType(f.getFieldObjectInspector), nullable = true) ).toSeq) case l: ListObjectInspector => ArrayType(inspectorToDataType(l.getListElementObjectInspector)) case m: MapObjectInspector => MapType( inspectorToDataType(m.getMapKeyObjectInspector), inspectorToDataType(m.getMapValueObjectInspector)) case _: WritableStringObjectInspector => StringType case _: JavaStringObjectInspector => StringType case _: WritableHiveVarcharObjectInspector => StringType case _: JavaHiveVarcharObjectInspector => StringType case _: WritableHiveCharObjectInspector => StringType case _: JavaHiveCharObjectInspector => StringType case _: WritableIntObjectInspector => IntegerType case _: JavaIntObjectInspector => IntegerType case _: WritableDoubleObjectInspector => DoubleType case _: JavaDoubleObjectInspector => DoubleType case _: WritableBooleanObjectInspector => BooleanType case _: JavaBooleanObjectInspector => BooleanType case _: WritableLongObjectInspector => LongType case _: JavaLongObjectInspector => LongType case _: WritableShortObjectInspector => ShortType case _: JavaShortObjectInspector => ShortType case _: WritableByteObjectInspector => ByteType case _: JavaByteObjectInspector => ByteType case _: WritableFloatObjectInspector => FloatType case _: JavaFloatObjectInspector => FloatType case _: WritableBinaryObjectInspector => BinaryType case _: JavaBinaryObjectInspector => BinaryType case w: WritableHiveDecimalObjectInspector => decimalTypeInfoToCatalyst(w) case j: JavaHiveDecimalObjectInspector => decimalTypeInfoToCatalyst(j) case _: WritableDateObjectInspector => DateType case _: JavaDateObjectInspector => DateType case _: WritableTimestampObjectInspector => TimestampType case _: JavaTimestampObjectInspector => TimestampType case _: WritableHiveIntervalDayTimeObjectInspector => DayTimeIntervalType() case _: JavaHiveIntervalDayTimeObjectInspector => DayTimeIntervalType() case _: WritableHiveIntervalYearMonthObjectInspector => YearMonthIntervalType() case _: JavaHiveIntervalYearMonthObjectInspector => YearMonthIntervalType() case _: WritableVoidObjectInspector => NullType case _: JavaVoidObjectInspector => NullType } private def decimalTypeInfoToCatalyst(inspector: PrimitiveObjectInspector): DecimalType = { val info = inspector.getTypeInfo.asInstanceOf[DecimalTypeInfo] DecimalType(info.precision(), info.scale()) } private def getStringWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.stringTypeInfo, getStringWritable(value)) private def getIntWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.intTypeInfo, getIntWritable(value)) private def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.doubleTypeInfo, getDoubleWritable(value)) private def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector = 
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.booleanTypeInfo, getBooleanWritable(value)) private def getLongWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.longTypeInfo, getLongWritable(value)) private def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.floatTypeInfo, getFloatWritable(value)) private def getShortWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.shortTypeInfo, getShortWritable(value)) private def getByteWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.byteTypeInfo, getByteWritable(value)) private def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.binaryTypeInfo, getBinaryWritable(value)) private def getDateWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.dateTypeInfo, getDateWritable(value)) private def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.timestampTypeInfo, getTimestampWritable(value)) private def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.decimalTypeInfo, getDecimalWritable(value)) private def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.voidTypeInfo, null) private def getHiveIntervalDayTimeWritableConstantObjectInspector: ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.intervalDayTimeTypeInfo, null) private def getHiveIntervalYearMonthWritableConstantObjectInspector: ObjectInspector = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector( TypeInfoFactory.intervalYearMonthTypeInfo, null) private def getStringWritable(value: Any): hadoopIo.Text = if (value == null) null else new hadoopIo.Text(value.asInstanceOf[UTF8String].getBytes) private def getIntWritable(value: Any): hadoopIo.IntWritable = if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]) private def getDoubleWritable(value: Any): hiveIo.DoubleWritable = if (value == null) { null } else { new hiveIo.DoubleWritable(value.asInstanceOf[Double]) } private def getBooleanWritable(value: Any): hadoopIo.BooleanWritable = if (value == null) { null } else { new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean]) } private def getLongWritable(value: Any): hadoopIo.LongWritable = if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long]) private def getFloatWritable(value: Any): hadoopIo.FloatWritable = if (value == null) { null } else { new hadoopIo.FloatWritable(value.asInstanceOf[Float]) } private def getShortWritable(value: Any): hiveIo.ShortWritable = if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short]) 
private def getByteWritable(value: Any): hiveIo.ByteWritable = if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte]) private def getBinaryWritable(value: Any): hadoopIo.BytesWritable = if (value == null) { null } else { new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]]) } private def getDateWritable(value: Any): DaysWritable = if (value == null) { null } else { new DaysWritable(value.asInstanceOf[Int]) } private def getTimestampWritable(value: Any): hiveIo.TimestampWritable = if (value == null) { null } else { new hiveIo.TimestampWritable(DateTimeUtils.toJavaTimestamp(value.asInstanceOf[Long])) } private def getHiveIntervalDayTimeWritable(value: Any): hiveIo.HiveIntervalDayTimeWritable = if (value == null) { null } else { val duration = IntervalUtils.microsToDuration(value.asInstanceOf[Long]) new hiveIo.HiveIntervalDayTimeWritable( new HiveIntervalDayTime(duration.getSeconds, duration.getNano)) } private def getHiveIntervalYearMonthWritable(value: Any): hiveIo.HiveIntervalYearMonthWritable = if (value == null) { null } else { new hiveIo.HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(value.asInstanceOf[Int])) } private def getDecimalWritable(value: Any): hiveIo.HiveDecimalWritable = if (value == null) { null } else { // TODO precise, scale? new hiveIo.HiveDecimalWritable( HiveDecimal.create(value.asInstanceOf[Decimal].toJavaBigDecimal)) } implicit class typeInfoConversions(dt: DataType) { import org.apache.hadoop.hive.serde2.typeinfo._ import TypeInfoFactory._ private def decimalTypeInfo(decimalType: DecimalType): TypeInfo = decimalType match { case DecimalType.Fixed(precision, scale) => new DecimalTypeInfo(precision, scale) case dt => throw new AnalysisException(s"${dt.catalogString} is not supported.") } def toTypeInfo: TypeInfo = dt match { case ArrayType(elemType, _) => getListTypeInfo(elemType.toTypeInfo) case StructType(fields) => getStructTypeInfo( java.util.Arrays.asList(fields.map(_.name): _*), java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo): _*)) case MapType(keyType, valueType, _) => getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo) case BinaryType => binaryTypeInfo case BooleanType => booleanTypeInfo case ByteType => byteTypeInfo case DoubleType => doubleTypeInfo case FloatType => floatTypeInfo case IntegerType => intTypeInfo case LongType => longTypeInfo case ShortType => shortTypeInfo case StringType => stringTypeInfo case d: DecimalType => decimalTypeInfo(d) case DateType => dateTypeInfo case TimestampType => timestampTypeInfo case NullType => voidTypeInfo case _: DayTimeIntervalType => intervalDayTimeTypeInfo case _: YearMonthIntervalType => intervalYearMonthTypeInfo case dt => throw new AnalysisException( s"${dt.catalogString} cannot be converted to Hive TypeInfo") } } }
maropu/spark
sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
Scala
apache-2.0
50,979
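A minimal sketch of the recursive DataType-to-ObjectInspector mapping that toInspector above implements, limited to a handful of types; it reuses only Hive factory calls already present in the file, and the object name InspectorSketch is invented.

import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.spark.sql.types._

// Sketch only: mirrors the shape of HiveInspectors.toInspector for a few data types.
object InspectorSketch {
  def toInspector(dt: DataType): ObjectInspector = dt match {
    case StringType  => PrimitiveObjectInspectorFactory.javaStringObjectInspector
    case IntegerType => PrimitiveObjectInspectorFactory.javaIntObjectInspector
    case DoubleType  => PrimitiveObjectInspectorFactory.javaDoubleObjectInspector
    case ArrayType(elem, _) =>
      ObjectInspectorFactory.getStandardListObjectInspector(toInspector(elem))
    case StructType(fields) =>
      ObjectInspectorFactory.getStandardStructObjectInspector(
        java.util.Arrays.asList(fields.map(_.name): _*),
        java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)): _*))
    case other =>
      sys.error(s"Type not covered by this sketch: $other")
  }
}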
package lila.history import scala.concurrent.duration._ import scala.math.round import org.joda.time.DateTime import play.api.libs.json._ import lila.rating.{ Glicko, PerfType } import lila.user.{ User, Perfs } final class RatingChartApi( historyApi: HistoryApi, mongoCache: lila.memo.MongoCache.Builder, cacheTtl: FiniteDuration) { def apply(user: User): Fu[Option[String]] = cache(user) map { chart => chart.nonEmpty option chart } private val cache = mongoCache[User, String]( prefix = "history:rating", f = (user: User) => build(user) map (~_), maxCapacity = 64, timeToLive = cacheTtl, keyToString = _.id) private val columns = Json stringify { Json.arr( Json.arr("string", "Date"), Json.arr("number", "Standard"), Json.arr("number", "Opponent Rating"), Json.arr("number", "Average") ) } private def build(user: User): Fu[Option[String]] = { def ratingsMapToJson(perfType: PerfType, ratingsMap: RatingsMap) = Json obj ( "name" -> perfType.name, "points" -> ratingsMap.map { case (days, rating) => val date = user.createdAt plusDays days Json.arr(date.getYear, date.getMonthOfYear - 1, date.getDayOfMonth, rating) } ) historyApi get user.id map2 { (history: History) => Json stringify { Json.toJson { import lila.rating.PerfType._ List(Bullet, Blitz, Classical, Correspondence, Chess960, KingOfTheHill, ThreeCheck, Antichess, Atomic, Horde, Puzzle) map { pt => ratingsMapToJson(pt, history(pt)) } } } } } }
Happy0/lila
modules/history/src/main/RatingChartApi.scala
Scala
mit
1,634
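For illustration, the shape of one entry in the "points" array produced by ratingsMapToJson above, using play-json as the file does; the date and rating values are invented.

import org.joda.time.DateTime
import play.api.libs.json.Json

// Hypothetical values: one rating sample on 1 June 2015.
val date   = new DateTime(2015, 6, 1, 0, 0)
val rating = 1850
// Mirrors Json.arr(date.getYear, date.getMonthOfYear - 1, date.getDayOfMonth, rating) above;
// the month is shifted by one, presumably to match JavaScript-style zero-based months on the client.
val point = Json.arr(date.getYear, date.getMonthOfYear - 1, date.getDayOfMonth, rating)
// Json.stringify(point) == "[2015,5,1,1850]"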
package org.jetbrains.plugins.scala.codeInsight.implicits.menu import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent, CommonDataKeys} import org.jetbrains.plugins.scala.codeInsight.implicits.{Hint, MouseHandler} import org.jetbrains.plugins.scala.extensions.inWriteCommandAction class RemoveExplicitArguments extends AnAction { override def actionPerformed(e: AnActionEvent): Unit = { val editor = e.getData(CommonDataKeys.EDITOR) val model = editor.getInlayModel val inlay = model.getElementAt(MouseHandler.mousePressLocation) val element = Hint.elementOf(inlay) inWriteCommandAction(element.getParent.replace(element.getPrevSibling))(editor.getProject) inlay.dispose() } }
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInsight/implicits/menu/RemoveExplicitArguments.scala
Scala
apache-2.0
719
package io.getquill import io.getquill.context.sql.idiom.SqlIdiom import io.getquill.context.sql.idiom.QuestionMarkBindVariables import io.getquill.context.sql.idiom.ConcatSupport trait MirrorSqlDialect extends SqlIdiom with QuestionMarkBindVariables with ConcatSupport object MirrorSqlDialect extends MirrorSqlDialect { override def prepareForProbing(string: String) = string }
mentegy/quill
quill-sql/src/main/scala/io/getquill/MirrorSqlDialect.scala
Scala
apache-2.0
390
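A sketch of how a similar dialect could be assembled from the same traits; MyQuestionMarkDialect is a made-up name and only the building blocks already imported above are used.

import io.getquill.context.sql.idiom.{ConcatSupport, QuestionMarkBindVariables, SqlIdiom}

// Hypothetical dialect composed exactly like MirrorSqlDialect above.
trait MyQuestionMarkDialect
  extends SqlIdiom
  with QuestionMarkBindVariables
  with ConcatSupport

object MyQuestionMarkDialect extends MyQuestionMarkDialect {
  // Probing rewrites a query before it is sent for validation; like the mirror
  // dialect above, this sketch leaves the statement unchanged.
  override def prepareForProbing(string: String) = string
}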
package aims.samples import aims.MicroServiceSystem import aims.samples.services.CouponOperationResource import akka.actor.ActorSystem import akka.stream.FlowMaterializer import akka.util.Timeout import scala.concurrent.duration._ /** * Component: * Description: * Date: 2014/12/26 * @author Andy Ai */ object Main extends App { implicit val system = ActorSystem("aims") implicit val materializer = FlowMaterializer() implicit val timeout: Timeout = 5000.millis private val service = new MicroServiceSystem(new CouponOperationResource().resources()) service.start("localhost", port = 8080) }
aiyanbo/aims
aims-samples/src/main/scala/aims/samples/Main.scala
Scala
mit
612
package chandu0101.scalajs.react.components.demo.util /** * Created by chandrasekharkode . */ object SampleData { val personJson = """ |[{"fname":"Joshua","lname":"Myers","email":"[email protected]","country":"France"}, |{"fname":"Gloria","lname":"Porter","email":"[email protected]","country":"Indonesia"}, |{"fname":"Joe","lname":"Elliott","email":"[email protected]","country":"Brazil"}, |{"fname":"Larry","lname":"Henry","email":"[email protected]","country":"Philippines"}, |{"fname":"Frances","lname":"Roberts","email":"[email protected]","country":"Mexico"}, |{"fname":"Ashley","lname":"Turner","email":"[email protected]","country":"Brazil"}, |{"fname":"Jeremy","lname":"Morris","email":"[email protected]","country":"China"}, |{"fname":"Todd","lname":"Carter","email":"[email protected]","country":"Peru"}, |{"fname":"Antonio","lname":"Hart","email":"[email protected]","country":"Brazil"}, |{"fname":"Henry","lname":"Welch","email":"[email protected]","country":"Paraguay"}, |{"fname":"Russell","lname":"Carr","email":"[email protected]","country":"China"}, |{"fname":"Jean","lname":"Rivera","email":"[email protected]","country":"Bulgaria"}, |{"fname":"Sara","lname":"Chavez","email":"[email protected]","country":"South Africa"}, |{"fname":"Gerald","lname":"Arnold","email":"[email protected]","country":"Thailand"}, |{"fname":"Bruce","lname":"Rice","email":"[email protected]","country":"China"}, |{"fname":"Barbara","lname":"Parker","email":"[email protected]","country":"China"}, |{"fname":"Philip","lname":"Griffin","email":"[email protected]","country":"China"}, |{"fname":"Adam","lname":"Mason","email":"[email protected]","country":"Pakistan"}, |{"fname":"Juan","lname":"Johnston","email":"[email protected]","country":"Tunisia"}, |{"fname":"Robin","lname":"Clark","email":"[email protected]","country":"Brazil"}, |{"fname":"Martha","lname":"Bryant","email":"[email protected]","country":"Chile"}, |{"fname":"Fred","lname":"Carter","email":"[email protected]","country":"Bangladesh"}, |{"fname":"Norma","lname":"Hughes","email":"[email protected]","country":"Indonesia"}, |{"fname":"Cheryl","lname":"Garcia","email":"[email protected]","country":"Croatia"}, |{"fname":"Brenda","lname":"Hall","email":"[email protected]","country":"Russia"}, |{"fname":"Anthony","lname":"Lee","email":"[email protected]","country":"Kenya"}, |{"fname":"Marie","lname":"Shaw","email":"[email protected]","country":"Philippines"}, |{"fname":"Terry","lname":"Knight","email":"[email protected]","country":"Brazil"}, |{"fname":"Todd","lname":"Murray","email":"[email protected]","country":"Finland"}, |{"fname":"Sean","lname":"Hudson","email":"[email protected]","country":"Peru"}, |{"fname":"Robert","lname":"Garrett","email":"[email protected]","country":"Thailand"}, |{"fname":"Louis","lname":"Thomas","email":"[email protected]","country":"Philippines"}, |{"fname":"Stephanie","lname":"Sanchez","email":"[email protected]","country":"Argentina"}, |{"fname":"James","lname":"Young","email":"[email protected]","country":"Argentina"}, |{"fname":"Stephen","lname":"Miller","email":"[email protected]","country":"Russia"}, |{"fname":"Shirley","lname":"Rodriguez","email":"[email protected]","country":"Philippines"}, |{"fname":"Daniel","lname":"Rivera","email":"[email protected]","country":"Russia"}, |{"fname":"Steven","lname":"Morrison","email":"[email protected]","country":"Indonesia"}, |{"fname":"Julia","lname":"Gonzalez","email":"[email protected]","country":"Poland"}, |{"fname":"Randy","lname":"Ramirez","email":"[email 
protected]","country":"Hungary"}, |{"fname":"Jimmy","lname":"Hill","email":"[email protected]","country":"China"}, |{"fname":"Robert","lname":"Reed","email":"[email protected]","country":"China"}, |{"fname":"Gerald","lname":"Burke","email":"[email protected]","country":"Nicaragua"}, |{"fname":"Dorothy","lname":"Moreno","email":"[email protected]","country":"Indonesia"}, |{"fname":"Roger","lname":"Franklin","email":"[email protected]","country":"Sweden"}, |{"fname":"Billy","lname":"Wilson","email":"[email protected]","country":"Philippines"}, |{"fname":"Anna","lname":"Warren","email":"[email protected]","country":"Poland"}, |{"fname":"Brandon","lname":"Lawson","email":"[email protected]","country":"Libya"}, |{"fname":"Paula","lname":"Gonzales","email":"[email protected]","country":"Norway"}, |{"fname":"Judith","lname":"Hamilton","email":"[email protected]","country":"Madagascar"}, |{"fname":"Jonathan","lname":"Pierce","email":"[email protected]","country":"Tajikistan"}, |{"fname":"Carlos","lname":"Carpenter","email":"[email protected]","country":"Senegal"}, |{"fname":"Carolyn","lname":"Dean","email":"[email protected]","country":"Brazil"}, |{"fname":"Betty","lname":"Harper","email":"[email protected]","country":"Russia"}, |{"fname":"Margaret","lname":"Lewis","email":"[email protected]","country":"Finland"}, |{"fname":"Jean","lname":"Ellis","email":"[email protected]","country":"Bulgaria"}, |{"fname":"Janet","lname":"Hunt","email":"[email protected]","country":"Indonesia"}, |{"fname":"Amy","lname":"Burns","email":"[email protected]","country":"Tunisia"}, |{"fname":"Nicole","lname":"Fowler","email":"[email protected]","country":"Indonesia"}, |{"fname":"Kelly","lname":"Rice","email":"[email protected]","country":"Thailand"}, |{"fname":"Bobby","lname":"Ramos","email":"[email protected]","country":"Pakistan"}, |{"fname":"Timothy","lname":"Spencer","email":"[email protected]","country":"Ireland"}, |{"fname":"Brandon","lname":"Stewart","email":"[email protected]","country":"Brazil"}, |{"fname":"Janet","lname":"Coleman","email":"[email protected]","country":"Poland"}, |{"fname":"Terry","lname":"Medina","email":"[email protected]","country":"Brazil"}, |{"fname":"David","lname":"Rivera","email":"[email protected]","country":"Poland"}, |{"fname":"Amy","lname":"Lee","email":"[email protected]","country":"China"}, |{"fname":"Brandon","lname":"Sullivan","email":"[email protected]","country":"Russia"}, |{"fname":"Charles","lname":"Rivera","email":"[email protected]","country":"Portugal"}, |{"fname":"Henry","lname":"Moreno","email":"[email protected]","country":"Russia"}, |{"fname":"Janet","lname":"Mason","email":"[email protected]","country":"Afghanistan"}, |{"fname":"Judy","lname":"Porter","email":"[email protected]","country":"Netherlands"}, |{"fname":"Bruce","lname":"Diaz","email":"[email protected]","country":"Indonesia"}, |{"fname":"Russell","lname":"Fields","email":"[email protected]","country":"Indonesia"}, |{"fname":"Jose","lname":"Wagner","email":"[email protected]","country":"Czech Republic"}, |{"fname":"Joyce","lname":"Wells","email":"[email protected]","country":"Indonesia"}, |{"fname":"Debra","lname":"Sims","email":"[email protected]","country":"France"}, |{"fname":"Lois","lname":"Evans","email":"[email protected]","country":"Venezuela"}, |{"fname":"Juan","lname":"Hart","email":"[email protected]","country":"China"}, |{"fname":"Joe","lname":"Ruiz","email":"[email protected]","country":"Vietnam"}, |{"fname":"Nicole","lname":"Powell","email":"[email protected]","country":"Indonesia"}, 
|{"fname":"Sandra","lname":"Elliott","email":"[email protected]","country":"Indonesia"}, |{"fname":"Ernest","lname":"West","email":"[email protected]","country":"Czech Republic"}, |{"fname":"Philip","lname":"Garrett","email":"[email protected]","country":"France"}, |{"fname":"Tammy","lname":"Jenkins","email":"[email protected]","country":"Russia"}, |{"fname":"Andrew","lname":"Murphy","email":"[email protected]","country":"Indonesia"}, |{"fname":"Melissa","lname":"Campbell","email":"[email protected]","country":"China"}, |{"fname":"Catherine","lname":"Nichols","email":"[email protected]","country":"Nigeria"}, |{"fname":"Brandon","lname":"Black","email":"[email protected]","country":"Netherlands"}, |{"fname":"Louise","lname":"Woods","email":"[email protected]","country":"China"}, |{"fname":"Benjamin","lname":"Kelly","email":"[email protected]","country":"China"}, |{"fname":"Carolyn","lname":"Powell","email":"[email protected]","country":"Portugal"}, |{"fname":"Lillian","lname":"Perkins","email":"[email protected]","country":"Slovenia"}, |{"fname":"Patrick","lname":"Morales","email":"[email protected]","country":"China"}, |{"fname":"Thomas","lname":"Kelley","email":"[email protected]","country":"Ukraine"}, |{"fname":"James","lname":"Smith","email":"[email protected]","country":"Kenya"}, |{"fname":"Diana","lname":"Willis","email":"[email protected]","country":"Thailand"}, |{"fname":"Marie","lname":"Simmons","email":"[email protected]","country":"Brazil"}, |{"fname":"Samuel","lname":"Brown","email":"[email protected]","country":"United Kingdom"}, |{"fname":"Carlos","lname":"Chapman","email":"[email protected]","country":"Indonesia"}] """.stripMargin val personWithAgeJson = """ |[{"_id":1,"fname":"Catherine","lname":"Bennett","age":81,"email":"[email protected]","country":"Czech Republic"}, |{"_id":2,"fname":"Johnny","lname":"Fields","age":26,"email":"[email protected]","country":"Indonesia"}, |{"_id":3,"fname":"Ernest","lname":"Smith","age":65,"email":"[email protected]","country":"Poland"}, |{"_id":4,"fname":"Judith","lname":"Peterson","age":51,"email":"[email protected]","country":"Indonesia"}, |{"_id":5,"fname":"Norma","lname":"Carroll","age":76,"email":"[email protected]","country":"China"}, |{"_id":6,"fname":"Kathryn","lname":"Thompson","age":82,"email":"[email protected]","country":"China"}, |{"_id":7,"fname":"John","lname":"Warren","age":52,"email":"[email protected]","country":"Portugal"}, |{"_id":8,"fname":"Adam","lname":"Mitchell","age":13,"email":"[email protected]","country":"Russia"}, |{"_id":9,"fname":"Wanda","lname":"Watson","age":65,"email":"[email protected]","country":"Iran"}, |{"_id":10,"fname":"Catherine","lname":"Rivera","age":44,"email":"[email protected]","country":"Russia"}, |{"_id":11,"fname":"Irene","lname":"Gonzalez","age":70,"email":"[email protected]","country":"Croatia"}, |{"_id":12,"fname":"Ryan","lname":"Larson","age":94,"email":"[email protected]","country":"China"}, |{"_id":13,"fname":"David","lname":"Sims","age":13,"email":"[email protected]","country":"Malaysia"}, |{"_id":14,"fname":"Heather","lname":"Turner","age":18,"email":"[email protected]","country":"Chile"}, |{"_id":15,"fname":"Alice","lname":"Moreno","age":100,"email":"[email protected]","country":"China"}, |{"_id":16,"fname":"Karen","lname":"Howard","age":78,"email":"[email protected]","country":"Austria"}, |{"_id":17,"fname":"Paula","lname":"Lopez","age":67,"email":"[email protected]","country":"Poland"}, |{"_id":18,"fname":"Helen","lname":"Warren","age":14,"email":"[email 
protected]","country":"Ivory Coast"}, |{"_id":19,"fname":"Kathryn","lname":"Mcdonald","age":78,"email":"[email protected]","country":"China"}, |{"_id":20,"fname":"Jimmy","lname":"Morris","age":9,"email":"[email protected]","country":"Vietnam"}, |{"_id":21,"fname":"Johnny","lname":"Burton","age":47,"email":"[email protected]","country":"China"}, |{"_id":22,"fname":"Brandon","lname":"Alvarez","age":44,"email":"[email protected]","country":"Greece"}, |{"_id":23,"fname":"Alice","lname":"Lawrence","age":16,"email":"[email protected]","country":"China"}, |{"_id":24,"fname":"Philip","lname":"Gonzalez","age":99,"email":"[email protected]","country":"Portugal"}, |{"_id":25,"fname":"Ernest","lname":"Campbell","age":70,"email":"[email protected]","country":"Sweden"}, |{"_id":26,"fname":"Martin","lname":"Wood","age":73,"email":"[email protected]","country":"Indonesia"}, |{"_id":27,"fname":"Lois","lname":"Bryant","age":69,"email":"[email protected]","country":"Sweden"}, |{"_id":28,"fname":"Sharon","lname":"Freeman","age":5,"email":"[email protected]","country":"Russia"}, |{"_id":29,"fname":"Diane","lname":"Lawson","age":44,"email":"[email protected]","country":"China"}, |{"_id":30,"fname":"Alice","lname":"Fields","age":47,"email":"[email protected]","country":"Portugal"}, |{"_id":31,"fname":"Alice","lname":"Reyes","age":39,"email":"[email protected]","country":"China"}, |{"_id":32,"fname":"Margaret","lname":"James","age":73,"email":"[email protected]","country":"Indonesia"}, |{"_id":33,"fname":"Martha","lname":"Robinson","age":78,"email":"[email protected]","country":"Poland"}, |{"_id":34,"fname":"Alice","lname":"Lawson","age":75,"email":"[email protected]","country":"Canada"}, |{"_id":35,"fname":"Bruce","lname":"Chavez","age":57,"email":"[email protected]","country":"China"}, |{"_id":36,"fname":"Sara","lname":"Kelly","age":66,"email":"[email protected]","country":"Canada"}, |{"_id":37,"fname":"Carol","lname":"Robertson","age":78,"email":"[email protected]","country":"Indonesia"}, |{"_id":38,"fname":"Brenda","lname":"Hamilton","age":14,"email":"[email protected]","country":"Thailand"}, |{"_id":39,"fname":"Harold","lname":"Patterson","age":35,"email":"[email protected]","country":"Armenia"}, |{"_id":40,"fname":"Ryan","lname":"Lopez","age":68,"email":"[email protected]","country":"China"}, |{"_id":41,"fname":"Ralph","lname":"Russell","age":97,"email":"[email protected]","country":"Indonesia"}, |{"_id":42,"fname":"Lisa","lname":"Armstrong","age":13,"email":"[email protected]","country":"Palestinian Territory"}, |{"_id":43,"fname":"Daniel","lname":"Harris","age":68,"email":"[email protected]","country":"Vietnam"}, |{"_id":44,"fname":"Timothy","lname":"Harper","age":18,"email":"[email protected]","country":"Poland"}, |{"_id":45,"fname":"Jerry","lname":"Weaver","age":56,"email":"[email protected]","country":"Peru"}, |{"_id":46,"fname":"Mary","lname":"Williamson","age":73,"email":"[email protected]","country":"China"}, |{"_id":47,"fname":"Donald","lname":"Chapman","age":40,"email":"[email protected]","country":"Brazil"}, |{"_id":48,"fname":"Randy","lname":"Mccoy","age":95,"email":"[email protected]","country":"China"}, |{"_id":49,"fname":"Stephanie","lname":"Bennett","age":43,"email":"[email protected]","country":"Greece"}, |{"_id":50,"fname":"Mildred","lname":"Moore","age":67,"email":"[email protected]","country":"Indonesia"}, |{"_id":51,"fname":"Lisa","lname":"George","age":70,"email":"[email protected]","country":"Czech Republic"}, |{"_id":52,"fname":"Emily","lname":"Myers","age":7,"email":"[email 
protected]","country":"China"}, |{"_id":53,"fname":"Andrew","lname":"Wilson","age":42,"email":"[email protected]","country":"Croatia"}, |{"_id":54,"fname":"Alice","lname":"Morris","age":29,"email":"[email protected]","country":"Somalia"}, |{"_id":55,"fname":"Roger","lname":"Carroll","age":37,"email":"[email protected]","country":"New Caledonia"}, |{"_id":56,"fname":"Gerald","lname":"Parker","age":74,"email":"[email protected]","country":"China"}, |{"_id":57,"fname":"Alan","lname":"Warren","age":27,"email":"[email protected]","country":"China"}, |{"_id":58,"fname":"Kelly","lname":"Morris","age":84,"email":"[email protected]","country":"Iran"}, |{"_id":59,"fname":"Joe","lname":"Green","age":56,"email":"[email protected]","country":"Guatemala"}, |{"_id":60,"fname":"Cheryl","lname":"Vasquez","age":92,"email":"[email protected]","country":"Tunisia"}, |{"_id":61,"fname":"Jeffrey","lname":"Carpenter","age":4,"email":"[email protected]","country":"China"}, |{"_id":62,"fname":"Frances","lname":"Palmer","age":42,"email":"[email protected]","country":"Philippines"}, |{"_id":63,"fname":"Roger","lname":"Bryant","age":47,"email":"[email protected]","country":"Finland"}, |{"_id":64,"fname":"Juan","lname":"Hernandez","age":64,"email":"[email protected]","country":"Sweden"}, |{"_id":65,"fname":"Bruce","lname":"Stewart","age":94,"email":"[email protected]","country":"Japan"}, |{"_id":66,"fname":"Fred","lname":"Hunter","age":81,"email":"[email protected]","country":"China"}, |{"_id":67,"fname":"Ernest","lname":"Henderson","age":64,"email":"[email protected]","country":"Argentina"}, |{"_id":68,"fname":"Anne","lname":"Martin","age":11,"email":"[email protected]","country":"China"}, |{"_id":69,"fname":"Roger","lname":"Washington","age":50,"email":"[email protected]","country":"China"}, |{"_id":70,"fname":"Mildred","lname":"Andrews","age":64,"email":"[email protected]","country":"Portugal"}, |{"_id":71,"fname":"Susan","lname":"Martin","age":82,"email":"[email protected]","country":"Aland Islands"}, |{"_id":72,"fname":"Debra","lname":"Ford","age":9,"email":"[email protected]","country":"China"}, |{"_id":73,"fname":"Shawn","lname":"Nguyen","age":16,"email":"[email protected]","country":"Costa Rica"}, |{"_id":74,"fname":"Jose","lname":"Jordan","age":74,"email":"[email protected]","country":"Colombia"}, |{"_id":75,"fname":"Amy","lname":"Bell","age":71,"email":"[email protected]","country":"Yemen"}, |{"_id":76,"fname":"Patrick","lname":"Hansen","age":45,"email":"[email protected]","country":"China"}, |{"_id":77,"fname":"Anna","lname":"Olson","age":26,"email":"[email protected]","country":"Mongolia"}, |{"_id":78,"fname":"Eugene","lname":"Ruiz","age":58,"email":"[email protected]","country":"United States"}, |{"_id":79,"fname":"Catherine","lname":"Lynch","age":13,"email":"[email protected]","country":"China"}, |{"_id":80,"fname":"Kimberly","lname":"Hernandez","age":65,"email":"[email protected]","country":"Albania"}, |{"_id":81,"fname":"Heather","lname":"Jackson","age":95,"email":"[email protected]","country":"China"}, |{"_id":82,"fname":"Joshua","lname":"Wheeler","age":20,"email":"[email protected]","country":"China"}, |{"_id":83,"fname":"Joseph","lname":"Hayes","age":87,"email":"[email protected]","country":"Albania"}, |{"_id":84,"fname":"Maria","lname":"Robinson","age":43,"email":"[email protected]","country":"Botswana"}, |{"_id":85,"fname":"Carl","lname":"Alvarez","age":70,"email":"[email protected]","country":"China"}, |{"_id":86,"fname":"Doris","lname":"Young","age":15,"email":"[email 
protected]","country":"France"}, |{"_id":87,"fname":"Johnny","lname":"Anderson","age":53,"email":"[email protected]","country":"China"}, |{"_id":88,"fname":"Frank","lname":"Day","age":54,"email":"[email protected]","country":"Indonesia"}, |{"_id":89,"fname":"Donna","lname":"Perez","age":22,"email":"[email protected]","country":"Indonesia"}, |{"_id":90,"fname":"Nicholas","lname":"Miller","age":70,"email":"[email protected]","country":"Peru"}, |{"_id":91,"fname":"Rachel","lname":"Reed","age":81,"email":"[email protected]","country":"Indonesia"}, |{"_id":92,"fname":"Cheryl","lname":"Berry","age":31,"email":"[email protected]","country":"Russia"}, |{"_id":93,"fname":"Christopher","lname":"Gray","age":41,"email":"[email protected]","country":"China"}, |{"_id":94,"fname":"Lois","lname":"Ryan","age":63,"email":"[email protected]","country":"China"}, |{"_id":95,"fname":"Earl","lname":"Garza","age":65,"email":"[email protected]","country":"China"}, |{"_id":96,"fname":"Martin","lname":"Ferguson","age":40,"email":"[email protected]","country":"Taiwan"}, |{"_id":97,"fname":"Janet","lname":"Harris","age":41,"email":"[email protected]","country":"Hungary"}, |{"_id":98,"fname":"Alice","lname":"Powell","age":48,"email":"[email protected]","country":"Brazil"}, |{"_id":99,"fname":"Kathy","lname":"Wagner","age":51,"email":"[email protected]","country":"China"}, |{"_id":100,"fname":"Debra","lname":"Cooper","age":17,"email":"[email protected]","country":"Indonesia"}] """.stripMargin }
coreyauger/scalajs-react-components
demo/src/main/scala/chandu0101/scalajs/react/components/demo/util/SampleData.scala
Scala
apache-2.0
20,963
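A hedged sketch of decoding the personJson sample above into case classes. play-json is used purely for illustration (the demo project may rely on a different JSON library), and Person is an invented type.

import chandu0101.scalajs.react.components.demo.util.SampleData
import play.api.libs.json.{Json, Reads}

// Hypothetical record type matching the fields in SampleData.personJson.
case class Person(fname: String, lname: String, email: String, country: String)

object Person {
  implicit val reads: Reads[Person] = Json.reads[Person]
}

// Each element of the JSON array decodes to one Person.
val people: List[Person] = Json.parse(SampleData.personJson).as[List[Person]]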
package com.geeksville.json import org.json4s.CustomSerializer import java.util.UUID import org.json4s.JsonAST._ object UUIDSerializer extends CustomSerializer[UUID](format => ( { case JString(s) => UUID.fromString(s) }, { case x: UUID => JString(x.toString) }))
dronekit/dronekit-server
src/main/scala/com/geeksville/json/UUIDSerializer.scala
Scala
gpl-3.0
292
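A hedged usage sketch for the UUIDSerializer above: register it in a json4s Formats instance and round-trip a UUID. Assumes json4s-native is on the classpath; Holder is a made-up wrapper type.

import java.util.UUID
import org.json4s.DefaultFormats
import org.json4s.native.Serialization.{read, write}
import com.geeksville.json.UUIDSerializer

// Hypothetical wrapper type for the example.
case class Holder(id: UUID)

implicit val formats = DefaultFormats + UUIDSerializer

val json     = write(Holder(UUID.randomUUID())) // {"id":"xxxxxxxx-xxxx-..."}
val restored = read[Holder](json)               // UUID parsed back via UUIDSerializer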
package com.tozny import java.util.Date import org.apache.commons.codec.binary.Base64.decodeBase64 import play.api.libs.json.Json.toJson import play.api.libs.json.{JsObject, JsValue, Reads, Writes} import scala.concurrent.{Await, Future, ExecutionContext} import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} class Realm( val realmKeyId: String, val realmSecret: String, val apiUrl: String = sys.env("API_URL") ) { import Protocol.asTry val rawCall = Protocol.sendRequest( apiUrl, realmKeyId, realmSecret, _: String, _: JsObject ) /** * We have received a signed package and signature - let's verify it. */ def verifyLogin(signedData: String, signature: String): Try[Login] = { if (Protocol.checkSignature(realmSecret, signature, signedData)) { Protocol.decode[Login](signedData) } else { new Failure(InvalidSignature("invalid signature")) } } def checkValidLogin(userId: String, sessionId: String, expiresAt: Date ): Try[Boolean] = { val resp = rawCall("realm.check_valid_login", new JsObject(Seq( "user_id" -> toJson(userId), "session_id" -> toJson(sessionId), "expires_at" -> toJson(Protocol.encodeTime(expiresAt)) ))) for { r <- resp ret <- asTry((r \ "return").validate[String]) } yield ret == "true" } /** * Sends a question challenge - optionally directed to a specific user. * * Fields returned with the response include: * - challenge * - realm_key_id * - session_id, * - qr_url * - mobile_url * - created_at * - presence */ def questionChallenge[A,B]( question: A, userId: String )(implicit w: Writes[A], r: Reads[B]): Try[B] = { val params = new JsObject(Seq("question" -> toJson(question))) for { json <- rawCall("realm.question_challenge", params) result <- asTry(json.validate[B]) } yield result } def userGet(userId: String): Try[ToznyUser] = { val resp = rawCall("realm.user_get", new JsObject(Seq( "user_id" -> toJson(userId) ))) for { r <- resp user <- asTry((r \ "results").validate[ToznyUser]) } yield user } }
tozny/sdk-scala
src/main/scala/com/tozny/Realm.scala
Scala
apache-2.0
2,190
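A usage sketch for the verification entry point above; the realm credentials, signed payload, and signature are placeholders.

import scala.util.{Failure, Success}
import com.tozny.Realm

// Hypothetical credentials and request values, for illustration only.
val realm = new Realm(
  realmKeyId  = "sid_example",
  realmSecret = "secret",
  apiUrl      = "https://api.tozny.example/v1")

realm.verifyLogin(signedData = "<base64 payload>", signature = "<hmac>") match {
  case Success(login) => println(s"verified login: $login")
  case Failure(err)   => println(s"rejected: ${err.getMessage}")
}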
package parsers.exercises import scala.util.parsing.combinator.JavaTokenParsers trait Exercise3 extends JavaTokenParsers { def alt[A](p1: => Parser[A], p2: => Parser[A]): Parser[A] = sys.error("TODO") }
julienrf/scala-lessons
highlights/parsers/code/src/main/scala/parsers/exercises/Exercise3.scala
Scala
mit
208
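The exercise above deliberately leaves alt unimplemented. One possible solution, not necessarily the intended one, is to delegate to the built-in alternation combinator:

import scala.util.parsing.combinator.JavaTokenParsers

trait Exercise3Solution extends JavaTokenParsers {
  // Try p1 first; if it fails, fall back to p2 on the same input.
  def alt[A](p1: => Parser[A], p2: => Parser[A]): Parser[A] = p1 | p2
}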
package repositories.musitobject.dao

object SearchFieldValues {

  sealed trait IntervalBoundary

  case class Value(v: Long) extends IntervalBoundary
  case class Infinite() extends IntervalBoundary

  def boundaryAsString(intervalBoundary: IntervalBoundary) = {
    intervalBoundary match {
      case Value(v)   => v.toString()
      case Infinite() => ""
    }
  }

  //case class IntervalValue(from: IntervalBoundary, to: IntervalBoundary)

  sealed trait FieldValue {
    val v: String
  }

  case class EmptyValue() extends FieldValue {
    override val v: String = ""
  }

  case class LiteralValue(v: String) extends FieldValue

  /**
   * If v contains a value which needs to be escaped, escapeChar contains the
   * appropriate escape character. If v doesn't contain a value which needs to be
   * escaped, the escapeChar is not used.
   */
  case class WildcardValue(v: String, escapeChar: Char) extends FieldValue

  case class IntervalValue(from: IntervalBoundary, to: IntervalBoundary) extends FieldValue {
    override val v: String = boundaryAsString(from) + ".." + boundaryAsString(to)
  }
}
MUSIT-Norway/musit
service_backend/app/repositories/musitobject/dao/SearchFieldValues.scala
Scala
gpl-2.0
1,124
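A small illustration of how the value types above combine; the search inputs are invented.

import repositories.musitobject.dao.SearchFieldValues._

// Hypothetical search inputs, one per value type.
val exact    = LiteralValue("MUSNR-123")
val wildcard = WildcardValue("MUS%", escapeChar = '\\')
val range    = IntervalValue(Value(10), Infinite())   // renders as "10.."

def describe(fv: FieldValue): String = fv match {
  case EmptyValue()          => "no criterion"
  case LiteralValue(v)       => s"equals $v"
  case WildcardValue(v, esc) => s"like $v (escape '$esc')"
  case iv: IntervalValue     => s"in range ${iv.v}"
}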
package text.vector import util.Config /** * @author ynupc * Created on 2016/05/22 */ object VectorType extends Enumeration { val None, Binary, Frequency = Value def get: VectorType.Value = { if (Config.isFrequencyOtherwiseBinary) { Frequency } else { Binary } } }
ynupc/scalastringcourseday6
src/main/scala/text/vector/VectorType.scala
Scala
apache-2.0
315
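For illustration, a caller might branch on the configured vector type as below; the weighting logic is invented and not part of the project.

import text.vector.VectorType

// Hypothetical use of the enumeration: pick a term weight per vector type.
def weight(rawCount: Int): Double = VectorType.get match {
  case VectorType.Binary    => if (rawCount > 0) 1.0 else 0.0
  case VectorType.Frequency => rawCount.toDouble
  case VectorType.None      => 0.0
}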
package org.scalajs.openui5.sap.ui.core import org.scalajs.dom import org.scalajs.openui5.sap.ui.base.Object import scala.scalajs.js import scala.scalajs.js.annotation.JSName import scala.scalajs.js.| @JSName("sap.ui.core.RenderManager") @js.native /** RenderManager that will take care for rendering Controls. * * The RenderManager will be available from the sap.ui.core.Core instance (available via sap.ui.getCore()). * It can be used to render Controls and Control-Trees. * * The convention for renderers belonging to some controls is the following: * * - for a Control e.g. sap.ui.controls.InputField there shall be * - a renderer named sap.ui.controls.InputFieldRenderer */ trait RenderManager extends Object { /** Adds a class to the class collection if the name is not empty or null. * * The class collection is flushed if it is written to the buffer using * [[writeClasses]]. * * @param name name of the class to be added; null values are ignored * @return this render manager instance to allow chaining */ def addClass(name: String): this.type = js.native /** Adds a style property to the style collection if the value is not empty or null. * * The style collection is flushed if it is written to the buffer using * [[writeStyles]]. * * @param name * @param value * @return */ def addStyle(name: String, value: String): this.type = js.native /** Cleans up the rendering state of the given control with rendering it. * * A control is responsible for the rendering of all its child controls. But * in some cases it makes sense that a control does not render all its * children based on a filter condition. For example a Carousel control only * renders the current visible parts (and maybe some parts before and after * the visible area) for performance reasons. If a child was rendered but * should not be rendered anymore because the filter condition does not apply * anymore this child must be cleaned up correctly (e.g deregistering * eventhandlers, ...). * * The following example shows how [[renderControl]] and * [[cleanupControlWithoutRendering]] should be used: * * {{{ * render = function(rm, ctrl){ * //... * var aAggregatedControls = * //... * for(var i=0; i * }}} * * @note The method does not remove DOM of the given control. * The callee of this method has to take over the responsibility to * cleanup the DOM of the control afterwards. For parents which are * rendered with the normal mechanism as shown in the example above * this requirement is fulfilled, because the control is not added to * the rendering buffer (renderControl is not called) and the DOM is * replaced when the rendering cycle is finalized. * @param control the control that should be cleaned up */ def cleanupControlWithoutRendering(control: Control): Unit = js.native /** Renders the content of the rendering buffer into the provided DOMNode. * * This function must not be called within control renderers. * * ==Usage== * {{{ * // Create a new instance of the RenderManager * var rm = sap.ui.getCore().createRenderManager(); * // Use the writer API to fill the buffers * rm.write(...); * rm.renderControl(oControl); * rm.write(...); * ... * // Finally flush the buffer into the provided DOM node (The current content is removed) * rm.flush(oDomNode); * // If the instance is not needed anymore, destroy it * rm.destroy(); * }}} * * @param targetDomNode The node in the dom where the buffer should be flushed into. * @param doNotPreserve flag, whether to not preserve (true) the content or to preserve it (false). 
* @param insert flag, whether to append (true) or replace (false) the buffer of the target dom node or to insert at a certain position (int) */ def flush(targetDomNode: dom.Element, doNotPreserve: Boolean, insert: Boolean | Int): Unit = js.native /** Returns the configuration object Shortcut for `sap.ui.getCore().getConfiguration()` * * @return the configuration object */ def getConfiguration(): Configuration = js.native /** Returns the renderer class for a given control instance. * * @param control the control that should be rendered * @return renderer for control. */ def getRenderer(control: Control): Renderer = js.native /** Renders the given control to the provided DOMNode. * * If the control is already rendered in the provided DOMNode the DOM of the * control is replaced. If the control is already rendered somewhere else * the current DOM of the control is removed and the new DOM is appended to * the provided DOMNode. * * This function must not be called within control renderers. * * @param control the [[Control]] that should be rendered. * @param targetDomNode the node in the DOM where the result of the rendering * should be inserted. */ def render(control: Control, targetDomNode: dom.Element): Unit = js.native /** Turns the given control into its HTML representation and appends it to the * rendering buffer. * * If the given control is undefined or null, then nothing is rendered. * * @param control the control that should be rendered. */ def renderControl(control: Control): Unit = js.native /** Write the given texts to the buffer. * * @param text (can be a number too) * @return this render manager instance to allow chaining */ def write(text: String | Int): this.type = js.native /** Writes the attribute and its value into the HTML. * * @param name the name of the attribute * @param value the value of the attribute * @return this render manager instance to allow chaining */ def writeAttribute(name: String, value: String | Int | Boolean): this.type = js.native /** Writes the attribute and its value into the HTML. * * The value is properly escaped to avoid XSS attacks. * * @param name the name of the attribute * @param value the value of the attribute * @return this render manager instance to allow chaining */ def writeAttributeEscaped(name: String, value: js.Any): this.type = js.native /** Writes and flushes the class collection. * * Writes and flushes the class collection (all CSS classes added by * "addClass()" since the last flush). Also writes the custom style classes * added by the application with "addStyleClass(...)". Custom classes are * added by default from the currently rendered control. If an oElement is * given, this Element's custom style classes are added instead. * If oElement === false, no custom style classes are added. * * @param element an Element from which to add custom style classes (instead * of adding from the control itself) * @return this render manager instance to allow chaining */ def writeClasses(element: js.UndefOr[Element | Boolean] = js.undefined): this.type = js.native /** Writes the controls data into the HTML. * * Control Data consists of at least the id of a control. * * @param control the control whose identifying information should be written * to the buffer * @return this render manager instance to allow chaining */ def writeControlData(control: Control): this.type = js.native /** Writes the elements data into the HTML. * * Element Data consists at least of the id of a element. 
* * @param element the element whose identifying information should be written * to the buffer * @return this render manager instance to allow chaining */ def writeElementData(element: Element): this.type = js.native /** Escape text for HTML and write it to the buffer. * * @param text text to escape * @param lineBreaks whether to convert linebreaks into tags * @return this render manager instance to allow chaining */ def writeEscaped(text: String, lineBreaks: Boolean): this.type = js.native /** Writes necessary invisible control/element placeholder data into the HTML. * * Controls should only use this method if they can't live with the standard * 'visible=false' implementation of the RenderManager which renders dummy * HTMLSpanElement for better re-rendering performance. Even though HTML5 * error tolerance accepts this for most of the cases and these dummy * elements are not in the render tree of the Browser, controls may need to * generate a valid and semantic HTML output when the rendered * HTMLSpanElement is not an allowed element(e.g. `<span>` element within * the `<tr>` or `<li>` group). * * The caller needs to start an opening HTML tag, then call this method, then * complete the opening and closing tag. * * {{{ * oRenderManager.write(""); * }}} * * @param element an instance of [[Element]] * @return this render manager instance to allow chaining */ def writeInvisiblePlaceholderData(element: Element): this.type = js.native /** Writes and flushes the style collection. * * @return this render manager instance to allow chaining */ def writeStyles(): this.type = js.native }
lastsys/scalajs-openui5
src/main/scala/org/scalajs/openui5/sap/ui/core/RenderManager.scala
Scala
mit
9,468
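A hedged sketch of the call sequence a typical control renderer makes against this facade; the markup and class name are illustrative only.

import org.scalajs.openui5.sap.ui.core.{Control, RenderManager}

// Hypothetical renderer body showing the usual write / writeControlData / writeClasses flow.
def render(rm: RenderManager, control: Control): Unit = {
  rm.write("<div")
  rm.writeControlData(control)   // emits the control's identifying attributes
  rm.addClass("myControl")
  rm.writeClasses()              // flushes classes added via addClass
  rm.write(">")
  rm.writeEscaped("Hello", lineBreaks = false)
  rm.write("</div>")
}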
/**
 * Copyright 2011-2012 eBusiness Information, Groupe Excilys (www.excilys.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.excilys.ebi.gatling.http.check.after

import com.excilys.ebi.gatling.core.check.{ ExtractorFactory, MatcherCheckBuilder }
import com.excilys.ebi.gatling.core.session.NOOP_EVALUATABLE_STRING
import com.excilys.ebi.gatling.http.check.{ HttpCheck, HttpExtractorCheckBuilder }
import com.excilys.ebi.gatling.http.request.HttpPhase.AfterResponseReceived
import com.excilys.ebi.gatling.http.response.ExtendedResponse

/**
 * HttpBodyResponseTimeCheckBuilder class companion
 *
 * It contains DSL definitions
 */
object HttpBodyResponseTimeCheckBuilder {

  private val findExtendedResponseTimeExtractorFactory: ExtractorFactory[ExtendedResponse, String, Long] = (response: ExtendedResponse) => (expression: String) => Some(response.reponseTimeInMillis)
  private val findLatencyExtractorFactory: ExtractorFactory[ExtendedResponse, String, Long] = (response: ExtendedResponse) => (expression: String) => Some(response.latencyInMillis)

  val responseTimeInMillis = new HttpBodyResponseTimeCheckBuilder(findExtendedResponseTimeExtractorFactory)
  val latencyInMillis = new HttpBodyResponseTimeCheckBuilder(findLatencyExtractorFactory)
}

/**
 * This class builds a response time check
 */
class HttpBodyResponseTimeCheckBuilder(factory: ExtractorFactory[ExtendedResponse, String, Long]) extends HttpExtractorCheckBuilder[Long, String](NOOP_EVALUATABLE_STRING, AfterResponseReceived) {

  def find = new MatcherCheckBuilder[HttpCheck[String], ExtendedResponse, String, Long](httpCheckBuilderFactory, factory)
}
Tjoene/thesis
Case_Programs/gatling-1.4.0/gatling-http/src/main/scala/com/excilys/ebi/gatling/http/check/after/HttpBodyResponseTimeCheckBuilder.scala
Scala
gpl-2.0
2,152
/* * Copyright 2015-2016 IBM Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whisk.core.controller import java.util.Base64 import java.nio.charset.StandardCharsets import scala.concurrent.Future import scala.util.Failure import scala.util.Success import scala.util.Try import WhiskWebActionsApi.MediaExtension import spray.http._ import spray.http.HttpEntity.Empty import spray.http.HttpEntity.NonEmpty import spray.http.HttpHeaders._ import spray.http.MediaTypes._ import spray.http.StatusCodes._ import spray.http.Uri.Query import spray.http.parser.HttpParser import spray.httpx.SprayJsonSupport._ import spray.json._ import spray.json.DefaultJsonProtocol._ import spray.routing.Directives import spray.routing.RequestContext import spray.routing.Route import whisk.common.TransactionId import whisk.core.controller.actions.BlockingInvokeTimeout import whisk.core.controller.actions.PostActionActivation import whisk.core.database._ import whisk.core.entity._ import whisk.core.entity.types._ import whisk.http.ErrorResponse.terminate import whisk.http.Messages import whisk.utils.JsHelpers._ protected[controller] sealed class WebApiDirectives private (prefix: String) { // enforce the presence of an extension (e.g., .http) in the URI path val enforceExtension = false // the field name that represents the status code for an http action response val statusCode = "statusCode" // parameters that are added to an action input to pass HTTP request context values val method: String = fields("method") val headers: String = fields("headers") val path: String = fields("path") val namespace: String = fields("user") val query: String = fields("query") val body: String = fields("body") lazy val reservedProperties: Set[String] = Set(method, headers, path, namespace, query, body) protected final def fields(f: String) = s"$prefix$f" } // field names for /web with raw-http action protected[controller] object WebApiDirectives { // field names for /web val web = new WebApiDirectives("__ow_") // field names used for /experimental/web val exp = new WebApiDirectives("__ow_meta_") { override val enforceExtension = true override val method = fields("verb") override val namespace = fields("namespace") override val statusCode = "code" } } private case class Context( propertyMap: WebApiDirectives, method: HttpMethod, headers: List[HttpHeader], path: String, query: Query, body: Option[JsValue] = None) { val queryAsMap = query.toMap // returns true iff the attached query and body parameters contain a property // that conflicts with the given reserved parameters def overrides(reservedParams: Set[String]): Set[String] = { val queryParams = queryAsMap.keySet val bodyParams = body.map { case JsObject(fields) => fields.keySet case _ => Set.empty }.getOrElse(Set.empty) (queryParams ++ bodyParams) intersect reservedParams } // attach the body to the context def withBody(b: Option[JsValue]) = Context(propertyMap, method, headers, path, query, b) def metadata(user: Option[Identity]): Map[String, JsValue] = { Map(propertyMap.method -> 
method.value.toLowerCase.toJson, propertyMap.headers -> headers.map(h => h.lowercaseName -> h.value).toMap.toJson, propertyMap.path -> path.toJson) ++ user.map(u => propertyMap.namespace -> u.namespace.asString.toJson) } def toActionArgument(user: Option[Identity], boxQueryAndBody: Boolean): Map[String, JsValue] = { val queryParams = if (boxQueryAndBody) { Map(propertyMap.query -> JsString(query.render(new StringRendering, StandardCharsets.UTF_8).get)) } else { queryAsMap.map(kv => kv._1 -> JsString(kv._2)) } // if the body is a json object, merge with query parameters // otherwise, this is an opaque body that will be nested under // __ow_body in the parameters sent to the action as an argument val bodyParams = body match { case Some(JsObject(fields)) if !boxQueryAndBody => fields case Some(v) => Map(propertyMap.body -> v) case None if !boxQueryAndBody => Map.empty case _ => Map(propertyMap.body -> JsObject()) } // precedence order is: query params -> body (last wins) metadata(user) ++ queryParams ++ bodyParams } } protected[core] object WhiskWebActionsApi extends Directives { private val mediaTranscoders = { // extensions are expected to contain only [a-z] Seq(MediaExtension(".http", None, false, resultAsHttp _), MediaExtension(".json", None, true, resultAsJson _), MediaExtension(".html", Some(List("html")), true, resultAsHtml _), MediaExtension(".svg", Some(List("svg")), true, resultAsSvg _), MediaExtension(".text", Some(List("text")), true, resultAsText _)) } private val defaultMediaTranscoder: MediaExtension = mediaTranscoders.find(_.extension == ".http").get val allowedExtensions: Set[String] = mediaTranscoders.map(_.extension).toSet /** * Splits string into a base name plus optional extension. * If name ends with ".xxxx" which matches a known extension, accept it as the extension. * Otherwise, the extension is ".http" by definition unless enforcing the presence of an extension. */ def mediaTranscoderForName(name: String, enforceExtension: Boolean): (String, Option[MediaExtension]) = { mediaTranscoders.find(mt => name.endsWith(mt.extension)).map { mt => val base = name.dropRight(mt.extensionLength) (base, Some(mt)) }.getOrElse { (name, if (enforceExtension) None else Some(defaultMediaTranscoder)) } } /** * Supported extensions, their default projection and transcoder to complete a request. 
* * @param extension the supported media types for action response * @param defaultProject the default media extensions for action projection * @param transcoder the HTTP decoder and terminator for the extension */ protected case class MediaExtension( extension: String, defaultProjection: Option[List[String]], projectionAllowed: Boolean, transcoder: (JsValue, TransactionId, WebApiDirectives) => RequestContext => Unit) { val extensionLength = extension.length } private def resultAsHtml(result: JsValue, transid: TransactionId, rp: WebApiDirectives): RequestContext => Unit = result match { case JsString(html) => respondWithMediaType(`text/html`) { complete(OK, html) } case _ => terminate(BadRequest, Messages.invalidMedia(`text/html`))(transid) } private def resultAsSvg(result: JsValue, transid: TransactionId, rp: WebApiDirectives): RequestContext => Unit = result match { case JsString(svg) => respondWithMediaType(`image/svg+xml`) { complete(OK, svg) } case _ => terminate(BadRequest, Messages.invalidMedia(`image/svg+xml`))(transid) } private def resultAsText(result: JsValue, transid: TransactionId, rp: WebApiDirectives): RequestContext => Unit = { result match { case r: JsObject => complete(OK, r.prettyPrint) case r: JsArray => complete(OK, r.prettyPrint) case JsString(s) => complete(OK, s) case JsBoolean(b) => complete(OK, b.toString) case JsNumber(n) => complete(OK, n.toString) case JsNull => complete(OK, JsNull.toString) } } private def resultAsJson(result: JsValue, transid: TransactionId, rp: WebApiDirectives): RequestContext => Unit = { result match { case r: JsObject => complete(OK, r) case r: JsArray => complete(OK, r) case _ => terminate(BadRequest, Messages.invalidMedia(`application/json`))(transid) } } private def resultAsHttp(result: JsValue, transid: TransactionId, rp: WebApiDirectives): RequestContext => Unit = { Try { val JsObject(fields) = result val headers = fields.get("headers").map { case JsObject(hs) => hs.map { case (k, JsString(v)) => RawHeader(k, v) case (k, JsBoolean(v)) => RawHeader(k, v.toString) case (k, JsNumber(v)) => RawHeader(k, v.toString) case _ => throw new Throwable("Invalid header") }.toList case _ => throw new Throwable("Invalid header") } getOrElse List() val code = fields.get(rp.statusCode).map { case JsNumber(c) => // the following throws an exception if the code is // not a whole number or a valid code StatusCode.int2StatusCode(c.toIntExact) case _ => throw new Throwable("Illegal code") } getOrElse (OK) fields.get("body") map { case JsString(str) => interpretHttpResponse(code, headers, str, transid) case _ => terminate(BadRequest, Messages.httpContentTypeError)(transid) } getOrElse { respondWithHeaders(headers) { // note that if header defined a content-type, it will be ignored // since the type must be compatible with the data response complete(code, HttpEntity.Empty) } } } getOrElse { // either the result was not a JsObject or there was an exception validting the // response as an http result terminate(BadRequest, Messages.invalidMedia(`message/http`))(transid) } } private def interpretHttpResponse(code: StatusCode, headers: List[RawHeader], str: String, transid: TransactionId): RequestContext => Unit = { val parsedHeader: Try[MediaType] = headers.find(_.lowercaseName == `Content-Type`.lowercaseName) match { case Some(header) => HttpParser.parseHeader(header) match { case Right(header: `Content-Type`) => val mediaType = header.contentType.mediaType // lookup the media type specified in the content header to see if it is a recognized type 
          MediaTypes.getForKey(mediaType.mainType -> mediaType.subType).map(Success(_)).getOrElse {
            // this is a content-type that is not recognized, reject it
            Failure(RejectRequest(BadRequest, Messages.httpUnknownContentType)(transid))
          }
        case _ => Failure(RejectRequest(BadRequest, Messages.httpUnknownContentType)(transid))
      }
      case None => Success(`text/html`)
    }

    parsedHeader.flatMap { mediaType =>
      if (mediaType.binary) {
        Try(HttpData(Base64.getDecoder().decode(str))).map((mediaType, _))
      } else {
        Success(mediaType, HttpData(str))
      }
    } match {
      case Success((mediaType, data)) =>
        respondWithHeaders(headers) {
          respondWithMediaType(mediaType) {
            complete(code, data)
          }
        }
      case Failure(RejectRequest(code, message)) => terminate(code, message)(transid)
      case _ => terminate(BadRequest, Messages.httpContentTypeError)(transid)
    }
  }
}

trait WhiskWebActionsApi extends Directives with ValidateRequestSize with PostActionActivation {
  services: WhiskServices =>

  /** API invocation path for posting activations directly through the host. */
  protected val webInvokePathSegments: Seq[String]

  /** Mapping of HTTP request fields to action parameter names. */
  protected val webApiDirectives: WebApiDirectives

  /** Store for identities. */
  protected val authStore: AuthStore

  /** The prefix for web invokes e.g., /web. */
  private lazy val webRoutePrefix = {
    pathPrefix(webInvokePathSegments.map(segmentStringToPathMatcher(_)).reduceLeft(_ / _))
  }

  /** Allowed verbs. */
  private lazy val allowedOperations = get | delete | post | put | head | options | patch

  private lazy val validNameSegment = pathPrefix(EntityName.REGEX.r)
  private lazy val packagePrefix = pathPrefix("default".r | EntityName.REGEX.r)

  /** Extracts the HTTP method, headers, query params and unmatched (remaining) path. */
  private val requestMethodParamsAndPath = {
    extract { ctx =>
      val method = ctx.request.method
      val query = ctx.request.message.uri.query
      val path = ctx.unmatchedPath.toString
      val headers = ctx.request.headers
      Context(webApiDirectives, method, headers, path, query)
    }
  }

  def routes(user: Identity)(implicit transid: TransactionId): Route = routes(Some(user))

  def routes()(implicit transid: TransactionId): Route = routes(None)

  /**
   * Adds routes for web based activations. Actions invoked this way are anonymous in that the
   * caller is not authenticated. The intended action must be named in the path as a fully qualified
   * name as in /experimental/web/some-namespace/some-package/some-action. The package is optional
   * in that the action may be in the default package, in which case, the string "default" must be used.
   * If the action doesn't exist (or the namespace is not valid) NotFound is generated. Following the
   * action name, an "extension" is required to specify the desired content type for the response. This
   * extension is one of the supported media types. An example is ".json" for a JSON response or ".html" for
   * a text/html response.
   *
   * Optionally, the result from the action may be projected based on a named property. As in
   * /experimental/web/some-namespace/some-package/some-action/some-property. If the property
   * does not exist in the result then a NotFound error is generated. A path of properties may
   * be supplied to project nested properties.
   *
   * Actions may be exposed to this web proxy by adding an annotation ("export" -> true).
*/ def routes(user: Option[Identity])(implicit transid: TransactionId): Route = { (allowedOperations & webRoutePrefix) { validNameSegment { namespace => packagePrefix { pkg => validNameSegment { seg => handleMatch(namespace, pkg, seg, user) } } } } } /** * Gets package from datastore. * This method is factored out to allow mock testing. */ protected def getPackage(pkgName: FullyQualifiedEntityName)( implicit transid: TransactionId): Future[WhiskPackage] = { WhiskPackage.get(entityStore, pkgName.toDocId) } /** * Gets action from datastore. * This method is factored out to allow mock testing. */ protected def getAction(actionName: FullyQualifiedEntityName)( implicit transid: TransactionId): Future[WhiskAction] = { WhiskAction.get(entityStore, actionName.toDocId) } /** * Gets identity from datastore. * This method is factored out to allow mock testing. */ protected def getIdentity(namespace: EntityName)( implicit transid: TransactionId): Future[Identity] = { Identity.get(authStore, namespace) } private def handleMatch(namespaceSegment: String, pkgSegment: String, actionNameWithExtension: String, onBehalfOf: Option[Identity])( implicit transid: TransactionId) = { def fullyQualifiedActionName(actionName: String) = { val namespace = EntityName(namespaceSegment) val pkgName = if (pkgSegment == "default") None else Some(EntityName(pkgSegment)) namespace.addPath(pkgName).addPath(EntityName(actionName)).toFullyQualifiedEntityName } provide(WhiskWebActionsApi.mediaTranscoderForName(actionNameWithExtension, webApiDirectives.enforceExtension)) { case (actionName, Some(extension)) => // extract request context, checks for overrides of reserved properties, and constructs action arguments // as the context body which may be the incoming request when the content type is JSON or formdata, or // the raw body as __ow_body (and query parameters as __ow_query) otherwise extract(_.request.entity) { e => validateSize(isWhithinRange(e.data.length))(transid) { requestMethodParamsAndPath { context => provide(fullyQualifiedActionName(actionName)) { fullActionName => onComplete(verifyWebAction(fullActionName, onBehalfOf.isDefined)) { case Success((actionOwnerIdentity, action)) => extractEntityAndProcessRequest(actionOwnerIdentity, action, extension, onBehalfOf, context, e) case Failure(t: RejectRequest) => terminate(t.code, t.message) case Failure(t) => logging.error(this, s"exception in handleMatch: $t") terminate(InternalServerError) } } } } } case (_, None) => terminate(NotAcceptable, Messages.contentTypeExtensionNotSupported(WhiskWebActionsApi.allowedExtensions)) } } /** * Checks that subject has right to post an activation and fetch the action * followed by the package and merge parameters. The action is fetched first since * it will not succeed for references relative to a binding, and the export bit is * confirmed before fetching the package and merging parameters. 
* * @return Future that completes with the action and action-owner-identity on success otherwise * a failed future with a request rejection error which may be one of the following: * not entitled (throttled), package/action not found, action not web enabled, * or request overrides final parameters */ private def verifyWebAction(actionName: FullyQualifiedEntityName, authenticated: Boolean)( implicit transid: TransactionId) = { for { // lookup the identity for the action namespace actionOwnerIdentity <- identityLookup(actionName.path.root) flatMap { i => entitlementProvider.checkThrottles(i) map (_ => i) } // lookup the action - since actions are stored relative to package name // the lookup will fail if the package name for the action refers to a binding instead // also merge package and action parameters at the same time // precedence order for parameters: // package.params -> action.params -> query.params -> request.entity (body) -> augment arguments (namespace, path) action <- confirmExportedAction(actionLookup(actionName), authenticated) flatMap { a => if (a.namespace.defaultPackage) { Future.successful(a) } else { pkgLookup(a.namespace.toFullyQualifiedEntityName) map { pkg => (a.inherit(pkg.parameters)) } } } } yield (actionOwnerIdentity, action) } private def extractEntityAndProcessRequest( actionOwnerIdentity: Identity, action: WhiskAction, extension: MediaExtension, onBehalfOf: Option[Identity], context: Context, httpEntity: HttpEntity)( implicit transid: TransactionId) = { def process(body: Option[JsValue], isRawHttpAction: Boolean) = { processRequest(actionOwnerIdentity, action, extension, onBehalfOf, context.withBody(body), isRawHttpAction) } provide(action.annotations.asBool("raw-http").exists(identity)) { isRawHttpAction => httpEntity match { case Empty => process(None, isRawHttpAction) case NonEmpty(ContentType(`application/json`, _), json) if !isRawHttpAction => entity(as[JsObject]) { body => process(Some(body), isRawHttpAction) } case NonEmpty(ContentType(`application/x-www-form-urlencoded`, _), form) if !isRawHttpAction => entity(as[FormData]) { form => val body = form.fields.toMap.toJson.asJsObject process(Some(body), isRawHttpAction) } case NonEmpty(contentType, data) => if (contentType.mediaType.binary) { Try(JsString(Base64.getEncoder.encodeToString(data.toByteArray))) match { case Success(bytes) => process(Some(bytes), isRawHttpAction) case Failure(t) => terminate(BadRequest, Messages.unsupportedContentType(contentType.mediaType)) } } else { val str = JsString(data.asString(HttpCharsets.`UTF-8`)) process(Some(str), isRawHttpAction) } case _ => terminate(BadRequest, Messages.unsupportedContentType) } } } private def processRequest( actionOwnerIdentity: Identity, action: WhiskAction, responseType: MediaExtension, onBehalfOf: Option[Identity], context: Context, isRawHttpAction: Boolean)( implicit transid: TransactionId) = { def queuedActivation = { // checks (1) if any of the query or body parameters override final action parameters // computes overrides if any relative to the reserved __ow_* properties, and (2) if // action is a raw http handler // // NOTE: it is assumed the action parameters do not intersect with the reserved properties // since these are system properties, the action should not define them, and if it does, // they will be overwritten if (isRawHttpAction || context.overrides(webApiDirectives.reservedProperties ++ action.immutableParameters).isEmpty) { val content = context.toActionArgument(onBehalfOf, isRawHttpAction) val waitOverride = 
Some(WhiskActionsApi.maxWaitForBlockingActivation) invokeAction(actionOwnerIdentity, action, Some(JsObject(content)), blocking = true, waitOverride) } else { Future.failed(RejectRequest(BadRequest, Messages.parametersNotAllowed)) } } completeRequest(queuedActivation, projectResultField(context, responseType), responseType) } private def completeRequest( queuedActivation: Future[(ActivationId, Option[WhiskActivation])], projectResultField: => List[String], responseType: MediaExtension)( implicit transid: TransactionId) = { onComplete(queuedActivation) { case Success((activationId, Some(activation))) => val result = activation.resultAsJson if (activation.response.isSuccess || activation.response.isApplicationError) { val resultPath = if (activation.response.isSuccess) { projectResultField } else { // the activation produced an error response: therefore ignore // the requested projection and unwrap the error instead // and attempt to handle it per the desired response type (extension) List(ActivationResponse.ERROR_FIELD) } val result = getFieldPath(activation.resultAsJson, resultPath) result match { case Some(projection) => val marshaler = Future(responseType.transcoder(projection, transid, webApiDirectives)) onComplete(marshaler) { case Success(done) => done // all transcoders terminate the connection case Failure(t) => terminate(InternalServerError) } case _ => terminate(NotFound, Messages.propertyNotFound) } } else { terminate(BadRequest, Messages.errorProcessingRequest) } case Success((activationId, None)) => // blocking invoke which got queued instead // this should not happen, instead it should be a blocking invoke timeout logging.warn(this, "activation returned an id, expecting timeout error instead") terminate(Accepted, Messages.responseNotReady) case Failure(t: BlockingInvokeTimeout) => // blocking invoke which timed out waiting on response logging.info(this, "activation waiting period expired") terminate(Accepted, Messages.responseNotReady) case Failure(t: RejectRequest) => terminate(t.code, t.message) case Failure(t) => logging.error(this, s"exception in completeRequest: $t") terminate(InternalServerError) } } /** * Gets package from datastore and confirms it is not a binding. */ private def pkgLookup(pkg: FullyQualifiedEntityName)( implicit transid: TransactionId): Future[WhiskPackage] = { getPackage(pkg).filter { _.binding.isEmpty } recoverWith { case _: ArtifactStoreException | DeserializationException(_, _, _) => // if the package lookup fails or the package doesn't conform to expected invariants, // fail the request with BadRequest so as not to leak information about the existence // of packages that are otherwise private logging.info(this, s"package which does not exist") Future.failed(RejectRequest(NotFound)) case _: NoSuchElementException => logging.warn(this, s"'$pkg' is a binding") Future.failed(RejectRequest(NotFound)) } } /** * Gets the action if it exists and fail future with RejectRequest if it does not. * * @return future action document or NotFound rejection */ private def actionLookup(actionName: FullyQualifiedEntityName)( implicit transid: TransactionId): Future[WhiskAction] = { getAction(actionName) recoverWith { case _: ArtifactStoreException | DeserializationException(_, _, _) => Future.failed(RejectRequest(NotFound)) } } /** * Gets the identity for the namespace. 
*/ private def identityLookup(namespace: EntityName)( implicit transid: TransactionId): Future[Identity] = { getIdentity(namespace) recoverWith { case _: ArtifactStoreException | DeserializationException(_, _, _) => Future.failed(RejectRequest(NotFound)) case t => // leak nothing no matter what, failure is already logged so skip here Future.failed(RejectRequest(NotFound)) } } /** * Checks if an action is exported (i.e., carries the required annotation). */ private def confirmExportedAction(actionLookup: Future[WhiskAction], authenticated: Boolean)( implicit transid: TransactionId): Future[WhiskAction] = { actionLookup flatMap { action => val requiresAuthenticatedUser = action.annotations.asBool("require-whisk-auth").exists(identity) val isExported = action.annotations.asBool("web-export").exists(identity) if ((isExported && requiresAuthenticatedUser && authenticated) || (isExported && !requiresAuthenticatedUser)) { logging.info(this, s"${action.fullyQualifiedName(true)} is exported") Future.successful(action) } else if (!isExported) { logging.info(this, s"${action.fullyQualifiedName(true)} not exported") Future.failed(RejectRequest(NotFound)) } else { logging.info(this, s"${action.fullyQualifiedName(true)} requires authentication") Future.failed(RejectRequest(Unauthorized)) } } } /** * Determines the result projection path, if any. * * @return optional list of projections */ private def projectResultField(context: Context, responseType: MediaExtension): List[String] = { val projection = if (responseType.projectionAllowed) { Option(context.path) .filter(_.nonEmpty) .map(_.split("/").filter(_.nonEmpty).toList) .orElse(responseType.defaultProjection) } else responseType.defaultProjection projection.getOrElse(List()) } }
xin-cai/openwhisk
core/controller/src/main/scala/whisk/core/controller/WebActions.scala
Scala
apache-2.0
30,415
/*********************************************************************** * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.lambda.stream.stats import org.locationtech.geomesa.curve.TimePeriod.TimePeriod import org.locationtech.geomesa.index.stats.GeoMesaStats.GeoMesaStatWriter import org.locationtech.geomesa.index.stats.{GeoMesaStats, NoopStatWriter} import org.locationtech.geomesa.lambda.stream.TransientStore import org.locationtech.geomesa.utils.collection.SelfClosingIterator import org.locationtech.geomesa.utils.stats._ import org.opengis.feature.simple.SimpleFeatureType import org.opengis.filter.Filter import scala.reflect.ClassTag class TransientStats(store: TransientStore) extends GeoMesaStats { override val writer: GeoMesaStatWriter = NoopStatWriter override def getCount(sft: SimpleFeatureType, filter: Filter, exact: Boolean): Option[Long] = Some(SelfClosingIterator(store.read(Option(filter).filter(_ != Filter.INCLUDE))).length) override def getMinMax[T]( sft: SimpleFeatureType, attribute: String, filter: Filter, exact: Boolean): Option[MinMax[T]] = getStat[MinMax[T]](sft, Stat.MinMax(attribute), filter, exact = true) override def getEnumeration[T]( sft: SimpleFeatureType, attribute: String, filter: Filter, exact: Boolean): Option[EnumerationStat[T]] = { if (!exact) { None } else { getStat[EnumerationStat[T]](sft, Stat.Enumeration(attribute), filter, exact) } } override def getFrequency[T]( sft: SimpleFeatureType, attribute: String, precision: Int, filter: Filter, exact: Boolean): Option[Frequency[T]] = { if (!exact) { None } else { getStat[Frequency[T]](sft, Stat.Frequency(attribute, precision), filter, exact) } } override def getTopK[T]( sft: SimpleFeatureType, attribute: String, filter: Filter, exact: Boolean): Option[TopK[T]] = { if (!exact) { None } else { getStat[TopK[T]](sft, Stat.TopK(attribute), filter, exact) } } override def getHistogram[T]( sft: SimpleFeatureType, attribute: String, bins: Int, min: T, max: T, filter: Filter, exact: Boolean): Option[Histogram[T]] = { if (!exact) { None } else { getStat[Histogram[T]](sft, Stat.Histogram(attribute, bins, min, max)(ClassTag(min.getClass)), filter, exact) } } override def getZ3Histogram( sft: SimpleFeatureType, geom: String, dtg: String, period: TimePeriod, bins: Int, filter: Filter, exact: Boolean): Option[Z3Histogram] = { if (!exact) { None } else { getStat[Z3Histogram](sft, Stat.Z3Histogram(geom, dtg, period, bins), filter, exact) } } override def getStat[T <: Stat]( sft: SimpleFeatureType, query: String, filter: Filter, exact: Boolean): Option[T] = { if (!exact) { None } else { val stat = Stat(sft, query).asInstanceOf[T] SelfClosingIterator(store.read(Option(filter).filter(_ != Filter.INCLUDE))).foreach(stat.observe) Some(stat) } } override def close(): Unit = {} }
aheyne/geomesa
geomesa-lambda/geomesa-lambda-datastore/src/main/scala/org/locationtech/geomesa/lambda/stream/stats/TransientStats.scala
Scala
apache-2.0
3,513
package org.scalacoin.marshallers.transaction import org.scalacoin.protocol.transaction.TransactionInput import org.scalatest.{MustMatchers, FlatSpec} import spray.json._ import DefaultJsonProtocol._ /** * Created by chris on 12/27/15. */ class TransactionInputMarshallerTest extends FlatSpec with MustMatchers { val str = """ |{ | "txid" : "808105ed5feff1c05daf6efd202e5966fcdda71ca961e393a1fc83f2b03315d1", | "vout" : 0, | "scriptSig" : { | "asm" : "0 3045022100b4062edd75b5b3117f28ba937ed737b10378f762d7d374afabf667180dedcc62022005d44c793a9d787197e12d5049da5e77a09046014219b31e9c6b89948f648f1701 3045022100b3b0c0273fc2c531083701f723e03ea3d9111e4bbca33bdf5b175cec82dcab0802206650462db37f9b4fe78da250a3b339ab11e11d84ace8f1b7394a1f6db0960ba401 5221025e9adcc3d65c11346c8a6069d6ebf5b51b348d1d6dc4b95e67480c34dc0bc75c21030585b3c80f4964bf0820086feda57c8e49fa1eab925db7c04c985467973df96521037753a5e3e9c4717d3f81706b38a6fb82b5fb89d29e580d7b98a37fea8cdefcad53ae", | "hex" : "00483045022100b4062edd75b5b3117f28ba937ed737b10378f762d7d374afabf667180dedcc62022005d44c793a9d787197e12d5049da5e77a09046014219b31e9c6b89948f648f1701483045022100b3b0c0273fc2c531083701f723e03ea3d9111e4bbca33bdf5b175cec82dcab0802206650462db37f9b4fe78da250a3b339ab11e11d84ace8f1b7394a1f6db0960ba4014c695221025e9adcc3d65c11346c8a6069d6ebf5b51b348d1d6dc4b95e67480c34dc0bc75c21030585b3c80f4964bf0820086feda57c8e49fa1eab925db7c04c985467973df96521037753a5e3e9c4717d3f81706b38a6fb82b5fb89d29e580d7b98a37fea8cdefcad53ae" | }, | "sequence" : 4294967295 |} """.stripMargin val json = str.parseJson "TransactionInputMarshaller" must "marshall a json input" in { val input : TransactionInput = TransactionInputMarshaller.TransactionInputFormatter.read(json) input.previousOutput.txId must be ("808105ed5feff1c05daf6efd202e5966fcdda71ca961e393a1fc83f2b03315d1") input.previousOutput.vout must be (0) input.scriptSignature.asm must be ("0 3045022100b4062edd75b5b3117f28ba937ed737b10378f762d7d374afabf667180dedcc62022005d44c793a9d787197e12d5049da5e77a09046014219b31e9c6b89948f648f1701 3045022100b3b0c0273fc2c531083701f723e03ea3d9111e4bbca33bdf5b175cec82dcab0802206650462db37f9b4fe78da250a3b339ab11e11d84ace8f1b7394a1f6db0960ba401 5221025e9adcc3d65c11346c8a6069d6ebf5b51b348d1d6dc4b95e67480c34dc0bc75c21030585b3c80f4964bf0820086feda57c8e49fa1eab925db7c04c985467973df96521037753a5e3e9c4717d3f81706b38a6fb82b5fb89d29e580d7b98a37fea8cdefcad53ae") input.scriptSignature.hex must be ("00483045022100b4062edd75b5b3117f28ba937ed737b10378f762d7d374afabf667180dedcc62022005d44c793a9d787197e12d5049da5e77a09046014219b31e9c6b89948f648f1701483045022100b3b0c0273fc2c531083701f723e03ea3d9111e4bbca33bdf5b175cec82dcab0802206650462db37f9b4fe78da250a3b339ab11e11d84ace8f1b7394a1f6db0960ba4014c695221025e9adcc3d65c11346c8a6069d6ebf5b51b348d1d6dc4b95e67480c34dc0bc75c21030585b3c80f4964bf0820086feda57c8e49fa1eab925db7c04c985467973df96521037753a5e3e9c4717d3f81706b38a6fb82b5fb89d29e580d7b98a37fea8cdefcad53ae") input.sequence must be (scala.math.BigInt("4294967295")) } }
scalacoin/scalacoin
src/test/scala/org/scalacoin/marshallers/transaction/TransactionInputMarshallerTest.scala
Scala
mit
3,104
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming import java.util.concurrent.TimeUnit import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.functions._ import org.apache.spark.sql.streaming.{StreamingQueryException, StreamTest} import org.apache.spark.util.ManualClock class RateSourceSuite extends StreamTest { import testImplicits._ case class AdvanceRateManualClock(seconds: Long) extends AddData { override def addData(query: Option[StreamExecution]): (Source, Offset) = { assert(query.nonEmpty) val rateSource = query.get.logicalPlan.collect { case StreamingExecutionRelation(source, _) if source.isInstanceOf[RateStreamSource] => source.asInstanceOf[RateStreamSource] }.head rateSource.clock.asInstanceOf[ManualClock].advance(TimeUnit.SECONDS.toMillis(seconds)) (rateSource, rateSource.getOffset.get) } } test("basic") { val input = spark.readStream .format("rate") .option("rowsPerSecond", "10") .option("useManualClock", "true") .load() testStream(input)( AdvanceRateManualClock(seconds = 1), CheckLastBatch((0 until 10).map(v => new java.sql.Timestamp(v * 100L) -> v): _*), StopStream, StartStream(), // Advance 2 seconds because creating a new RateSource will also create a new ManualClock AdvanceRateManualClock(seconds = 2), CheckLastBatch((10 until 20).map(v => new java.sql.Timestamp(v * 100L) -> v): _*) ) } test("uniform distribution of event timestamps") { val input = spark.readStream .format("rate") .option("rowsPerSecond", "1500") .option("useManualClock", "true") .load() .as[(java.sql.Timestamp, Long)] .map(v => (v._1.getTime, v._2)) val expectedAnswer = (0 until 1500).map { v => (math.round(v * (1000.0 / 1500)), v) } testStream(input)( AdvanceRateManualClock(seconds = 1), CheckLastBatch(expectedAnswer: _*) ) } test("valueAtSecond") { import RateStreamSource._ assert(valueAtSecond(seconds = 0, rowsPerSecond = 5, rampUpTimeSeconds = 0) === 0) assert(valueAtSecond(seconds = 1, rowsPerSecond = 5, rampUpTimeSeconds = 0) === 5) assert(valueAtSecond(seconds = 0, rowsPerSecond = 5, rampUpTimeSeconds = 2) === 0) assert(valueAtSecond(seconds = 1, rowsPerSecond = 5, rampUpTimeSeconds = 2) === 1) assert(valueAtSecond(seconds = 2, rowsPerSecond = 5, rampUpTimeSeconds = 2) === 3) assert(valueAtSecond(seconds = 3, rowsPerSecond = 5, rampUpTimeSeconds = 2) === 8) assert(valueAtSecond(seconds = 0, rowsPerSecond = 10, rampUpTimeSeconds = 4) === 0) assert(valueAtSecond(seconds = 1, rowsPerSecond = 10, rampUpTimeSeconds = 4) === 2) assert(valueAtSecond(seconds = 2, rowsPerSecond = 10, rampUpTimeSeconds = 4) === 6) assert(valueAtSecond(seconds = 3, rowsPerSecond = 10, rampUpTimeSeconds = 4) === 12) assert(valueAtSecond(seconds = 4, rowsPerSecond = 10, rampUpTimeSeconds = 4) === 20) assert(valueAtSecond(seconds = 5, 
rowsPerSecond = 10, rampUpTimeSeconds = 4) === 30) } test("rampUpTime") { val input = spark.readStream .format("rate") .option("rowsPerSecond", "10") .option("rampUpTime", "4s") .option("useManualClock", "true") .load() .as[(java.sql.Timestamp, Long)] .map(v => (v._1.getTime, v._2)) testStream(input)( AdvanceRateManualClock(seconds = 1), CheckLastBatch((0 until 2).map(v => v * 500 -> v): _*), // speed = 2 AdvanceRateManualClock(seconds = 1), CheckLastBatch((2 until 6).map(v => 1000 + (v - 2) * 250 -> v): _*), // speed = 4 AdvanceRateManualClock(seconds = 1), CheckLastBatch({ Seq(2000 -> 6, 2167 -> 7, 2333 -> 8, 2500 -> 9, 2667 -> 10, 2833 -> 11) }: _*), // speed = 6 AdvanceRateManualClock(seconds = 1), CheckLastBatch((12 until 20).map(v => 3000 + (v - 12) * 125 -> v): _*), // speed = 8 AdvanceRateManualClock(seconds = 1), // Now we should reach full speed CheckLastBatch((20 until 30).map(v => 4000 + (v - 20) * 100 -> v): _*), // speed = 10 AdvanceRateManualClock(seconds = 1), CheckLastBatch((30 until 40).map(v => 5000 + (v - 30) * 100 -> v): _*), // speed = 10 AdvanceRateManualClock(seconds = 1), CheckLastBatch((40 until 50).map(v => 6000 + (v - 40) * 100 -> v): _*) // speed = 10 ) } test("numPartitions") { val input = spark.readStream .format("rate") .option("rowsPerSecond", "10") .option("numPartitions", "6") .option("useManualClock", "true") .load() .select(spark_partition_id()) .distinct() testStream(input)( AdvanceRateManualClock(1), CheckLastBatch((0 until 6): _*) ) } testQuietly("overflow") { val input = spark.readStream .format("rate") .option("rowsPerSecond", Long.MaxValue.toString) .option("useManualClock", "true") .load() .select(spark_partition_id()) .distinct() testStream(input)( AdvanceRateManualClock(2), ExpectFailure[ArithmeticException](t => { Seq("overflow", "rowsPerSecond").foreach { msg => assert(t.getMessage.contains(msg)) } }) ) } testQuietly("illegal option values") { def testIllegalOptionValue( option: String, value: String, expectedMessages: Seq[String]): Unit = { val e = intercept[StreamingQueryException] { spark.readStream .format("rate") .option(option, value) .load() .writeStream .format("console") .start() .awaitTermination() } assert(e.getCause.isInstanceOf[IllegalArgumentException]) for (msg <- expectedMessages) { assert(e.getCause.getMessage.contains(msg)) } } testIllegalOptionValue("rowsPerSecond", "-1", Seq("-1", "rowsPerSecond", "positive")) testIllegalOptionValue("numPartitions", "-1", Seq("-1", "numPartitions", "positive")) } test("user-specified schema given") { val exception = intercept[AnalysisException] { spark.readStream .format("rate") .schema(spark.range(1).schema) .load() } assert(exception.getMessage.contains( "rate source does not support a user-specified schema")) } }
minixalpha/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/RateSourceSuite.scala
Scala
apache-2.0
7,166
package cromwell.engine.workflow import akka.actor.ActorSystem import cromwell.core.WorkflowId import cromwell.engine import cromwell.engine._ import cromwell.engine.backend.{CallMetadata, WorkflowDescriptor} import cromwell.engine.db.slick._ import cromwell.engine.db.{ExecutionDatabaseKey, ExecutionInfosByExecution} import cromwell.engine.finalcall.FinalCall._ import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException import cromwell.webservice._ import org.joda.time.DateTime import spray.json._ import wdl4s._ import scala.concurrent.Future object WorkflowMetadataBuilder { private type DBMap[+V] = Map[ExecutionDatabaseKey, V] // TODO: This assertion could be added to the db layer: "In the future I'll fail if the workflow doesn't exist" def assertWorkflowExistence(id: WorkflowId, workflowState: Option[WorkflowState]): Future[Unit] = { // Confirm the workflow exists by querying its state. If no state is found the workflow doesn't exist. workflowState match { case None => Future.failed(new WorkflowNotFoundException(s"Workflow '$id' not found")) case _ => Future.successful(Unit) } } private def build(workflowDescriptor: WorkflowDescriptor, execution: WorkflowExecution, workflowOutputs: engine.WorkflowOutputs, callMetadata: Map[FullyQualifiedName, Seq[CallMetadata]], workflowFailures: Traversable[FailureEventEntry]): WorkflowMetadataResponse = { val startDate = new DateTime(execution.startDt) val endDate = execution.endDt map { new DateTime(_) } val workflowInputs = workflowDescriptor.sourceFiles.inputsJson.parseJson.asInstanceOf[JsObject] val failures = if (workflowFailures.isEmpty) None else Option(workflowFailures) WorkflowMetadataResponse( id = execution.workflowExecutionUuid.toString, workflowName = execution.name, status = execution.status, // We currently do not make a distinction between the submission and start dates of a workflow, but it's // possible at least theoretically that a workflow might not begin to execute immediately upon submission. 
submission = startDate, start = Option(startDate), end = endDate, inputs = workflowInputs, outputs = Option(workflowOutputs) map { _.mapToValues }, calls = callMetadata, failures = failures.map(_.toSeq)) } private def callFailuresMap(failureEvents: Seq[QualifiedFailureEventEntry]): DBMap[Seq[FailureEventEntry]] = { failureEvents filter { _.execution.isDefined } groupBy { _.execution } map { case (key, qualifiedEntries: Seq[QualifiedFailureEventEntry]) => key.get -> (qualifiedEntries map { _.dequalify }) } } private def emptySeq[A] = Future.successful(Seq.empty[A]) private def retrieveTraversable[A](runQuery: Boolean, queryResult: => Future[Traversable[A]], defaultResult: => Future[Traversable[A]] = emptySeq): Future[Traversable[A]] = { if (runQuery) queryResult else defaultResult } private def emptyMap[V] = Future.successful(Map.empty[ExecutionDatabaseKey, V]) private def retrieveMap[V](runQuery: Boolean, queryResult: => Future[DBMap[V]], defaultResult: => Future[DBMap[V]] = emptyMap): Future[DBMap[V]] = { if (runQuery) queryResult else defaultResult } } class WorkflowMetadataBuilder(id: WorkflowId, parameters: WorkflowMetadataQueryParameters)(implicit actorSystem: ActorSystem) { private[this] implicit val ec = actorSystem.dispatcher import WorkflowMetadataBuilder._ import cromwell.engine.db.DataAccess.globalDataAccess private def futureAssertWorkflowExistsByState = for { workflowState <- globalDataAccess.getWorkflowState(id) // TODO: This assertion could be added to the db layer: "In the future I'll fail if the workflow doesn't exist" _ <- assertWorkflowExistence(id, workflowState) } yield () private def futureWorkflowExecutionAndAux = globalDataAccess.getWorkflowExecutionAndAux(id) // If outputs requested, don't retrieve the execution infos, only executions private def futureInfosByExecution = retrieveTraversable(parameters.outputs, globalDataAccess.infosByExecution(id), globalDataAccess.getExecutions(id) map { _ map { ExecutionInfosByExecution(_, Seq.empty) } }) // If outputs requested, get the workflow outputs private def futureWorkflowOutputs = retrieveTraversable(parameters.outputs, globalDataAccess.getWorkflowOutputs(id)) // If timings requested, we need the call statuses and execution events private def futureCallToStatusMap = retrieveMap(parameters.timings, globalDataAccess.getExecutionStatuses(id)) private def futureExecutionEvents = retrieveMap(parameters.timings, globalDataAccess.getAllExecutionEvents(id)) // Drop the below if timings _or_ outputs requested private def futureCallInputs = retrieveTraversable(parameters.timings && parameters.outputs, globalDataAccess.getAllInputs(id)) private def futureCallOutputs = retrieveTraversable(parameters.timings && parameters.outputs, globalDataAccess.getAllOutputs(id)) private def futureCallCacheData = retrieveTraversable(parameters.timings && parameters.outputs, globalDataAccess.callCacheDataByExecution(id)) private def futureRuntimeAttributes = retrieveMap(parameters.timings && parameters.outputs, globalDataAccess.getAllRuntimeAttributes(id)) private def futureFailures = retrieveTraversable(parameters.timings && parameters.outputs, globalDataAccess.getFailureEvents(id)) def build(): Future[WorkflowMetadataResponse] = { for { assertWorkflowExistsByState <- futureAssertWorkflowExistsByState workflowExecutionAndAux <- futureWorkflowExecutionAndAux infosByExecution <- futureInfosByExecution workflowOutputs <- futureWorkflowOutputs callToStatusMap <- futureCallToStatusMap executionEvents <- futureExecutionEvents callInputs <- 
futureCallInputs callOutputs <- futureCallOutputs callCacheData <- futureCallCacheData runtimeAttributes <- futureRuntimeAttributes failures <- futureFailures // Database work complete, but we do need one more future to get the workflow descriptor workflowDescriptor <- workflowDescriptorFromExecutionAndAux(workflowExecutionAndAux) } yield { val execution = workflowExecutionAndAux.execution val nonFinalEvents = executionEvents.filterKeys(!_.fqn.isFinalCall) val nonFinalInfosByExecution = infosByExecution.filterNot(_.execution.callFqn.isFinalCall) val wfFailures = failures collect { case QualifiedFailureEventEntry(_, None, message, timestamp) => FailureEventEntry(message, timestamp) } val callFailures = callFailuresMap(failures.toSeq) val engineWorkflowOutputs = SymbolStoreEntry.toWorkflowOutputs(workflowOutputs) val callMetadata = CallMetadataBuilder.build(nonFinalInfosByExecution, callInputs, callOutputs, nonFinalEvents, runtimeAttributes, callCacheData, callFailures) WorkflowMetadataBuilder.build(workflowDescriptor, execution, engineWorkflowOutputs, callMetadata, wfFailures) } } }
cowmoo/cromwell
engine/src/main/scala/cromwell/engine/workflow/WorkflowMetadataBuilder.scala
Scala
bsd-3-clause
7,302
package com.datlinq.datafiniti.config import com.datlinq.datafiniti.config.DatafinitiAPITypes._ /** * Created by Tom Lous on 30/08/2017. * Copyright © 2017 Datlinq B.V.. */ object DatafinitiAPIViewsV4 { sealed trait APIViewV4 { def name: String def apiType: APIType override def toString: String = name def toOptionString: Option[String] = Option(name) } object APIViewV4 { def fromString(view_name: String): APIViewV4 = { List(BusinessesDefault, BusinessesAllFlatMenus, BusinessesAllFlatReviews, BusinessesAllNested, BusinessesNoReviews, BusinessesBasic, ProductsDefault, ProductsAllNested, ProductsFlatPrices, ProductsFlatReviews, PropertiesDefault, PropertiesFlatPrices, PropertiesFlatReviews) .find(_.toString == view_name) match { case Some(view) => view case None => view_name.split("_").toList match { case Nil => CustomViewV4("", APIType.fromString("")) case typeName :: Nil => CustomViewV4(typeName, APIType.fromString(typeName)) case typeName :: parts => CustomViewV4(parts.mkString("_"), APIType.fromString(typeName)) } } } } case class CustomViewV4(name: String, apiType: APIType) extends APIViewV4 /** * Businesses */ object BusinessesDefault extends APIViewV4 { override val name: String = null override val apiType: APIType = Businesses } object BusinessesAllFlatMenus extends APIViewV4 { override val name: String = "business_flat_menus" override val apiType: APIType = Businesses } object BusinessesAllFlatReviews extends APIViewV4 { override val name: String = "business_flat_reviews" override val apiType: APIType = Businesses } object BusinessesAllNested extends APIViewV4 { override val name: String = "business_all_nested" override val apiType: APIType = Businesses } object BusinessesNoReviews extends APIViewV4 { override val name: String = "business_no_reviews" override val apiType: APIType = Businesses } object BusinessesBasic extends APIViewV4 { override val name: String = "business_basic" override val apiType: APIType = Businesses } /** * Products */ object ProductsDefault extends APIViewV4 { override val name: String = null override val apiType: APIType = Products } object ProductsAllNested extends APIViewV4 { override val name: String = "product_all_nested" override val apiType: APIType = Products } object ProductsFlatPrices extends APIViewV4 { override val name: String = "product_flat_prices" override val apiType: APIType = Products } object ProductsFlatReviews extends APIViewV4 { override val name: String = "product_flat_reviews" override val apiType: APIType = Products } /** * Properties */ object PropertiesDefault extends APIViewV4 { override val name: String = null override val apiType: APIType = Properties } object PropertiesFlatPrices extends APIViewV4 { override val name: String = "property_flat_prices" override val apiType: APIType = Properties } object PropertiesFlatReviews extends APIViewV4 { override val name: String = "property_flat_reviews" override val apiType: APIType = Properties } }
datlinq/scalafiniti
src/main/scala/com/datlinq/datafiniti/config/DatafinitiAPIViewsV4.scala
Scala
mit
3,299
package gapt.proofs.lkt import gapt.cutintro.CutIntroduction import gapt.examples.{ Pi2Pigeonhole, Pi3Pigeonhole, nTape4 } import gapt.expr.{ normalize => norm, _ } import gapt.expr.formula.hol.containsQuantifierOnLogicalLevel import gapt.proofs.context.Context import gapt.proofs.lk.transformations.eliminateDefinitions import gapt.proofs.lk.util.instanceProof import gapt.proofs.lk.util.solvePropositional import gapt.proofs.lk.{ LKProof, normalizeLKt } import gapt.proofs.{ SequentMatchers, lk } import gapt.provers.escargot.Escargot import gapt.utils.Maybe import org.specs2.matcher.Matcher import org.specs2.mutable.Specification import gapt.examples.sequence.LinearExampleProof class LktTest extends Specification with SequentMatchers { def beMostlyCutFree: Matcher[LKt] = beLike { case p => p.subProofs.foreach { case Cut( f, _, _ ) => require( !containsQuantifierOnLogicalLevel( f ) ) containsQuantifierOnLogicalLevel( f ) must_== false case _ => } ok } def beGood( implicit ctx: Maybe[Context] ): Matcher[LKProof] = beLike { case lk => val ( p0, lctx ) = LKToLKt( lk ) check( p0, lctx ) val p1 = atomizeEquality( p0, lctx ) check( p1, lctx ) val p2 = normalize.withDebug( p1, lctx ) check( p2, lctx ) p2 must beMostlyCutFree val p3 = LKtToLK( p2, lctx ) p3.endSequent must beMultiSetEqual( lk.endSequent ) ctx.foreach( _.check( p3 ) ) ok } def beInductivelyGood( implicit ctx: Context ): Matcher[LKProof] = beLike { case lk => val ( p0, lctx ) = LKToLKt( lk ) check( p0, lctx ) val p1 = atomizeEquality( p0, lctx ) check( p1, lctx ) val p2 = normalizeLKt.induction( p1, lctx, debugging = true ) check( p2, lctx ) p2 must beMostlyCutFree val p3 = LKtToLK( p2, lctx ) p3.endSequent must beMultiSetEqual( lk.endSequent ) ctx.check( p3 ) ok } "reduce 1" in { val Right( l ) = solvePropositional( hos"a & (a -> b) :- ~ ~b" ) val Right( r ) = solvePropositional( hos"~ ~b :- b" ) lk.rules.CutRule( l, r, hof"~ ~b" ) must beGood } "fol 1" in { val Some( l ) = Escargot.withDeskolemization.getLKProof( hos"!x (p x -> p (s x)) :- !x (p x -> p (s (s x)))" ) val Some( r ) = Escargot.withDeskolemization.getLKProof( hos"!x (p x -> p (s (s x))), p 0 :- p (s (s (s (s 0))))" ) lk.rules.CutRule( l, r, hof"!x (p x -> p (s (s x)))" ) must beGood } "fol 2" in { CutIntroduction( LinearExampleProof( 18 ) ).get must beGood } "fol 3" in { Pi2Pigeonhole.proof must beGood } "fol 4" in { Pi3Pigeonhole.proof must beGood } "lattice" in { import gapt.examples.lattice._ proof must beGood eliminateDefinitions( proof ) must beGood } "theory 1" in { import gapt.examples.theories.nat._ addcomm.combined() must beGood } "theory 2" in { import gapt.examples.theories.nat._ instanceProof( add0l.combined(), le"s 0" ) must beInductivelyGood } "theory 3" in { import gapt.examples.theories.nat._ instanceProof( addcomm.combined(), le"s 0", le"s (s 0)" ) must beInductivelyGood } }
gapt/gapt
tests/src/test/scala/gapt/proofs/lkt/LktTest.scala
Scala
gpl-3.0
3,173
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.keras import com.intel.analytics.bigdl.dllib.nn._ import com.intel.analytics.bigdl.dllib.utils.{Shape, TestUtils} class UpSampling1DSpec extends KerasBaseSpec { "UpSampling1D forward with size 1" should "work properly" in { val kerasCode = """ |input_tensor = Input(shape=[3, 4]) |input = np.random.uniform(-1, 1, [2, 3, 4]) |output_tensor = UpSampling1D(1)(input_tensor) |model = Model(input=input_tensor, output=output_tensor) """.stripMargin val model = UpSampling1D[Float](1) checkOutputAndGrad(model, kerasCode) } "UpSampling1D forward with size 2" should "work properly" in { val kerasCode = """ |input_tensor = Input(shape=[3, 4]) |input = np.random.uniform(-1, 1, [2, 3, 4]) |output_tensor = UpSampling1D(2)(input_tensor) |model = Model(input=input_tensor, output=output_tensor) """.stripMargin val model = UpSampling1D[Float](2) checkOutputAndGrad(model, kerasCode) } "UpSampling1D computeOutputShape" should "work properly" in { val layer = UpSampling1D[Float](3) TestUtils.compareOutputShape(layer, Shape(4, 5)) should be (true) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/keras/UpSampling1DSpec.scala
Scala
apache-2.0
1,818
import scala.reflect.{ClassTag, classTag} class Foo[@specialized A: ClassTag] { // conflicting in bounds, expect a normalized member calling m // and bridge + implementation in specialized subclasses // and overloads here according to specialization on A def m1[@specialized B <: A](x: B, y: A) = goal(x) // conflicting, unsolvable, expect a warning def m2[@specialized B <: String](x: B) = x.concat("a") // conflicting in bounds, no mention of other spec members // expect an overload here plus implementation in // compatible specialized subclasses def m3[@specialized B >: A](x: B) = () // non-conflicting, expect a normalized overload implementation here def m4[@specialized T, U <: Ordered[T]](x: T, y: U) = () // non-conflicting, expect a normalized overload implementation here def m5[@specialized B](x: B) = x // non-conflicting, expect a normalized implementation here // and specialized implementations for all expansions in specialized subclasses def m6[@specialized B](x: B, y: A) = goal(y) def goal(x: A) = { val xs = new Array[A](1) xs(0) = x } }
yusuke2255/dotty
tests/untried/pos/spec-params-new.scala
Scala
bsd-3-clause
1,123
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.system.kafka import org.apache.samza.SamzaException import org.apache.samza.config.{StorageConfig, MapConfig} import org.apache.samza.metrics.MetricsRegistryMap import org.junit.Assert._ import org.junit.Test import scala.collection.JavaConverters._ class TestKafkaSystemFactory { @Test def testFailWhenNoSerdeDefined { val producerFactory = new KafkaSystemFactory try { producerFactory.getProducer( "test", new MapConfig(Map[String, String]().asJava), new MetricsRegistryMap) fail("Expected to get a Samza exception.") } catch { case e: SamzaException => None // expected case e: Exception => fail("Expected SamzaException, but got " + e) } } @Test def testFailWhenSerdeIsInvalid { val producerFactory = new KafkaSystemFactory val config = new MapConfig(Map[String, String]( "streams.test.serde" -> "failme").asJava) try { producerFactory.getProducer( "test", config, new MetricsRegistryMap) fail("Expected to get a Samza exception.") } catch { case e: SamzaException => None // expected case e: Exception => fail("Expected SamzaException, but got " + e) } } @Test def testHappyPath { val producerFactory = new KafkaSystemFactory val config = new MapConfig(Map[String, String]( "job.name" -> "test", "systems.test.producer.bootstrap.servers" -> "", "systems.test.samza.key.serde" -> "json", "systems.test.samza.msg.serde" -> "json", "serializers.registry.json.class" -> "samza.serializers.JsonSerdeFactory").asJava) var producer = producerFactory.getProducer( "test", config, new MetricsRegistryMap) assertNotNull(producer) assertTrue(producer.isInstanceOf[KafkaSystemProducer]) producer = producerFactory.getProducer( "test", config, new MetricsRegistryMap) assertNotNull(producer) assertTrue(producer.isInstanceOf[KafkaSystemProducer]) } @Test def testInjectedProducerProps { val configMap = Map[String, String]( StorageConfig.FACTORY.format("system1") -> "some.factory.Class", StorageConfig.CHANGELOG_STREAM.format("system1") -> "system1.stream1", StorageConfig.FACTORY.format("system2") -> "some.factory.Class") val config = new MapConfig(configMap.asJava) assertEquals(Map[String, String](), KafkaSystemFactory.getInjectedProducerProperties("system3", config)) assertEquals(Map[String, String](), KafkaSystemFactory.getInjectedProducerProperties("system2", config)) assertEquals(Map[String, String]("compression.type" -> "none"), KafkaSystemFactory.getInjectedProducerProperties("system1", config)) } }
lhaiesp/samza
samza-kafka/src/test/scala/org/apache/samza/system/kafka/TestKafkaSystemFactory.scala
Scala
apache-2.0
3,547
/** * Copyright 2011-2016 GatlingCorp (http://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.recorder.http import java.net.InetSocketAddress import java.util.concurrent.TimeUnit import io.gatling.recorder.config.RecorderConfiguration import io.gatling.recorder.controller.RecorderController import io.gatling.recorder.http.channel.BootstrapFactory._ import io.gatling.recorder.http.ssl.SslServerContext import com.typesafe.scalalogging.StrictLogging import io.netty.channel.nio.NioEventLoopGroup import io.netty.channel.group.DefaultChannelGroup import io.netty.util.concurrent.GlobalEventExecutor private[recorder] case class HttpProxy(controller: RecorderController)(implicit config: RecorderConfiguration) extends StrictLogging { def outgoingProxy = for { host <- config.proxy.outgoing.host port <- config.proxy.outgoing.port } yield (host, port) def outgoingUsername = config.proxy.outgoing.username def outgoingPassword = config.proxy.outgoing.password private val clientGroup = new NioEventLoopGroup private val serverBossGroup = new NioEventLoopGroup(1) private val serverWorkerGroup = new NioEventLoopGroup private val group = new DefaultChannelGroup("Gatling_Recorder", GlobalEventExecutor.INSTANCE) val userBootstrap = newUserBootstrap(serverBossGroup, serverWorkerGroup, this, config) // covers both http and https group.add(userBootstrap.bind(new InetSocketAddress(config.proxy.port)).sync.channel) val remoteBootstrap = newRemoteBootstrap(clientGroup, ssl = false, config) val secureRemoteBootstrap = newRemoteBootstrap(clientGroup, ssl = true, config) val sslServerContext = SslServerContext(config) def shutdown(): Unit = { group.close.awaitUninterruptibly clientGroup.shutdownGracefully(0, 2, TimeUnit.SECONDS) serverBossGroup.shutdownGracefully(0, 2, TimeUnit.SECONDS) serverWorkerGroup.shutdownGracefully(0, 2, TimeUnit.SECONDS) } }
GabrielPlassard/gatling
gatling-recorder/src/main/scala/io/gatling/recorder/http/HttpProxy.scala
Scala
apache-2.0
2,469
/* * Copyright (c) 2014-2021 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.reactive.consumers import cats.laws._ import cats.laws.discipline._ import monix.execution.Callback import monix.execution.Ack.{Continue, Stop} import monix.execution.atomic.{Atomic, AtomicInt, AtomicLong} import monix.execution.cancelables.{AssignableCancelable, BooleanCancelable, CompositeCancelable} import monix.execution.{Ack, Cancelable, Scheduler} import monix.execution.exceptions.DummyException import monix.reactive.internal.consumers.LoadBalanceConsumer import monix.reactive.observers.Subscriber import monix.reactive.{BaseTestSuite, Consumer, Observable, Observer} import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success} object LoadBalanceConsumerSuite extends BaseTestSuite { test("trigger error when parallelism < 1") { implicit s => intercept[IllegalArgumentException] { Consumer.loadBalance(0, Consumer.head[Int]) () } () } test("trigger error when array of consumers is empty") { implicit s => intercept[IllegalArgumentException] { new LoadBalanceConsumer(1, Array.empty[Consumer[Int, Int]]) () } () } test("aggregate all events") { implicit s => check2 { (source: Observable[Int], rndInt: Int) => // Parallelism value will be between 1 and 16 val parallelism = { val x = math.abs(rndInt) val pos = if (x < 0) Int.MaxValue else x (pos % 15) + 1 } val consumer = Consumer.loadBalance(parallelism, Consumer.foldLeft[Long, Int](0L)(_ + _)) val task1 = source.foldLeft(0L)(_ + _).firstL val task2 = source.consumeWith(consumer).map(_.sum) task1 <-> task2 } } test("aggregate all events with subscribers that stop") { implicit s => check2 { (source: Observable[Int], rndInt: Int) => // Parallelism value will be between 1 and 16 val parallelism = { val x = math.abs(rndInt) val pos = if (x < 0) Int.MaxValue else x (pos % 15) + 1 } val fold = Consumer.foldLeft[Long, Int](0L)(_ + _) val justOne = Consumer.headOption[Int].map(_.getOrElse(0).toLong) val allConsumers = for (i <- 0 until parallelism) yield if (i % 2 == 0) fold else justOne val consumer = Consumer.loadBalance(allConsumers: _*) val task1 = source.foldLeft(0L)(_ + _).firstL val task2 = source.consumeWith(consumer).map(_.sum) task1 <-> task2 } } test("keep subscribers busy until the end") { implicit s => val iterations = 10000 val expectedSum = iterations.toLong * (iterations - 1) / 2 val ackPromise = Promise[Ack]() val sum = Atomic(0L) val wasCompleted = Atomic(0) val async = createAsync(sum, wasCompleted) val sync = createSync(sum, wasCompleted) val busy = createBusy(sum, wasCompleted, ackPromise) val finishPromise = Promise[Int]() val loadBalancer = Consumer.loadBalance(sync, async, busy, sync, async, busy).map(_.length) val (subscriber, _) = loadBalancer.createSubscriber(Callback.fromPromise(finishPromise), s) val continue = Observer.feed(subscriber, BooleanCancelable(), (0 until 10000).iterator) s.tick() assertEquals(continue.syncTryFlatten, Continue) 
assertEquals(sum.get(), expectedSum - 2 - 5) // Triggering on complete subscriber.onComplete(); s.tick() assertEquals(wasCompleted.get(), 4) assertEquals(finishPromise.future.value, None) // Continue ackPromise.success(Continue); s.tick() assertEquals(sum.get(), expectedSum) assertEquals(wasCompleted.get(), 6) assertEquals(finishPromise.future.value, Some(Success(6))) } test("a subscriber triggering an error in onNext will cancel everything") { implicit s => val iterations = 10000 val ackPromise1 = Promise[Ack]() val ackPromise2 = Promise[Ack]() val expectedSum = iterations.toLong * (iterations - 1) / 2 val sum = Atomic(0L) val wasCompleted = Atomic(0) val async = createAsync(sum, wasCompleted) val sync = createSync(sum, wasCompleted) val busy1 = createBusy(sum, wasCompleted, ackPromise1) val busy2 = createBusy(sum, wasCompleted, ackPromise2) val finishPromise = Promise[Int]() val loadBalancer = Consumer.loadBalance(sync, async, busy1, sync, async, busy2).map(_.length) val conn = BooleanCancelable() val (subscriber, c) = loadBalancer.createSubscriber(Callback.fromPromise(finishPromise), s) c := conn val continue = Observer.feed(subscriber, conn, (0 until 10000).iterator) s.tick() assertEquals(continue.syncTryFlatten, Continue) assertEquals(sum.get(), expectedSum - 2 - 5) // Triggering on complete subscriber.onComplete(); s.tick() assertEquals(wasCompleted.get(), 4) assertEquals(finishPromise.future.value, None) // Continue val dummy = DummyException("dummy") ackPromise1.failure(dummy); s.tick() assertEquals(wasCompleted.get(), 4) assertEquals(finishPromise.future.value, Some(Failure(dummy))) assert(conn.isCanceled, "conn.isCanceled") assertEquals(subscriber.onNext(10), Stop) ackPromise2.success(Continue) s.tick() // We shouldn't have triggered any errors! assertEquals(s.state.lastReportedError, null) } test("a subscriber triggering an error by callback will cancel everything") { implicit s => val iterations = 10000 val ackPromise1 = Promise[Ack]() val ackPromise2 = Promise[Ack]() val expectedSum = iterations.toLong * (iterations - 1) / 2 val sum = Atomic(0L) val wasCompleted = Atomic(0) val async = createAsync(sum, wasCompleted) val sync = createSync(sum, wasCompleted) val dummy = DummyException("dummy") val withError = createErrorSignaling(ackPromise1, dummy) val busy = createBusy(sum, wasCompleted, ackPromise2) val finishPromise = Promise[Int]() val loadBalancer = Consumer.loadBalance(sync, async, withError, sync, async, busy).map(_.length) val conn = BooleanCancelable() val (subscriber, c) = loadBalancer.createSubscriber(Callback.fromPromise(finishPromise), s) c := conn val continue = Observer.feed(subscriber, conn, (0 until 10000).iterator) s.tick() assertEquals(continue.syncTryFlatten, Continue) assertEquals(sum.get(), expectedSum - 2 - 5) // Triggering on complete subscriber.onComplete(); s.tick() assertEquals(wasCompleted.get(), 4) assertEquals(finishPromise.future.value, None) // Continue ackPromise1.success(Continue); s.tick() assertEquals(wasCompleted.get(), 4) assertEquals(finishPromise.future.value, Some(Failure(dummy))) assert(conn.isCanceled, "conn.isCanceled") assertEquals(subscriber.onNext(10), Stop) ackPromise2.success(Continue) s.tick() // We shouldn't have triggered any errors! 
assertEquals(s.state.lastReportedError, null) } test("a subscriber can cancel at any time") { implicit s => val sum = Atomic(0L) val wasCompleted = Atomic(0) val composite = CompositeCancelable() val cancelableConsumer = createCancelable(sum, wasCompleted, composite) val sync = createSync(sum, wasCompleted) val loadBalancer = Consumer.loadBalance(sync, cancelableConsumer, sync, cancelableConsumer).map(_.length) val finishPromise = Promise[Int]() val (subscriber, _) = loadBalancer.createSubscriber(Callback.fromPromise(finishPromise), s) for (_ <- 0 until 4) assertEquals(subscriber.onNext(1), Continue) s.tick() assertEquals(sum.get(), 4 + 2) for (_ <- 0 until 4) assertEquals(subscriber.onNext(1), Continue) s.tick() assertEquals(sum.get(), 8 + 2 + 2) composite.cancel(); s.tick() for (_ <- 0 until 4) { assertEquals(subscriber.onNext(1), Continue); s.tick() } assertEquals(sum.get(), 12 + 4) subscriber.onComplete(); s.tick() assertEquals(wasCompleted.get(), 2) assertEquals(finishPromise.future.value, Some(Success(4))) } def createCancelable(sum: AtomicLong, wasCompleted: AtomicInt, conn: CompositeCancelable): Consumer[Int, Unit] = new Consumer[Int, Unit] { def createSubscriber(cb: Callback[Throwable, Unit], s: Scheduler): (Subscriber[Int], AssignableCancelable) = { val sendFinal = Cancelable { () => cb.onSuccess(()) } val c = new AssignableCancelable { def cancel(): Unit = conn.cancel() def `:=`(value: Cancelable): this.type = { conn += value conn += sendFinal this } } val sub = new Subscriber[Int] { implicit val scheduler = s def onNext(elem: Int) = { sum.increment(elem + 1) Continue } def onError(ex: Throwable): Unit = () def onComplete(): Unit = wasCompleted.increment() } (sub, c) } } def createSync(sum: AtomicLong, wasCompleted: AtomicInt): Consumer[Int, Unit] = Consumer.fromObserver { _ => new Observer.Sync[Int] { def onNext(elem: Int) = { sum.increment(elem) Continue } def onError(ex: Throwable): Unit = () def onComplete(): Unit = wasCompleted.increment() } } def createAsync(sum: AtomicLong, wasCompleted: AtomicInt): Consumer[Int, Unit] = Consumer.fromObserver { implicit scheduler => new Observer[Int] { def onNext(elem: Int) = { sum.increment(elem) Future(Continue) } def onError(ex: Throwable): Unit = () def onComplete(): Unit = wasCompleted.increment() } } def createBusy(sum: AtomicLong, wasCompleted: AtomicInt, ack: Promise[Ack]): Consumer[Int, Unit] = Consumer.fromObserver { implicit scheduler => new Observer[Int] { def onNext(elem: Int) = ack.future.map { r => sum.increment(elem) r } def onError(ex: Throwable): Unit = () def onComplete(): Unit = wasCompleted.increment() } } def createErrorSignaling(ack: Promise[Ack], ex: Throwable): Consumer[Int, Unit] = new Consumer[Int, Unit] { def createSubscriber(cb: Callback[Throwable, Unit], s: Scheduler): (Subscriber[Int], AssignableCancelable) = { val sub = new Subscriber[Int] { implicit val scheduler = s def onNext(elem: Int) = ack.future.map { _ => cb.onError(ex) Stop } def onError(ex: Throwable): Unit = () def onComplete(): Unit = () } (sub, AssignableCancelable.dummy) } } }
monixio/monix
monix-reactive/shared/src/test/scala/monix/reactive/consumers/LoadBalanceConsumerSuite.scala
Scala
apache-2.0
11,295
package scalapb.grpc import io.grpc.StatusException import io.grpc.stub.StreamObserver import org.mockito.{ArgumentMatchers, Mockito} import org.mockito.Mockito.mock import scala.util.{Failure, Success} import munit.FunSuite class GrpcSpec extends FunSuite { test("Complete observer should wrap an exception as a StatusException on failure") { val observer = mock(classOf[StreamObserver[_]]) Grpc.completeObserver(observer)(Failure(new RuntimeException("Error!"))) Mockito.verify(observer).onError(ArgumentMatchers.any(classOf[StatusException])) } test("Complete observer should call onError when onNext fails") { val observer = mock(classOf[StreamObserver[String]]) Mockito .when(observer.onNext(ArgumentMatchers.anyString())) .thenThrow(new RuntimeException("Error!")) Grpc.completeObserver(observer)(Success("Success!")) Mockito.verify(observer).onError(ArgumentMatchers.any(classOf[StatusException])) } }
scalapb/ScalaPB
scalapb-runtime-grpc/src/test/scala/scalapb/grpc/GrpcSpec.scala
Scala
apache-2.0
965
import scala.collection.mutable.ListBuffer

object Solution {

  // Maps each closing bracket to the opening bracket it must match.
  private val inverse = Map[Char, Char](']' -> '[', ')' -> '(', '}' -> '{')

  // Classic stack-based check: push opening brackets, pop on matching closers.
  // The string is balanced iff every closer matches the most recent opener and
  // nothing is left on the stack at the end.
  private def balanced(s: String): Boolean = {
    val l = new ListBuffer[Char]()
    for (c <- s) c match {
      case lparen @ ('[' | '(' | '{') =>
        lparen +=: l // push
      case rparen @ (']' | '}' | ')') =>
        if (l.isEmpty || !inverse.get(rparen).contains(l.head)) return false
        l.trimStart(1) // pop
    }
    l.isEmpty
  }

  def main(args: Array[String]) = {
    val it = scala.io.Source.stdin.getLines
    val n = it.next.toInt
    var i = 0
    while (i < n) {
      i += 1
      println(if (balanced(it.next)) "YES" else "NO")
    }
  }
}
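// Hedged usage sketch (the queries below are illustrative, not HackerRank's own test data):
// feeds a small input to Solution.main by redirecting stdin. The second string fails because
// ']' arrives while '(' is still on top of the stack, so the expected output is YES, NO, YES.
object SolutionDemo extends App {
  val sample = "3\n{[()]}\n{[(])}\n{{[[(())]]}}\n"
  System.setIn(new java.io.ByteArrayInputStream(sample.getBytes("UTF-8")))
  Solution.main(Array.empty[String])
}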
marcos-sb/hacker-rank
data-structures/stacks/balanced-parentheses/Solution.scala
Scala
apache-2.0
685
/* * Copyright 2014 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.config.scala import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.matchers.ShouldMatchers @RunWith(classOf[JUnitRunner]) class DynamicFloatPropertyTest extends PropertiesTestHelp with ShouldMatchers with DynamicPropertyBehaviors[Float] { override def fixture(name: String) = DynamicFloatProperty(name, 1.0f) override def fixtureWithCallback(name: String, callback: () => Unit) = DynamicFloatProperty(name, 1.0f, callback) "DynamicFloatProperty" should { behave like dynamicProperty(1.0f, 2.2f) } }
ouyangkongtong/archaius
archaius-scala/src/test/scala/com/netflix/config/scala/DynamicFloatPropertyTest.scala
Scala
apache-2.0
1,178
/*§ =========================================================================== EighthBridge =========================================================================== Copyright (C) 2016 Gianluca Costa =========================================================================== Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =========================================================================== */ package info.gianlucacosta.eighthbridge.graphs.point2point.visual import info.gianlucacosta.eighthbridge.graphs.Link import scalafx.geometry.Point2D /** * A link for VisualGraph */ trait VisualLink[L <: VisualLink[L]] extends Link { this: L => def internalPoints: List[Point2D] def selected: Boolean def labelCenter: Option[Point2D] def styleClasses: List[String] def visualCopy( internalPoints: List[Point2D] = internalPoints, selected: Boolean = selected, labelCenter: Option[Point2D] = labelCenter): L }
giancosta86/EighthBridge
src/main/scala/info/gianlucacosta/eighthbridge/graphs/point2point/visual/VisualLink.scala
Scala
apache-2.0
1,498
trait Bifunctor[F[_, _]] {
  // Each method has a default implementation in terms of the others, so a concrete
  // instance must override either bimap or both first and second.
  def bimap[A, B, C, D](g: A => C)(h: B => D): F[A, B] => F[C, D] =
    first(g) compose second(h)
  def first[A, B, C](g: A => C): F[A, B] => F[C, B] =
    bimap(g)(identity[B])
  def second[A, B, D](h: B => D): F[A, B] => F[A, D] =
    bimap(identity[A])(h)
}
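// A minimal usage sketch (not part of the book snippet): a Bifunctor instance for Tuple2
// that overrides bimap; first and second then come for free from the defaults above.
object TupleBifunctorExample extends App {
  val tupleBifunctor: Bifunctor[Tuple2] = new Bifunctor[Tuple2] {
    override def bimap[A, B, C, D](g: A => C)(h: B => D): ((A, B)) => (C, D) = {
      case (a, b) => (g(a), h(b))
    }
  }

  val pair = (2, "abc")
  // Map both components at once ...
  println(tupleBifunctor.bimap((n: Int) => n * 10)((s: String) => s.length)(pair)) // (20,3)
  // ... or only one of them, via the derived combinators.
  println(tupleBifunctor.first[Int, String, Int](_ + 1)(pair))                     // (3,abc)
  println(tupleBifunctor.second[Int, String, String](_.toUpperCase)(pair))         // (2,ABC)
}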
hmemcpy/milewski-ctfp-pdf
src/content/1.8/code/scala/snippet01.scala
Scala
gpl-3.0
290
// Scala for the Impatient / Chapter 9:
// Reads a file of floating-point numbers (path given as the first argument) and prints
// their sum, average, minimum and maximum, computing the statistics in a single pass.
import java.io.File
import java.nio.file.Paths
import java.util.{ Scanner, Locale }
import scala.io.Source

object Main extends App {
  val cwd = Paths.get("").toAbsolutePath
  val file = new File(cwd + "/" + args(0))

  // Reference average, computed eagerly from the whole file, used to cross-check the fold below.
  val dbls = Source.fromFile(file).mkString.split("""\s+""").map(_.toDouble)
  val correctAvg = dbls.sum / dbls.size

  val scn = new Scanner(file).useLocale(Locale.US)
  val it = new Iterator[Double] { def hasNext = scn.hasNextDouble; def next = scn.nextDouble }

  // Applies f to the accumulated value, or seeds it with the current value on the first element.
  def shift[T](acc: Option[T], curr: T)(f: T => T) = acc match {
    case None => Some(curr)
    case Some(somePrev) => Some(f.apply(somePrev))
  }

  val initial = (0, 0.0, None: Option[Double], None: Option[Double], None: Option[Double])
  val (count, sum, avg, min, max) = it.foldLeft(initial) { (acc, x: Double) =>
    val (count, sum, avg, min, max) = acc
    val newCount = count + 1
    val newSum = sum + x
    // Incremental mean: scale the old mean by count/newCount and add x/newCount.
    val newAvg = shift(acc = avg, curr = x) { acc => acc / newCount * count + x / newCount }
    val newMin = shift(acc = min, curr = x) { acc => if (x < acc) x else acc }
    val newMax = shift(acc = max, curr = x) { y => if (x > y) x else y }
    (newCount, newSum, newAvg, newMin, newMax)
  }

  assert(avg.getOrElse(0.0) == correctAvg, (avg.getOrElse(0.0), correctAvg))
  println("sum: %f\navg: %f\nmin: %f\nmax: %f".format(sum, avg.get, min.get, max.get))
}
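// Hedged demo of the program above (the file name "numbers-demo.txt" is illustrative):
// writes three numbers into the working directory and runs Main on that file.
// Expected output:
//   sum: 10.000000
//   avg: 3.333333
//   min: 1.500000
//   max: 6.000000
object MainDemo extends App {
  val data = java.nio.file.Paths.get("numbers-demo.txt")
  java.nio.file.Files.write(data, "1.5 2.5 6.0".getBytes("UTF-8"))
  Main.main(Array("numbers-demo.txt"))
  java.nio.file.Files.deleteIfExists(data)
}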
eallik/scalaimpatient
chapter09/ex02.scala
Scala
unlicense
1,664
//
// BoundedChannelType.scala -- Scala object BoundedChannelType
// Project OrcScala
//
// $Id: BoundedChannelType.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Created by dkitchin on Dec 1, 2010.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.lib.state.types

import orc.types._
import orc.error.compiletime.typing._
import orc.lib.builtin.structured.ListType

/**
 *
 * @author dkitchin
 */
object BoundedChannelType extends SimpleTypeConstructor("BoundedChannel", Invariant) {

  def getBuilder: Type = {
    val X = new TypeVariable()
    FunctionType(List(X), List(IntegerType), this(X))
  }

  override def instance(ts: List[Type]) = {
    val List(t) = ts
    new RecordType(
      "get" -> SimpleFunctionType(t),
      "getD" -> SimpleFunctionType(t),
      "put" -> SimpleFunctionType(t, SignalType),
      "putD" -> SimpleFunctionType(t, SignalType),
      "close" -> SimpleFunctionType(SignalType),
      "closeD" -> SimpleFunctionType(SignalType),
      "getOpen" -> SimpleFunctionType(IntegerType),
      "getBound" -> SimpleFunctionType(IntegerType),
      "getAll" -> SimpleFunctionType(ListType(t)))
  }
}
laurenyew/cOrcS
src/orc/lib/state/types/BoundedChannelType.scala
Scala
bsd-3-clause
1,379
package com.marketmaker.formula /** * Created by wacharint on 5/19/15. */ import java.util.Date import com.marketmaker.helper.{Configuration, RepositoryHelper} import com.marketmaker.math._ import com.marketmaker.repositories.{OrderValue, Order, Strategy, Phy} trait FormulaCalculatorTrait extends MarketParameters { val mathHelper = new MathHelper var repositoryHelper = new RepositoryHelper protected def valueOfSpread(phys : Map[Int,Double], spreadChange : Map[(Int,Int),Double] = spreadTransitionMatrix) (implicit currentSpread: Byte) : Double = { var ret : Double = 0 var j : Int = 1 for( j <- 1 to maxSpread){ ret = ret + ( spreadChange((currentSpread, j)) * (phys(j) - phys(currentSpread)) ) } mathHelper.round(ret, decimalPoint) } protected def valueOfLimitOrder(lamdaValue : Double, phyIfTheOrderMatched : Double, phyBeforTheOrderMatched : Double, orderSize : Int , isAggressive : Boolean) (implicit currentSpread: Byte) : Double = { val ret = if(isAggressive) { lamdaValue * ( phyIfTheOrderMatched - phyBeforTheOrderMatched + ( orderSize * (( currentSpread * tickSize / 2 ) - tickSize ) ) ) } else { lamdaValue * ( phyIfTheOrderMatched - phyBeforTheOrderMatched + ( currentSpread * tickSize / 2 * orderSize ) ) } mathHelper.round(ret, decimalPoint) } protected def valueOfMarketOrder(phyWhenTheOrderMatch : Double, orderSize : Int) (implicit currentSpread : Byte) : Double = { // phyBeforeTheOrderMatch - phyWhenTheOrderMatch - ( currentSpread * tickSize * orderSize / 2 ) phyWhenTheOrderMatch - ( currentSpread * tickSize * math.abs(orderSize) / 2 ) } protected def valueOfInventoryPunishment(currentInventory : Int) : Double = { riskAverseLevel * currentInventory * currentInventory } def addPhyAtTerminal(implicit databaseName : String, databaseSaveInterval : Short) = { for( inv <- -maximumNumberOfContract to maximumNumberOfContract) { var spread = 1 for(spread <- 1 to maxSpread) { val currentSpread: Byte = spread.asInstanceOf[Byte] repositoryHelper.addPhy(time = 0, inv, currentSpread, -math.abs(inv) * currentSpread * tickSize / 2) } } repositoryHelper.forceUpdatePhyTable } def calculateOrderValue(currentInventory : Short, currentPhys : Seq[Phy], earlierPhy : Phy) (implicit currentTime : Int, currentSpread : Byte, databaseName : String, databaseSavedInterval : Short) = { var phyMap = Map[Int, Double]() var spread = 1 for(spread <- 1 to maxSpread){ phyMap = phyMap ++ Map[Int, Double]( spread -> currentPhys.find(p => p.spread == spread.asInstanceOf[Byte] && p.time == currentTime && p.inv == currentInventory).get.value) } val calculatedValueOfSpread = valueOfSpread(phyMap, spreadTransitionMatrix) val calculatedSupBuyLimitOrder = supBuyLimitOrder(currentInventory) val calculatedSupSellLimitOrder = supSellLimitOrder(currentInventory) val calculatedInventoryPunishment = inventoryPunishment(currentInventory) val calculatedSupMarketOrder = supMarketOrder(currentInventory) val valueOfCurrentPhy = currentPhys.find(p => p.spread == currentSpread && p.time == currentTime && p.inv == currentInventory).get.value val valueOfLimitOrderStrategy = - (valueOfCurrentPhy - earlierPhy.value) - calculatedValueOfSpread - calculatedSupBuyLimitOrder._2 - calculatedSupSellLimitOrder._2 + calculatedInventoryPunishment val valueOfMarketOrderStrategy = valueOfCurrentPhy - calculatedSupMarketOrder._2 repositoryHelper.addOrderValue(new OrderValue(currentTime, currentInventory, currentSpread, valueOfLimitOrderStrategy, valueOfMarketOrderStrategy, calculatedSupBuyLimitOrder._1.orderPosition, calculatedSupBuyLimitOrder._1.orderSize, 
calculatedSupSellLimitOrder._1.orderPosition, calculatedSupSellLimitOrder._1.orderSize, calculatedSupMarketOrder._1.orderPosition, calculatedSupMarketOrder._1.orderSize)) } def calculateAllOrderValue(marketMaxTime : Int)(implicit databaseName : String, databaseSavedInterval : Short) = { var time : Int = 0 for(time <- 0 to marketMaxTime by marketClockInterval) { val phyAtTheTimes = repositoryHelper.getPhys(time) val phyAtEarlierTimes = repositoryHelper.getPhys(time + 500) var inventory : Short = 0 for(inventory <- -maximumNumberOfContract to maximumNumberOfContract by 1) { var spread : Byte = 1 for(spread <- 1 to maxSpread by 1) { implicit val currentTime : Int = time implicit val currentSpread : Byte = spread.asInstanceOf[Byte] val phyAtEarlierTIme: Phy = phyAtEarlierTimes.find(p => p.time == time + 500 && p.inv == inventory && p.spread == spread). getOrElse(new Phy(time, inventory, spread.asInstanceOf[Byte], 0)) calculateOrderValue(inventory.asInstanceOf[Short], phyAtTheTimes, phyAtEarlierTIme) } } Console.println("%s : Calculating order value Table : %s/%s".format(new Date(), time,marketMaxTime)) } } def supBuyLimitOrder(currentHoldingInventory : Int) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) = { supLimitOrder(currentHoldingInventory, isBidOrder = true) } def supSellLimitOrder(currentHoldingInventory : Int) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) = { supLimitOrder(currentHoldingInventory, isBidOrder = false) } def inventoryPunishment(currentInventory : Int) : Double = valueOfInventoryPunishment(currentInventory) protected def supLimitOrder(currentHoldingInventory : Int, isBidOrder : Boolean) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) protected def getStrategies(strategies : Map[(Int,Int) , Double], isAggressive : Boolean, isBidOrder : Boolean, lamda : Double, phys : Seq[Phy], currentHoldingInventory : Int) (implicit currentSpread : Byte, currentTime : Int) : Map[(Int, Int), Double] = { var returnStrategies = strategies val targetInventory = 0 for(targetInventory <- 0 to maximumNumberOfContract) { val inventoryChange = if(isBidOrder) targetInventory else -targetInventory if(math.abs(currentHoldingInventory + inventoryChange) <= maximumNumberOfContract) { val phyBeforeMatch: Phy = phys.find(phy => phy.time == currentTime && phy.inv == currentHoldingInventory) orNull val phyAfterMatch: Phy = phys.find(phy => phy.time == currentTime && phy.inv == currentHoldingInventory + inventoryChange) orNull if (phyAfterMatch == null || phyBeforeMatch == null) { // the order is not valid, skip to the next loop } else { val value = valueOfLimitOrder(lamda, phyAfterMatch.value, phyBeforeMatch.value, targetInventory, isAggressive) if (isBidOrder && !isAggressive) { returnStrategies += (Strategy.LimitBuyOrderAtTheMarket, targetInventory) -> value } else if (isBidOrder && isAggressive) { returnStrategies += (Strategy.LimitBuyOrderAtTheMarketPlusOneSpread, targetInventory) -> value } else if (!isBidOrder && isAggressive) { returnStrategies += (Strategy.LimitSellOrderAtTheMarketMinusOneSpread, targetInventory) -> value } else { returnStrategies += (Strategy.LimitSellOrderAtTheMarket, targetInventory) -> value } } } } returnStrategies } def supMarketOrder(currentHoldingInventory : Int) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) } class FormulaCalculator extends FormulaCalculatorTrait with Configuration { def calculateAllPhyTable(endTime : Int) = { addPhyAtTerminal val counter : Int = 0 val endCount = endTime / 
marketClockInterval for(counter <- 0 to endCount) { implicit val currentTime : Int = counter * marketClockInterval calculatePhyAtEarlyTimes Console.println("%s : Calculating Phy Table : %s/%s".format(new Date(), counter,endCount)) } } def calculatePhyAtEarlyTimes(implicit currentTime : Int) = { // create target intent to get phys var targetInventory = -maximumNumberOfContract val currentPhys = repositoryHelper.getPhys(currentTime) // looping generate phy at earlier time for(targetInventory <- -maximumNumberOfContract to maximumNumberOfContract) { calculatePhyAtEarlyTime(targetInventory.asInstanceOf[Short], currentPhys) } repositoryHelper.forceUpdatePhyTable } def calculatePhyAtEarlyTime(currentInventory : Short, currentPhys : Seq[Phy])(implicit currentTime : Int) = { implicit var currentSpread : Byte = 1 var spread = 1 for(spread <- 1 to maxSpread) { try { currentSpread = spread.asInstanceOf[Byte] val phys = currentPhys.filter(p => p.time == currentTime && p.inv == currentInventory) var currentPhyMap = Map[Int, Double]() var sp = 1 for(sp <- 1 to maxSpread){ currentPhyMap = currentPhyMap ++ Map[Int, Double]( sp -> phys.find(p => p.spread == sp.asInstanceOf[Byte]).get.value ) } val bestLimitBuyOrder = supBuyLimitOrder(currentInventory) val bestLimitSellOrder = supSellLimitOrder(currentInventory) val valueOfPhyAtTimeBefore = currentPhyMap(currentSpread) + valueOfSpread(currentPhyMap) + bestLimitBuyOrder._2 + bestLimitSellOrder._2 - inventoryPunishment(currentInventory) val phyAtTimeBefore: Phy = new Phy( currentTime + marketClockInterval, currentInventory, currentSpread, valueOfPhyAtTimeBefore) repositoryHelper.addPhy(phyAtTimeBefore) } catch { case e : Exception => { System.out.println(e.getMessage) } } } } override def supMarketOrder(currentHoldingInventory : Int) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) = { var strategies = Map[(Int,Int), Double]() var orderSize : Int = 0 val phys = repositoryHelper.getPhys(currentTime) val phyBeforeMatch = phys.find(phy => phy.time == currentTime && phy.inv == currentHoldingInventory) if(phyBeforeMatch.isEmpty) { throw new NullPointerException("""Cannot find the phy with time = %s and inventory = %s""".format(currentTime, currentHoldingInventory)) } for(orderSize <- -maximumNumberOfContract to maximumNumberOfContract) { if(currentHoldingInventory + orderSize > maximumNumberOfContract || currentHoldingInventory + orderSize < -maximumNumberOfContract) { // do nothing } else { val phyAfterMatch = phys.find(phy => phy.time == currentTime && phy.inv == currentHoldingInventory + orderSize).get val orderSide = if(orderSize > 0) Strategy.MarketBuyOrder else Strategy.MarketSellOrder val newStrategy = (orderSide, orderSize) -> valueOfMarketOrder(phyAfterMatch.value, math.abs(orderSize)) strategies = strategies ++ Map[(Int,Int),Double](newStrategy) } } val bestStrategy = mathHelper.getMax(strategies) (new Order(math.abs(bestStrategy._2), bestStrategy._1), phyBeforeMatch.get.value - strategies(bestStrategy)) } protected def supLimitOrder(currentHoldingInventory : Int, isBidOrder : Boolean) (implicit currentSpread : Byte, currentTime : Int) : (Order,Double) = { val side = if(isBidOrder) "BID" else "ASK" var strategies = Map[(Int,Int), Double]() val lamdaAtTheMarket : Double = lamdaTable(side, currentSpread) val lamdaAtTheMarketPlus : Double = lamdaTable(side, currentSpread - 1) val phys = repositoryHelper.getPhys(currentTime) strategies = strategies ++ getStrategies(strategies, isAggressive = false, isBidOrder, lamdaAtTheMarket, phys, 
currentHoldingInventory) strategies = strategies ++ getStrategies(strategies, isAggressive = true, isBidOrder, lamdaAtTheMarketPlus, phys, currentHoldingInventory) val bestStrategy = mathHelper.getMax(strategies) (new Order(bestStrategy._2, bestStrategy._1), strategies(bestStrategy)) } }
ougnt/StrategiesGenerator
src/main/scala/com/marketmaker/formula/FormulaCalculator.scala
Scala
mit
14,435
package ch.bsisa.hyperbird.patman.simulations.model import ch.bsisa.hyperbird.Implicits._ import ch.bsisa.hyperbird.dao.ElfinDAO import ch.bsisa.hyperbird.model.ELFIN import ch.bsisa.hyperbird.model.format.ElfinFormat import ch.bsisa.hyperbird.model.format.Implicits._ import ch.bsisa.hyperbird.patman.simulations.Constants._ import ch.bsisa.hyperbird.util.DateUtil import ch.bsisa.hyperbird.util.ElfinUtil import ch.bsisa.hyperbird.dao.ws.WSQueries import ch.bsisa.hyperbird.dao.ws.XQueryWSHelper import java.util.Date import play.api.libs.concurrent.Execution.Implicits._ import play.api.Logger import scala.concurrent.Future import ch.bsisa.hyperbird.patman.simulations.Constants import ch.bsisa.hyperbird.model.IDENTIFIANT /** * Helper to go from ELFIN to Hospital and reverse. * * <L POS="5"> * <C POS="1">503B</C> * <C POS="2">4195904</C> * <C POS="3">soins continus</C> * <C POS="4">médicalisé</C> * <C POS="5">terminé</C> * <C POS="6">occupé</C> * <C POS="7"/> * </L> * */ object HospitalHelper { val logger = Logger("ch.bsisa.hyperbird.patman.simulations.model.HospitalHelper") /** * Converts generic ELFIN type for specific CLASSE='HOSPITAL_STATE' to semantic type Hospital */ def toHospital(elfinHospitalState: ELFIN): Hospital = { val hospitalCode = getMixedContent(elfinHospitalState.CARACTERISTIQUE.get.FRACTION.get.L(0).C(0).mixed) val scheduleStr = elfinHospitalState.IDENTIFIANT.get.DE.get //play.api.Logger.info("scheduleStr = " + scheduleStr) val schedule = DateUtil.getIsoDateFormatterWithoutTz.parse(scheduleStr) val beds = for { (l, index) <- (elfinHospitalState.CARACTERISTIQUE.get.FRACTION.get.L.toList zipWithIndex) if (index > 0) // First L does not contain bed information } yield { val c = l.C val bedId = getMixedContent(c(0).mixed) val bedFree = (getMixedContent(c(5).mixed) == BED_FREE_CODE) val patientNb = getMixedContent(c(1).mixed) val patientType = getMixedContent(c(2).mixed) val transferType = getMixedContent(c(3).mixed) val bed = Bed(bedId, bedFree, patientNb, patientType, transferType) bed } Hospital(hospitalCode, schedule, beds) } /** * Converts `Hospital` instance to ELFIN generic GeoXML representation. * We consider beds data built programmatically as always having input state * as completed {`terminé`} not pending {`en cours`}. This helps reporting * queries reuse. 
* */ def toElfin(hospital: Hospital, bedsPosStartIndex: Int = 1): ELFIN = { val bedsXmlElem = for ((bed, i) <- hospital.beds zipWithIndex) yield { <!-- Identification lit --> val bedXmlElem = <L POS={ (i + bedsPosStartIndex).toString }> <!-- Numéro lit --> <C POS="1">{ bed.id }</C> <C POS="2">{ bed.patientNb }</C> <C POS="3">{ bed.patientType }</C> <C POS="4">{ bed.transferType }</C> <C POS="5">{ Constants.BED_COMPLETED_INPUT }</C> <C POS="6">{ if (bed.free) "libre" else "occupé" }</C> <C POS="7">{ bed.reasonForTransfer.getOrElse("") }</C> </L> bedXmlElem } val hospitalElfinTemplateXmlElem = <ELFIN Id="N/A" ID_G="N/A" CLASSE="HOSPITAL" GROUPE="" TYPE="BIEN" NATURE=""> <IDENTIFIANT> <AUT/> <NOM>N/A</NOM> <ALIAS>{ hospital.code }</ALIAS> <DE>{ DateUtil.getIsoDateFormatterWithoutTz.format(hospital.schedule) }</DE> </IDENTIFIANT> <CARACTERISTIQUE> <!-- Liste des lits de l'hopital --> <FRACTION> { bedsXmlElem } </FRACTION> </CARACTERISTIQUE> <DIVERS> <REMARQUE/> </DIVERS> </ELFIN> ElfinFormat.fromXml(hospitalElfinTemplateXmlElem) } /** * Shortcut function for repetitive Bed.patientType matches Constants.PATIENT_TYPE_SI test */ def isBedPatientTypeSi(bed: Bed): Boolean = (bed.patientType == PATIENT_TYPE_SI) /** * Sets `reasonForTransfer` to all beds */ def setReasonForTransfer(beds: List[Bed], reasonForTransfer: String): List[Bed] = { val bedsWithReasonForTransfer = for (bed <- beds) yield { Bed(bed.id, bed.free, bed.patientNb, bed.patientType, bed.transferType, Some(reasonForTransfer)) } bedsWithReasonForTransfer } /** * Returns a pair of Seq[Bed]. The first one contains incoming SI patients while the second contains incoming SC patients. * Only SI at CDF end can be a reason for transfer, not SC. */ def getBedsWithIncomingPatient(previousStateOption: Option[Hospital], currentStateOption: Option[Hospital]): (List[Bed], List[Bed]) = { previousStateOption match { case Some(previousState) => currentStateOption match { // previous and current available case Some(currentState) => { // Incoming patients val bedsWithIncomingPatients = currentState.beds.filter { currentStateBed => if (!currentStateBed.free) { // Check if the current patient was already there val existingBed = previousState.beds.find(previousStateBed => currentStateBed.patientNb == previousStateBed.patientNb) // Return true if was not previously there (existingBed == None) } else { // Skip empty bed false } } val bedsWithIncomingPatientTypeSi = bedsWithIncomingPatients.filter(isBedPatientTypeSi) val bedsWithIncomingPatientTypeSc = bedsWithIncomingPatients.filterNot(isBedPatientTypeSi) //(bedsWithIncomingPatientTypeSi, bedsWithIncomingPatientTypeSc) if (currentState.code == Constants.HOSPITAL_CODE_CDF) { val bedsWithIncomingPatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_SI) //(bedsWithIncomingPatientTypeSiAndReasonForTransfer, bedsWithIncomingPatientTypeSc) val bedsWithIncomingPatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithIncomingPatientTypeSiAndReasonForTransfer, bedsWithIncomingPatientTypeScAndReasonForTransfer) } else { val bedsWithIncomingPatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_NONE) val bedsWithIncomingPatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithIncomingPatientTypeSiAndReasonForTransfer, 
bedsWithIncomingPatientTypeScAndReasonForTransfer) } } // previous available but no current: Nothing is incoming. case None => (List(), List()) } case None => currentStateOption match { // current available but no previous: Everything is incoming case Some(currentState) => val currentNonEmptyBeds = currentState.beds.filter(bed => !bed.free) val bedsWithIncomingPatientTypeSi = currentNonEmptyBeds.filter(isBedPatientTypeSi) val bedsWithIncomingPatientTypeSc = currentNonEmptyBeds.filterNot(isBedPatientTypeSi) //(bedsWithIncomingPatientTypeSi, bedsWithIncomingPatientTypeSc) if (currentState.code == Constants.HOSPITAL_CODE_CDF) { val bedsWithIncomingPatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_SI) val bedsWithIncomingPatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithIncomingPatientTypeSiAndReasonForTransfer, bedsWithIncomingPatientTypeScAndReasonForTransfer) } else { val bedsWithIncomingPatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_NONE) val bedsWithIncomingPatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithIncomingPatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithIncomingPatientTypeSiAndReasonForTransfer, bedsWithIncomingPatientTypeScAndReasonForTransfer) } // no previous nor current state available case None => (List(), List()) } } } /** * Return a pair of Seq[Bed]. The first one contains outgoing SI patients while the second contains outgoing SC patients. */ def getBedsWithOutgoingPatient(previousStateOption: Option[Hospital], currentStateOption: Option[Hospital]): (List[Bed], List[Bed]) = { previousStateOption match { case Some(previousState) => currentStateOption match { // previous and current available case Some(currentState) => { // Outgoing patients val bedsWithOutgoingPatient = previousState.beds.filter { previousStateBed => if (!previousStateBed.free) { // Check if the previous patient is still there val existingBed = currentState.beds.find(currentStateBed => currentStateBed.patientNb == previousStateBed.patientNb) // Return true if no more there (existingBed == None) } else { // Skip empty bed false } } // Split outgoing in SI, SC. val bedsWithOutgoingPatientTypeSi = bedsWithOutgoingPatient.filter(isBedPatientTypeSi) val bedsWithOutgoingPatientTypeSc = bedsWithOutgoingPatient.filterNot(isBedPatientTypeSi) (bedsWithOutgoingPatientTypeSi, bedsWithOutgoingPatientTypeSc) } // Previous available but nothing anymore in current: Everything is outgoing case None => val previousNonEmptyBeds = previousState.beds.filter(bed => !bed.free) val bedsWithOutgoingPatientTypeSi = previousNonEmptyBeds.filter(isBedPatientTypeSi) val bedsWithOutgoingPatientTypeSc = previousNonEmptyBeds.filterNot(isBedPatientTypeSi) (bedsWithOutgoingPatientTypeSi, bedsWithOutgoingPatientTypeSc) } case None => currentStateOption match { // current available but no previous: Nothing outgoing case Some(currentState) => (List(), List()) // no previous nor current state available case None => (List(), List()) } } } /** * Return a pair of Seq[Bed]. 
The first one contains patients patientType SC to SI change while the second contains patients patientType SI to SC change */ def getBedsWithPatientTypeChange(previousStateOption: Option[Hospital], currentStateOption: Option[Hospital]): (List[Bed], List[Bed]) = { previousStateOption match { case Some(previousState) => currentStateOption match { // previous and current available case Some(currentState) => { // From SC to SI val bedsWithPatientTypeChangeFromScToSi = currentState.beds.filter { currentStateBed => if (!currentStateBed.free) { // Check if the current patient was already there val bedWithPatientTypeChange = previousState.beds.find(previousStateBed => currentStateBed.patientNb == previousStateBed.patientNb) match { case Some(previousStateBed) => (currentStateBed.patientType != previousStateBed.patientType) && (previousStateBed.patientType == PATIENT_TYPE_SC) && (currentStateBed.patientType == PATIENT_TYPE_SI) case None => false } bedWithPatientTypeChange } else { // Skip empty bed false } } // From SI to SC val bedsWithPatientTypeChangeFromSiToSc = currentState.beds.filter { currentStateBed => if (!currentStateBed.free) { // Check if the current patient was already there val bedWithPatientTypeChange = previousState.beds.find(previousStateBed => currentStateBed.patientNb == previousStateBed.patientNb) match { case Some(previousStateBed) => (currentStateBed.patientType != previousStateBed.patientType) && (previousStateBed.patientType == PATIENT_TYPE_SI) && (currentStateBed.patientType == PATIENT_TYPE_SC) case None => false } bedWithPatientTypeChange } else { // Skip empty bed false } } //(bedsWithPatientTypeChangeFromScToSi, bedsWithPatientTypeChangeFromSiToSc) /** * Reason for transfer only applies to CDF. We log both SI to SC and SC to SI to remember some SC entries by PRT originaly came from CDF. */ if (currentState.code == Constants.HOSPITAL_CODE_CDF) { val bedsWithPatientTypeChangeFromScToSiAndReasonForTransfer = setReasonForTransfer(bedsWithPatientTypeChangeFromScToSi, Constants.BED_REASON_FOR_TRANSFER_SC_TO_SI) val bedsWithPatientTypeChangeFromSiToScAndReasonForTransfer = setReasonForTransfer(bedsWithPatientTypeChangeFromSiToSc, Constants.BED_REASON_FOR_TRANSFER_SI_TO_SC) (bedsWithPatientTypeChangeFromScToSiAndReasonForTransfer, bedsWithPatientTypeChangeFromSiToScAndReasonForTransfer) } else { val bedsWithPatientTypeChangeFromScToSiAndReasonForTransfer = setReasonForTransfer(bedsWithPatientTypeChangeFromScToSi, Constants.BED_REASON_FOR_TRANSFER_NONE) val bedsWithPatientTypeChangeFromSiToScAndReasonForTransfer = setReasonForTransfer(bedsWithPatientTypeChangeFromSiToSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithPatientTypeChangeFromScToSiAndReasonForTransfer, bedsWithPatientTypeChangeFromSiToScAndReasonForTransfer) } } // previous available but no current: No existing bed change tracking. case None => (List(), List()) } // No previous available: No existing bed change tracking. case None => (List(), List()) } } /** * Return (bedsWithTransferTypeOnlyChangePatientTypeSi:List[Bed],bedsWithTransferTypeOnlyChangePatientTypeSc:List[Bed]) * for which only TransferType changed. * It excludes those already included in patientType change. 
*/ def getBedsWithTransfertTypeChangeOnly(previousStateOption: Option[Hospital], currentStateOption: Option[Hospital]): (List[Bed], List[Bed]) = { previousStateOption match { case Some(previousState) => currentStateOption match { // previous and current available case Some(currentState) => { // Beds with transfer type only change val bedsWithTransferTypeOnlyChange = currentState.beds.filter { currentStateBed => if (!currentStateBed.free) { // Check if the current patient was already there val isBedWithTransferTypeOnlyChange = previousState.beds.find(previousStateBed => currentStateBed.patientNb == previousStateBed.patientNb) match { case Some(previousStateBed) => // We exclude patient type change beds already included in getBedsWithPatientTypeChange check (currentStateBed.patientType == previousStateBed.patientType) && // We check transfer type changed (any change) (previousStateBed.transferType != currentStateBed.transferType) case None => false } isBedWithTransferTypeOnlyChange } else { // Skip empty bed false } } val bedsWithTransferTypeOnlyChangePatientTypeSi = bedsWithTransferTypeOnlyChange.filter(isBedPatientTypeSi) val bedsWithTransferTypeOnlyChangePatientTypeSc = bedsWithTransferTypeOnlyChange.filterNot(isBedPatientTypeSi) if (currentState.code == Constants.HOSPITAL_CODE_CDF) { // Preserve original reason for transfer rather than overwriting it with update message. val bedsWithTransferTypeOnlyChangePatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithTransferTypeOnlyChangePatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_SI) // For SC patients it is not possible to state here if it is managed at CDF as expected or at PRT for a formerly transferred SI bed. Thus keep this update message as reason for change. val bedsWithTransferTypeOnlyChangePatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithTransferTypeOnlyChangePatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_TRANSFER_TYPE_CHANGE_FOR_SC) (bedsWithTransferTypeOnlyChangePatientTypeSiAndReasonForTransfer, bedsWithTransferTypeOnlyChangePatientTypeScAndReasonForTransfer) } else { // No transfer here val bedsWithTransferTypeOnlyChangePatientTypeSiAndReasonForTransfer = setReasonForTransfer(bedsWithTransferTypeOnlyChangePatientTypeSi, Constants.BED_REASON_FOR_TRANSFER_NONE) // No transfer here val bedsWithTransferTypeOnlyChangePatientTypeScAndReasonForTransfer = setReasonForTransfer(bedsWithTransferTypeOnlyChangePatientTypeSc, Constants.BED_REASON_FOR_TRANSFER_NONE) (bedsWithTransferTypeOnlyChangePatientTypeSiAndReasonForTransfer, bedsWithTransferTypeOnlyChangePatientTypeScAndReasonForTransfer) } } // previous available but no current: No existing bed change tracking. case None => (List(), List()) } // No previous available: No existing bed change tracking. case None => (List(), List()) } } /** * Encloses several beds updates function calls to provide a single tuple of 7th results as a time. 
* * List[Bed] results are: * * `(bedsWithIncomingPatientTypeSi, bedsWithIncomingPatientTypeSc, * bedsWithOutgoingPatientTypeSi, bedsWithOutgoingPatientTypeSc, * patientTypeChangeFromScToSi, patientTypeChangeFromSiToSc, * tranferTypeOnlyChange)` * */ def getBedsUpdates(previousHospitalState: Option[Hospital], currentHospitalState: Option[Hospital]): (List[Bed], List[Bed], List[Bed], List[Bed], List[Bed], List[Bed], List[Bed], List[Bed]) = { val incoming = HospitalHelper.getBedsWithIncomingPatient(previousHospitalState, currentHospitalState) val outgoing = HospitalHelper.getBedsWithOutgoingPatient(previousHospitalState, currentHospitalState) val patientTypeChange = HospitalHelper.getBedsWithPatientTypeChange(previousHospitalState, currentHospitalState) val tranferTypeOnlyChange = HospitalHelper.getBedsWithTransfertTypeChangeOnly(previousHospitalState, currentHospitalState) (incoming._1, incoming._2, outgoing._1, outgoing._2, patientTypeChange._1, patientTypeChange._2, tranferTypeOnlyChange._1, tranferTypeOnlyChange._2) } /** * * ===================================================================================================================================== * ==== UPDATE ALGO CDF ==== * ===================================================================================================================================== * These events are determined on static HOSPITAL_STATEs NOT on simulated hospital state. * * - bedsWithIncomingPatientTypeSi must trigger TRANSFER NATURE="add" => be transferred to PRT * - patientTypeChangeFromScToSi must trigger TRANSFER NATURE="add" => be transferred to PRT * * - bedsWithOutgoingPatientTypeSi must trigger TRANSFER NATURE="remove" => notify PRT these transferred SI patients are going out * * - patientTypeChangeFromSiToSc must trigger TRANSFER NATURE="update" => notify PRT patients have had their patient type changed * * - tranferTypeOnlyChange: * if SI patient type must trigger TRANSFER NATURE="update" => notify PRT patients have had transfer type changed * (replace their previous bed values with new updated ones) * if SC patient type must update CDF `simulatedHospitalState` => replace their previous bed values with new updated ones * * - bedsWithIncomingPatientTypeSc must update CDF `simulatedHospitalState` => stay at CDF * - bedsWithOutgoingPatientTypeSc must update CDF `simulatedHospitalState` => out of CDF * * Returns a new copy of `currentSimulatedHospitalStateOption` updated with all provided information. 
*/ def updateSimulatedHospitalStateForCdf( currentSimulatedHospitalStateOption: Option[Hospital], newStaticHospitalStateOption: Option[Hospital], bedsWithIncomingPatientTypeSi: List[Bed], bedsWithIncomingPatientTypeSc: List[Bed], bedsWithOutgoingPatientTypeSi: List[Bed], bedsWithOutgoingPatientTypeSc: List[Bed], patientTypeChangeFromScToSi: List[Bed], patientTypeChangeFromSiToSc: List[Bed], transferTypeOnlyChangePatientTypeSi: List[Bed], transferTypeOnlyChangePatientTypeSc: List[Bed]): Option[Hospital] = { currentSimulatedHospitalStateOption match { case Some(currentSimulatedHospitalState) => newStaticHospitalStateOption match { case Some(newStaticHospitalState) => // bedsWithIncomingPatientTypeSi - DO NOTHING - These are transferred to PRT // bedsWithIncomingPatientTypeSc - TO ADD - new CDF incoming beds val currentWithIncomingSc = currentSimulatedHospitalState.beds ++ bedsWithIncomingPatientTypeSc // bedsWithOutgoingPatientTypeSi - DO NOTHING - These changes are forwarded to PRT where the SI beds have been transferred // bedsWithOutgoingPatientTypeSc - TO REMOVE - outgoing SC beds at CDF val currentWithIncomingScMinusOutgoingSc = currentWithIncomingSc diff bedsWithOutgoingPatientTypeSc // patientTypeChangeFromScToSi - TO REMOVE - beds we transfer to PRT following patient type change val currentWithIncomingScMinusOutgoingScMinusScToSi = currentWithIncomingScMinusOutgoingSc diff patientTypeChangeFromScToSi // patientTypeChangeFromSiToSc - DO NOTHING - These changes are forwarded to PRT where the SI beds have been transferred. (They could be transferred back from PRT but this would not be dealt with here anyway.) // transferTypeOnlyChangePatientTypeSi - DO NOTHING - SI beds with new updated transfer type information are managed at PRT side // transferTypeOnlyChangePatientTypeSc - TO REPLACE - SC beds with new updated transfer type information val currentWithIncomingScMinusOutgoingScMinusScToSiWithUpdatedTransferType = (currentWithIncomingScMinusOutgoingScMinusScToSi diff transferTypeOnlyChangePatientTypeSc) ++ transferTypeOnlyChangePatientTypeSc Some(Hospital(newStaticHospitalState.code, newStaticHospitalState.schedule, currentWithIncomingScMinusOutgoingScMinusScToSiWithUpdatedTransferType)) case None => logger.error("updateSimulatedHospitalStateForCdf received None for newStaticHospitalStateOption !?") // Nothing new provided keep the current simulated state unchanged. We do not expect such call. 
Some(currentSimulatedHospitalState) } case None => newStaticHospitalStateOption match { case Some(newStaticHospitalState) => // bedsWithIncomingPatientTypeSi - DO NOTHING - These are transferred to PRT // bedsWithIncomingPatientTypeSc - TO ADD - new CDF incoming beds // bedsWithOutgoingPatientTypeSi - DO NOTHING - No current state: must be empty // bedsWithOutgoingPatientTypeSc - DO NOTHING - No current state: must be empty // patientTypeChangeFromScToSi - DO NOTHING - No current state: must be empty // patientTypeChangeFromSiToSc - DO NOTHING - No current state: must be empty // transferTypeOnlyChangePatientTypeSi - DO NOTHING - No current state: must be empty // transferTypeOnlyChangePatientTypeSc - DO NOTHING - No current state: must be empty Some(Hospital(newStaticHospitalState.code, newStaticHospitalState.schedule, bedsWithIncomingPatientTypeSc)) case None => None } } } /** * * ===================================================================================================================================== * ==== UPDATE ALGO PRT ==== * ===================================================================================================================================== * These events are determined on static HOSPITAL_STATEs NOT on simulated hospital state. * These events are either coming from PRT as HospitalState or from CDF as either: * {TransferRequestCreate, TransferRequestUpdate, TransferRequestDelete} * * * Returns a new copy of `currentSimulatedHospitalStateOption` updated with all provided information. */ def updateSimulatedHospitalStateForPrt( currentSimulatedHospitalStateOption: Option[Hospital], newStaticHospitalStateOption: Option[Hospital], bedsWithIncomingPatientTypeSi: List[Bed], bedsWithIncomingPatientTypeSc: List[Bed], bedsWithOutgoingPatientTypeSi: List[Bed], bedsWithOutgoingPatientTypeSc: List[Bed], patientTypeChangeFromScToSi: List[Bed], patientTypeChangeFromSiToSc: List[Bed], bedsWithTransferTypeOnlyChangePatientTypeSi: List[Bed], bedsWithTransferTypeOnlyChangePatientTypeSc: List[Bed]): Option[Hospital] = { currentSimulatedHospitalStateOption match { case Some(currentSimulatedHospitalState) => newStaticHospitalStateOption match { case Some(newStaticHospitalState) => // bedsWithIncomingPatientTypeSi - TO ADD - ( Directly incoming or transferred from CDF) //val bedsWithIncomingSi = currentSimulatedHospitalState.beds ++ bedsWithIncomingPatientTypeSi // Fix duplicate for CDF SC to SI => PRT SI to SC => PRT SC to SI scenario // 1) Remove any Bed with same patient number in current list as in incoming list. val currentBedsWithoutIncomingSi = currentSimulatedHospitalState.beds filterNot { bedsWithIncomingPatientTypeSi.contains(_) } val currentBedsWithoutIncoming = currentBedsWithoutIncomingSi filterNot { bedsWithIncomingPatientTypeSc.contains(_) } // 2) Add incoming list beds to current list. val bedsWithIncomingSi = currentBedsWithoutIncoming ++ bedsWithIncomingPatientTypeSi // bedsWithIncomingPatientTypeSc - TO ADD val bedsWithIncomingSiAndSc = bedsWithIncomingSi ++ bedsWithIncomingPatientTypeSc // bedsWithOutgoingPatientTypeSi - TO REMOVE - outgoing SI beds either at PRT or CDF // Fix removing actual transferred patients (not simulated) by forbidding removal of newStaticHospitalState beds. // Outgoing patients present in newStaticHospitalState.beds should only happen when an actual transfer is done from // CDF to PRT with outgoing CDF patient xxx incoming PRT patient xxx. Applying removal to PRT would be wrong. 
val bedsWithOutgoingPatientTypeSiAndNotActuallyTransferred = bedsWithOutgoingPatientTypeSi filterNot { newStaticHospitalState.beds.contains(_) } val bedsWithIncomingMinusOutgoingSi = bedsWithIncomingSiAndSc diff bedsWithOutgoingPatientTypeSiAndNotActuallyTransferred // bedsWithOutgoingPatientTypeSc - TO REMOVE - outgoing SC beds either at PRT or CDF (SI to SC) // Fix removing actual transferred patients (not simulated) by forbidding removal of newStaticHospitalState beds. val bedsWithOutgoingPatientTypeScAndNotActuallyTransferred = bedsWithOutgoingPatientTypeSc filterNot { newStaticHospitalState.beds.contains(_) } val bedsWithIncomingMinusOutgoing = bedsWithIncomingMinusOutgoingSi diff bedsWithOutgoingPatientTypeScAndNotActuallyTransferred // patientTypeChangeFromScToSi - TO UPDATE - beds with type change val bedsWithIncomingMinusOutgoingWithScToSiUpdate = (bedsWithIncomingMinusOutgoing diff patientTypeChangeFromScToSi) ++ patientTypeChangeFromScToSi // patientTypeChangeFromSiToSc - TO UPDATE - beds with type change val bedsWithIncomingMinusOutgoingWithPatTypeChange = (bedsWithIncomingMinusOutgoingWithScToSiUpdate diff patientTypeChangeFromSiToSc) ++ patientTypeChangeFromSiToSc // transferTypeOnlyChangePatientTypeSi - TO UPDATE - beds with transfer type change val bedsWithIncomingMinusOutgoingWithPatTypeChangeAndTransferTypeChangeForSi = (bedsWithIncomingMinusOutgoingWithPatTypeChange diff bedsWithTransferTypeOnlyChangePatientTypeSi) ++ bedsWithTransferTypeOnlyChangePatientTypeSi // transferTypeOnlyChangePatientTypeSi - TO UPDATE - beds with transfer type change val bedsWithIncomingMinusOutgoingWithPatTypeChangeAndTransferTypeChange = (bedsWithIncomingMinusOutgoingWithPatTypeChangeAndTransferTypeChangeForSi diff bedsWithTransferTypeOnlyChangePatientTypeSc) ++ bedsWithTransferTypeOnlyChangePatientTypeSc Some(Hospital(newStaticHospitalState.code, newStaticHospitalState.schedule, bedsWithIncomingMinusOutgoingWithPatTypeChangeAndTransferTypeChange)) case None => logger.error("updateSimulatedHospitalStateForPrt received None for newStaticHospitalStateOption !?") // Nothing new provided keep the current simulated state unchanged. We do not expect such call. 
Some(currentSimulatedHospitalState) } case None => newStaticHospitalStateOption match { case Some(newStaticHospitalState) => // bedsWithIncomingPatientTypeSi - TO ADD - new incoming beds // bedsWithIncomingPatientTypeSc - TO ADD - new incoming beds val bedsIncoming = bedsWithIncomingPatientTypeSi ++ bedsWithIncomingPatientTypeSc // bedsWithOutgoingPatientTypeSi - DO NOTHING - No current state: must be empty // bedsWithOutgoingPatientTypeSc - DO NOTHING - No current state: must be empty // patientTypeChangeFromScToSi - DO NOTHING - No current state: must be empty // patientTypeChangeFromSiToSc - DO NOTHING - No current state: must be empty // transferTypeOnlyChangePatientTypeSi - DO NOTHING - No current state: must be empty // transferTypeOnlyChangePatientTypeSc - DO NOTHING - No current state: must be empty Some(Hospital(newStaticHospitalState.code, newStaticHospitalState.schedule, bedsIncoming)) case None => None } } } /** * Builds `ELFIN` of CLASSE='HOSPITAL_STATE' for `elfinHospitalStateTemplate: ELFIN`, `simulationId: String`, ``, `hospitalState: Hospital` */ def buildHospitalStateElfin(elfinHospitalStateTemplate: ELFIN, simulationId: String, hospitalState: Hospital): Future[ELFIN] = { val elfinHospitalStateWithIdFuture: Future[ELFIN] = ElfinUtil.assignElfinId(elfinHospitalStateTemplate) val elfinHospitalStateWithBedsFuture = elfinHospitalStateWithIdFuture.map { elfinHospitalStateWithId => // Assign ID_G: G20150114160000006 to have ELFIN_SIMULATION_NATURE store in a collection distinct // from end users recorded HOSPITAL_STATE ELFINs. val elfinHospitalStateWithUpdatedID_G = ElfinUtil.replaceElfinID_G(elfinHospitalStateWithId, Constants.ELFIN_HOSPITAL_STATE_SIMULATION_COLLECTION_ID) val elfinHospitalStateWithNewNatureGroupeSource = ElfinUtil.replaceElfinNatureGroupeSource(elfin = elfinHospitalStateWithUpdatedID_G, newNature = Constants.ELFIN_HOSPITAL_STATE_SIMULATION_NATURE, newGroupe = elfinHospitalStateWithUpdatedID_G.GROUPE, newSource = Some(simulationId)) // bedsPosStartIndex starts at 2 as we manually add lMeta with POS=1 at the head of lSeq val bedsHospitalWrapperElfin = HospitalHelper.toElfin(hospital = hospitalState, bedsPosStartIndex = 2) val identifiantHospitalState = IDENTIFIANT(AUT = Some("FluxPatients - Simulator"), NOM = None, ORIGINE = None, OBJECTIF = None, DE = Option(DateUtil.getIsoDateFormatterWithoutTz.format(hospitalState.schedule))) val elfinHospitalStateWithIdentifiant = ElfinUtil.replaceElfinIdentifiant(elfinHospitalStateWithNewNatureGroupeSource, identifiantHospitalState) val lSeq: Seq[ch.bsisa.hyperbird.model.L] = bedsHospitalWrapperElfin.CARACTERISTIQUE.get.FRACTION.get.L // Meta-data identifying hospital val lMetaXmlElem = <L POS="1"> <!-- Code ALIAS (Stable) --> <C POS="1">{ hospitalState.code }</C> <!-- Nom NOM (full name) --> <C POS="2">N/A</C> <!-- Id --> <C POS="3">N/A</C> <!-- ID_G --> <C POS="4">N/A</C> </L> val lMeta = ElfinFormat.lFromXml(lMetaXmlElem) val lSeqWithMeta: Seq[ch.bsisa.hyperbird.model.L] = lMeta +: lSeq // TODO: we need L[0] to match Hospital meta-data unlike TRANSFER //val elfinHospitalStateWithBeds = ElfinUtil.replaceElfinCaracteristiqueFractionL(elfinHospitalStateWithIdentifiant, bedsHospitalWrapperElfin.CARACTERISTIQUE.get.FRACTION.get.L) val elfinHospitalStateWithBeds = ElfinUtil.replaceElfinCaracteristiqueFractionL(elfinHospitalStateWithIdentifiant, lSeqWithMeta) elfinHospitalStateWithBeds } elfinHospitalStateWithBedsFuture } /** * Builds `ELFIN` of CLASSE='TRANSFER' given provided parameters. 
*/ def buildTransferElfin(elfinTransferTemplate: ELFIN, simulationId: String, nature: String, fromHospitalCode: String, toHospitalCode: String, schedule: Date, beds: List[Bed]): Future[ELFIN] = { val elfinTransferWithIdFuture: Future[ELFIN] = ElfinUtil.assignElfinId(elfinTransferTemplate) val elfinTransferWithBedsFuture = elfinTransferWithIdFuture.map { elfinTransferWithId => val elfinTransferWithNewNatureGroupeSource = ElfinUtil.replaceElfinNatureGroupeSource(elfin = elfinTransferWithId, newNature = nature, newGroupe = elfinTransferWithId.GROUPE, newSource = Some(simulationId)) val bedsHospitalWrapper = Hospital(code = fromHospitalCode, schedule = schedule, beds = beds) val bedsHospitalWrapperElfin = HospitalHelper.toElfin(bedsHospitalWrapper) val identifiantTransfer = IDENTIFIANT(AUT = Some("FluxPatients - Simulator"), NOM = None, ORIGINE = Option(fromHospitalCode), OBJECTIF = Option(toHospitalCode), DE = Option(DateUtil.getIsoDateFormatterWithoutTz.format(schedule))) val elfinTransferWithIdentifiant = ElfinUtil.replaceElfinIdentifiant(elfinTransferWithNewNatureGroupeSource, identifiantTransfer) val elfinTransferWithBeds = ElfinUtil.replaceElfinCaracteristiqueFractionL(elfinTransferWithIdentifiant, bedsHospitalWrapperElfin.CARACTERISTIQUE.get.FRACTION.get.L) elfinTransferWithBeds } elfinTransferWithBedsFuture } /** * Create SIMULATION database entry and return a the corresponding ELFIN.Id */ def createSimulationDatabaseEntry(author: Option[String] = None, dateFrom: String, dateTo: String): Future[String] = { val simulationIdFutureFuture = ElfinDAO.getNewFromCatalogue("SIMULATION").flatMap { simulationElfinTemplate => val simulationIdFuture = buildSimulationElfin(simulationElfinTemplate, author, dateFrom, dateTo).map { simulationElfin => ElfinDAO.create(simulationElfin) simulationElfin.Id } simulationIdFuture } simulationIdFutureFuture } /** * Update SIMULATION database entry with HospitalSimulationSummary */ def updateSimulationDatabaseEntry(simulationId: String, hssList: List[HospitalSimulationSummary]): Unit = { logger.info(s">>>> updateSimulationDatabaseEntry(simulationId = ${simulationId})") val simulationElfinFuture = XQueryWSHelper.find(WSQueries.elfinQuery(Constants.ELFIN_SIMULATION_COLLECTION_ID, elfinId = simulationId)) simulationElfinFuture.map { simulationElfin => val currLSeq = simulationElfin.CARACTERISTIQUE.get.FRACTION.get.L val newLXmlElements = for ((hss, i) <- hssList zipWithIndex) yield { val newLXmlElement = <L POS={ (i + currLSeq.size + 1).toString }> <!-- Code ALIAS (Stable) --> <C POS="1">{ hss.hospitalCode }</C> <!-- Total incoming SI --> <C POS="2">{ hss.totalIncomingSiPatient }</C> <!-- Total incoming SC --> <C POS="3">{ hss.totalIncomingScPatient }</C> <!-- Total outgoing SI --> <C POS="4">{ hss.totalOutgoingSiPatient }</C> <!-- Total outgoing SC --> <C POS="5">{ hss.totalOutgoingScPatient }</C> </L> newLXmlElement } val newL = for (newLXmlElement <- newLXmlElements) yield { ElfinFormat.lFromXml(newLXmlElement) } val newLSeq = currLSeq ++ newL val updatedSimulationElfin = ElfinUtil.replaceElfinCaracteristiqueFractionL(elfin = simulationElfin, newLSeq = newLSeq) ElfinDAO.update(updatedSimulationElfin) } } /** * Builds `ELFIN` of CLASSE='SIMULATION' given provided parameters. 
*/ def buildSimulationElfin(elfinSimulationTemplate: ELFIN, author: Option[String] = None, dateFrom: String, dateTo: String): Future[ELFIN] = { val simulationElfinWithIdFuture: Future[ELFIN] = ElfinUtil.assignElfinId(elfinSimulationTemplate) val simulationElfinWithCaracteristiqueFuture: Future[ELFIN] = simulationElfinWithIdFuture.map { simulationElfinWithId => val identifiantSimulation = IDENTIFIANT(AUT = author, DE = Option(DateUtil.getIsoDateFormatterWithoutTz.format(new Date()))) val simulationElfinWithIdentifiant = ElfinUtil.replaceElfinIdentifiant(simulationElfinWithId, identifiantSimulation) val characteristicsXmlElem = <CARACTERISTIQUE> <FRACTION> <!-- Simulation parameters --> <L POS="1"> <!-- Simulation parameter date from --> <C POS="1">{ DateUtil.getIsoDateFormatterWithoutTz.format(DateUtil.hbDateFormat.parse(dateFrom)) }</C> <!-- Simulation parameter date to --> <C POS="2">{ DateUtil.getIsoDateFormatterWithoutTz.format(DateUtil.hbDateFormat.parse(dateTo)) }</C> </L> </FRACTION> </CARACTERISTIQUE> val caracteristique = ElfinFormat.caracteristiqueFromXml(characteristicsXmlElem) val simulationElfinWithCaracteristique = ElfinUtil.replaceElfinCaracteristique(simulationElfinWithIdentifiant, caracteristique) simulationElfinWithCaracteristique } simulationElfinWithCaracteristiqueFuture } /** * HospitalSimulationSummary update helper */ def updateHospitalSimulationSummary(hospitalCode: String, currentHss: Option[HospitalSimulationSummary], bedsWithIncomingPatientTypeSi: List[Bed], bedsWithIncomingPatientTypeSc: List[Bed], bedsWithOutgoingPatientTypeSi: List[Bed], bedsWithOutgoingPatientTypeSc: List[Bed]): HospitalSimulationSummary = { val newHss = currentHss match { case Some(hss) => HospitalSimulationSummary(hospitalCode = hss.hospitalCode, totalIncomingSiPatient = hss.totalIncomingSiPatient + bedsWithIncomingPatientTypeSi.size, totalIncomingScPatient = hss.totalIncomingScPatient + bedsWithIncomingPatientTypeSc.size, totalOutgoingSiPatient = hss.totalOutgoingSiPatient + bedsWithOutgoingPatientTypeSi.size, totalOutgoingScPatient = hss.totalOutgoingScPatient + bedsWithOutgoingPatientTypeSc.size) case None => HospitalSimulationSummary(hospitalCode = hospitalCode, totalIncomingSiPatient = bedsWithIncomingPatientTypeSi.size, totalIncomingScPatient = bedsWithIncomingPatientTypeSc.size, totalOutgoingSiPatient = bedsWithOutgoingPatientTypeSi.size, totalOutgoingScPatient = bedsWithOutgoingPatientTypeSc.size) } newHss } }
bsisa/hb-api
app/ch/bsisa/hyperbird/patman/simulations/model/HospitalHelper.scala
Scala
gpl-2.0
41,640
package org.jetbrains.plugins.scala

import com.intellij.openapi.extensions.ExtensionPointName

import scala.jdk.CollectionConverters._

/**
 * Handy base class for declaring extension points.
 */
abstract class ExtensionPointDeclaration[T](private val name: String) {
  private val extensionPointName = ExtensionPointName.create[T](name)

  def implementations: Seq[T] = {
    extensionPointName.getExtensionList.asScala.toSeq
  }
}
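// A hedged usage sketch (the listener trait and extension point id are illustrative, not
// taken from the IntelliJ Scala plugin): declaring an extension point for a hypothetical
// listener interface and iterating over the registered implementations. The id would also
// have to be registered in the plugin.xml of the hosting plugin.
trait MyProjectListener {
  def projectOpened(name: String): Unit
}

object MyProjectListener
    extends ExtensionPointDeclaration[MyProjectListener]("org.example.myProjectListener") {

  def notifyAll(projectName: String): Unit =
    implementations.foreach(_.projectOpened(projectName))
}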
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/ExtensionPointDeclaration.scala
Scala
apache-2.0
435
//############################################################################ // Serialization //############################################################################ object Serialize { @throws(classOf[java.io.IOException]) def write[A](o: A): Array[Byte] = { val ba = new java.io.ByteArrayOutputStream(512) val out = new java.io.ObjectOutputStream(ba) out.writeObject(o) out.close() ba.toByteArray() } @throws(classOf[java.io.IOException]) @throws(classOf[ClassNotFoundException]) def read[A](buffer: Array[Byte]): A = { val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) in.readObject().asInstanceOf[A] } def check[A, B](x: A, y: B): Unit = { println("x = " + x) println("y = " + y) println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x)) assert((x equals y) && (y equals x)) println() } } import Serialize.* //############################################################################ // Test classes in package "scala" object Test1_scala { private def arrayToString[A](arr: Array[A]): String = arr.mkString("Array[",",","]") private def arrayEquals[A, B](a1: Array[A], a2: Array[B]): Boolean = (a1.length == a2.length) && (Iterator.range(0, a1.length) forall { i => a1(i) == a2(i) }) object WeekDay extends Enumeration { type WeekDay = Value val Monday, Tuesday, Wednesday, Thusday, Friday, Saturday, Sunday = Value } import WeekDay._, BigDecimal._, RoundingMode.* // in alphabetic order try { // Array val a1 = Array(1, 2, 3) val _a1: Array[Int] = read(write(a1)) println("a1 = " + arrayToString(a1)) println("_a1 = " + arrayToString(_a1)) println("arrayEquals(a1, _a1): " + arrayEquals(a1, _a1)) println() // Either val e1 = Left(1) val _e1: Either[Int, String] = read(write(e1)) println("e1 = " + e1) println("_e1 = " + _e1) println("e1 eq _e1: " + (e1 eq _e1) + ", _e1 eq e1: " + (_e1 eq e1)) println("e1 equals _e1: " + (e1 equals _e1) + ", _e1 equals e1: " + (_e1 equals e1)) println() // Enumeration val x7 = BigDecimal.RoundingMode val y7: RoundingMode.type = read(write(x7)) println("x7 = " + x7) println("y7 = " + y7) println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7)) println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7)) println() val x8 = WeekDay val y8: WeekDay.type = read(write(x8)) println("x8 = " + x8) println("y8 = " + y8) println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8)) println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8)) println() val x9 = UP val y9: RoundingMode = read(write(x9)) println("x9 = " + x9) println("y9 = " + y9) println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9)) println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9)) println() val x10 = Monday val y10: WeekDay = read(write(x10)) println("x10 = " + x10) println("y10 = " + y10) println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10)) println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10)) println() println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9)) println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9)) println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9)) println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9)) println() // Function val f1 = { (x: Int) => 2 * x } val _f1: Function[Int, Int] = read(write(f1)) println("f1 = <na>") println("_f1 = <na>") println("f1(2): " + f1(2) + ", _f1(2): " + _f1(2)) println() // List val xs0 = 
List(1, 2, 3) val _xs0: List[Int] = read(write(xs0)) println("xs0 = " + xs0) println("_xs0 = " + _xs0) println("xs0 eq _xs0: " + (xs0 eq _xs0) + ", _xs0 eq xs0: " + (_xs0 eq xs0)) println("xs0 equals _xs0: " + (xs0 equals _xs0) + ", _xs0 equals xs0: " + (_xs0 equals xs0)) println() val xs1 = Nil val _xs1: List[Nothing] = read(write(xs1)) println("xs1 = " + xs1) println("_xs1 = " + _xs1) println("xs1 eq _xs1: " + (xs1 eq _xs1) + ", _xs1 eq xs1: " + (_xs1 eq xs1)) println() // Option val o1 = None val _o1: Option[Nothing] = read(write(o1)) println("o1 = " + o1) println("_o1 = " + _o1) println("o1 eq _o1: " + (o1 eq _o1) + ", _o1 eq o1: " + (_o1 eq o1)) println() val o2 = Some(1) val _o2: Option[Int] = read(write(o2)) println("o2 = " + o2) println("_o2 = " + _o2) println("o2 eq _o2: " + (o2 eq _o2) + ", _o2 eq o2: " + (_o2 eq o2)) println("o2 equals _o2: " + (o2 equals _o2) + ", _o2 equals o2: " + (_o2 equals o2)) println() /* // Responder val r1 = Responder.constant("xyz") val _r1: Responder[String] = read(write(r1)) check(r1, _r1) */ // Symbol val s1 = Symbol("hello") val _s1: Symbol = read(write(s1)) println("s1 = " + s1) println("_s1 = " + _s1) println("s1 eq _s1: " + (s1 eq _s1) + ", _s1 eq s1: " + (_s1 eq s1)) println("s1 equals _s1: " + (s1 equals _s1) + ", _s1 equals s1: " + (_s1 equals s1)) println() // Tuple val t1 = ("BannerLimit", 12345) val _t1: (String, Int) = read(write(t1)) println("t1 = " + t1) println("_t1 = " + _t1) println("t1 eq _t1: " + (t1 eq _t1) + ", _t1 eq t1: " + (_t1 eq t1)) println("t1 equals _t1: " + (t1 equals _t1) + ", _t1 equals t1: " + (_t1 equals t1)) println() } catch { case e: Exception => println("Error in Test1_scala: " + e) throw e } } //############################################################################ // Test classes in package "scala.collection.immutable" object Test2_immutable { import scala.collection.immutable.{ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap, SortedSet, LazyList, TreeMap, TreeSet, Vector} // in alphabetic order try { // BitSet val bs1 = BitSet.empty + 1 + 2 val _bs1: BitSet = read(write(bs1)) check(bs1, _bs1) val bs2 = { val bs = new collection.mutable.BitSet() bs += 2; bs += 3 bs.toImmutable } val _bs2: BitSet = read(write(bs2)) check(bs2, _bs2) // HashMap val hm1 = HashMap.empty[Int, String] ++ List(1 -> "A", 2 -> "B", 3 -> "C") val _hm1: HashMap[Int, String] = read(write(hm1)) check(hm1, _hm1) // HashSet val hs1 = HashSet.empty[Int] + 1 + 2 val _hs1: HashSet[Int] = read(write(hs1)) check(hs1, _hs1) // List val xs1 = List(("buffers", 20), ("layers", 2), ("title", 3)) val _xs1: List[(String, Int)] = read(write(xs1)) check(xs1, _xs1) // ListMap val lm1 = new ListMap[String, Int] ++ List("buffers" -> 20, "layers" -> 2, "title" -> 3) val _lm1: ListMap[String, Int] = read(write(lm1)) check(lm1, _lm1) // ListSet val ls1 = new ListSet[Int] + 3 + 5 val _ls1: ListSet[Int] = read(write(ls1)) check(ls1, _ls1) // Queue val q1 = Queue("a", "b", "c") val _q1: Queue[String] = read(write(q1)) check(q1, _q1) // Range val r1 = 0 until 10 val _r1: Range = read(write(r1)) check(r1, _r1) val r2 = Range.Long(0L, 10L, 1) val _r2: r2.type = read(write(r2)) check(r2, _r2) // SortedMap val sm1 = SortedMap.empty[Int, String] ++ List(2 -> "B", 3 -> "C", 1 -> "A") val _sm1: SortedMap[Int, String] = read(write(sm1)) check(sm1, _sm1) // SortedSet val ss1 = SortedSet.empty[Int] + 2 + 3 + 1 val _ss1: SortedSet[Int] = read(write(ss1)) check(ss1, _ss1) // LazyList val st1 = LazyList.range(0, 10) val _st1: LazyList[Int] = 
read(write(st1)) check(st1, _st1) // TreeMap val tm1 = new TreeMap[Int, String] + (42 -> "FortyTwo") val _tm1: TreeMap[Int, String] = read(write(tm1)) check(tm1, _tm1) // TreeSet val ts1 = new TreeSet[Int]() + 2 + 0 val _ts1: TreeSet[Int] = read(write(ts1)) check(ts1, _ts1) // Vector val v1 = Vector(Symbol("a"), Symbol("b"), Symbol("c")) val _v1: Vector[Symbol] = read(write(v1)) check(v1, _v1) } catch { case e: Exception => println("Error in Test2_immutable: " + e) throw e } } //############################################################################ // Test classes in package "scala.collection.mutable" object Test3_mutable { import scala.reflect.ClassTag import scala.collection.mutable.{ ArrayBuffer, ArrayBuilder, BitSet, HashMap, HashSet, LinkedHashMap, LinkedHashSet, ListBuffer, Queue, Stack, StringBuilder, ArraySeq, TreeSet} import scala.collection.concurrent.TrieMap // in alphabetic order try { // ArrayBuffer val ab1 = new ArrayBuffer[String] ab1 ++= List("one", "two") val _ab1: ArrayBuffer[String] = read(write(ab1)) check(ab1, _ab1) // ArrayBuilder val abu1 = ArrayBuilder.make[Long] val _abu1: ArrayBuilder[ClassTag[Long]] = read(write(abu1)) check(abu1, _abu1) val abu2 = ArrayBuilder.make[Float] val _abu2: ArrayBuilder[ClassTag[Float]] = read(write(abu2)) check(abu2, _abu2) // BitSet val bs1 = new BitSet() bs1 += 0 bs1 += 8 bs1 += 9 val _bs1: BitSet = read(write(bs1)) check(bs1, _bs1) // HashMap val hm1 = new HashMap[String, Int] hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator val _hm1: HashMap[String, Int] = read(write(hm1)) check(hm1, _hm1) // HashSet val hs1 = new HashSet[String] hs1 ++= List("layers", "buffers", "title").iterator val _hs1: HashSet[String] = read(write(hs1)) check(hs1, _hs1) // LinkedHashMap { val lhm1 = new LinkedHashMap[String, Int] val list = List(("Linked", 1), ("Hash", 2), ("Map", 3)) lhm1 ++= list.iterator val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1)) check(lhm1, _lhm1) check(lhm1.toSeq, _lhm1.toSeq) // check elements order check(lhm1.toSeq, list) // check elements order } // LinkedHashSet { val lhs1 = new LinkedHashSet[String] val list = List("layers", "buffers", "title") lhs1 ++= list.iterator val _lhs1: LinkedHashSet[String] = read(write(lhs1)) check(lhs1, _lhs1) check(lhs1.toSeq, _lhs1.toSeq) // check elements order check(lhs1.toSeq, list) // check elements order } // ListBuffer val lb1 = new ListBuffer[String] lb1 ++= List("white", "black") val _lb1: ListBuffer[String] = read(write(lb1)) check(lb1, _lb1) // Queue val q1 = new Queue[Int] q1 ++= List(20, 2, 3).iterator val _q1: Queue[Int] = read(write(q1)) check(q1, _q1) // Stack val s1 = new Stack[Int] s1 pushAll q1 val _s1: Stack[Int] = read(write(s1)) check(s1, _s1) // StringBuilder val sb1 = new StringBuilder sb1 append "abc" val _sb1: StringBuilder = read(write(sb1)) check(sb1, _sb1) // ArraySeq val wa1 = ArraySeq.make(Array(1, 2, 3)) val _wa1: ArraySeq[Int] = read(write(wa1)) check(wa1, _wa1) // TreeSet val ts1 = TreeSet[Int]() ++= Array(1, 2, 3) val _ts1: TreeSet[Int] = read(write(ts1)) check(ts1, _ts1) // concurrent.TrieMap val ct1 = TrieMap[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three") val _ct1: TrieMap[Int, String] = read(write(ct1)) check(ct1, _ct1) } catch { case e: Exception => println("Error in Test3_mutable: " + e) throw e } } //############################################################################ // Test user-defined classes WITHOUT nesting class Person(_name: String) extends Serializable { private var name = _name override def toString() = name 
override def equals(that: Any): Boolean = that.isInstanceOf[Person] && (name == that.asInstanceOf[Person].name) } class Employee(_name: String) extends Serializable { private var name = _name override def toString() = name } object bob extends Employee("Bob") object Test5 { val x1 = new Person("Tim") val x2 = bob try { val y1: Person = read(write(x1)) val y2: Employee = read(write(x2)) check(x1, y1) check(x2, y2) } catch { case e: Exception => println("Error in Test5: " + e) } } //############################################################################ // Test user-defined classes WITH nesting object Test6 { object bill extends Employee("Bill") { val x = paul } object paul extends Person("Paul") { val x = 4 // bill; => StackOverflowException !!! } val x1 = new Person("John") val x2 = bill val x3 = paul try { val y1: Person = read(write(x1)) val y2: Employee = read(write(x2)) val y3: Person = read(write(x3)) check(x1, y1) check(x2, y2) check(x3, y3) } catch { case e: Exception => println("Error in Test6: " + e) } } //############################################################################ // Nested objects cannot get readresolve automatically because after deserialization // they would be null (they are treated as lazy vals) class Outer extends Serializable { object Inner extends Serializable } object Test7 { val x = new Outer x.Inner // initialize val y:Outer = read(write(x)) if (y.Inner == null) println("Inner object is null") } // Verify that transient lazy vals don't get serialized class WithTransient extends Serializable { @transient lazy val a1 = 1 @transient private lazy val a2 = 2 @transient object B extends Serializable @transient private object C extends Serializable def test = { println(a1) println(a2) if (B == null || C == null) println("Transient nested object failed to serialize properly") } } object Test8 { val x = new WithTransient x.test try { val y:WithTransient = read(write(x)) y.test } catch { case e: Exception => println("Error in Test8: " + e) } } //############################################################################ // Test code object Test { def main(args: Array[String]): Unit = { Test1_scala Test2_immutable Test3_mutable Test5 Test6 Test7 Test8 } }
lampepfl/dotty
tests/run/serialization-new.scala
Scala
apache-2.0
14,415
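// --- Editor's illustrative sketch (not part of the dataset record above) ---
// A minimal usage example for the Serialize helper defined in tests/run/serialization-new.scala
// above. The case class Point is hypothetical and only serves as a payload; the sketch assumes it
// sits next to the test (same default package) so that Serialize is in scope.
object SerializeUsageSketch {
  import Serialize._

  // Hypothetical payload; case classes are Serializable by default.
  case class Point(x: Int, y: Int)

  def main(args: Array[String]): Unit = {
    val p = Point(1, 2)
    val bytes: Array[Byte] = write(p) // Java serialization into an in-memory byte array
    val q: Point = read(bytes)        // deserialize back into a Point
    check(p, q)                       // prints both values and asserts structural equality
  }
}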
/* ************************************************************************************* * Copyright 2011 Normation SAS ************************************************************************************* * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU Affero GPL v3, the copyright holders add the following * Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3 * licence, when you create a Related Module, this Related Module is * not considered as a part of the work and may be distributed under the * license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>. * ************************************************************************************* */ package com.normation.rudder.services.policies import scala.Option.option2Iterable import org.joda.time.DateTime import com.normation.cfclerk.domain.Technique import com.normation.cfclerk.domain.TrackerVariable import com.normation.cfclerk.domain.Variable import com.normation.inventory.domain.NodeId import com.normation.rudder.domain.Constants import com.normation.rudder.domain.nodes.NodeInfo import com.normation.rudder.domain.parameters.GlobalParameter import com.normation.rudder.domain.parameters.ParameterName import com.normation.rudder.domain.policies._ import com.normation.rudder.domain.reports.RuleExpectedReports import com.normation.rudder.repository._ import com.normation.rudder.services.eventlog.HistorizationService import com.normation.rudder.services.nodes.NodeInfoService import com.normation.rudder.services.policies.nodeconfig.NodeConfiguration import com.normation.rudder.services.policies.nodeconfig.NodeConfigurationCache import com.normation.rudder.services.policies.nodeconfig.NodeConfigurationService import com.normation.rudder.services.policies.nodeconfig.ParameterForConfiguration import com.normation.rudder.services.reports.ReportingService import com.normation.utils.Control._ import com.normation.utils.HashcodeCaching import net.liftweb.common._ import com.normation.rudder.domain.parameters.GlobalParameter import scala.collection.immutable.TreeMap import com.normation.inventory.services.core.ReadOnlyFullInventoryRepository import com.normation.inventory.domain.NodeInventory import com.normation.inventory.domain.AcceptedInventory import com.normation.inventory.domain.NodeInventory import com.normation.rudder.domain.parameters.GlobalParameter import com.normation.rudder.services.policies.nodeconfig.NodeConfiguration import com.normation.rudder.domain.reports.NodeAndConfigId import com.normation.inventory.domain.NodeId import 
com.normation.rudder.domain.reports.NodeConfigId import com.normation.rudder.reports.ComplianceMode import com.normation.rudder.reports.ComplianceModeService import com.normation.rudder.reports.AgentRunIntervalService import com.normation.rudder.reports.AgentRunInterval /** * The main service which deploy modified rules and * their dependencies. */ trait DeploymentService extends Loggable { /** * All mighy method that take all modified rules, find their * dependencies, proccess ${vars}, build the list of node to update, * update nodes. * * Return the list of node IDs actually updated. * */ def deploy() : Box[Set[NodeId]] = { logger.info("Start policy generation, checking updated rules") val initialTime = System.currentTimeMillis val rootNodeId = Constants.ROOT_POLICY_SERVER_ID val result = for { //fetch all - yep, memory is cheap... (TODO: size of that for 1000 nodes, 100 rules, 100 directives, 100 groups ?) allRules <- findDependantRules() ?~! "Could not find dependant rules" allNodeInfos <- getAllNodeInfos ?~! "Could not get Node Infos" allInventories <- getAllInventories ?~! "Could not get Node inventories" directiveLib <- getDirectiveLibrary() ?~! "Could not get the directive library" groupLib <- getGroupLibrary() ?~! "Could not get the group library" allParameters <- getAllGlobalParameters ?~! "Could not get global parameters" timeFetchAll = (System.currentTimeMillis - initialTime) _ = logger.debug(s"All relevant information fetched in ${timeFetchAll}ms, start names historization.") historizeTime = System.currentTimeMillis historize <- historizeData(allRules, directiveLib, groupLib, allNodeInfos) timeHistorize = (System.currentTimeMillis - historizeTime) _ = logger.debug(s"Historization of names done in ${timeHistorize}ms, start to build rule values.") ruleValTime = System.currentTimeMillis ruleVals <- buildRuleVals(allRules, directiveLib, groupLib, allNodeInfos) ?~! "Cannot build Rule vals" timeRuleVal = (System.currentTimeMillis - ruleValTime) _ = logger.debug(s"RuleVals built in ${timeRuleVal}ms, start to expand their values.") globalSystemVarTime = System.currentTimeMillis globalSystemVariables <- buildGlobalSystemVariables() ?~! "Cannot build global system configuration" timeGlobalSystemVar = (System.currentTimeMillis - globalSystemVarTime) _ = logger.debug(s"Global system variables built in ${timeGlobalSystemVar}ms, start to build new node configurations.") buildConfigTime = System.currentTimeMillis globalRunInterval <- getGlobalAgentRun globalComplianceMode <- getGlobalComplianceMode config <- buildNodeConfigurations( ruleVals , allNodeInfos , allInventories , groupLib , allParameters , globalSystemVariables , globalRunInterval , globalComplianceMode ) ?~! "Cannot build target configuration node" timeBuildConfig = (System.currentTimeMillis - buildConfigTime) _ = logger.debug(s"Node's target configuration built in ${timeBuildConfig}, start to update rule values.") sanitizeTime = System.currentTimeMillis _ <- forgetOtherNodeConfigurationState(config.map(_.nodeInfo.id).toSet) ?~! "Cannot clean the configuration cache" sanitizedNodeConfig <- sanitize(config) ?~! "Cannot set target configuration node" timeSanitize = (System.currentTimeMillis - sanitizeTime) _ = logger.debug(s"RuleVals updated in ${timeSanitize} millisec, start to detect changes in node configuration.") beginTime = System.currentTimeMillis //that's the first time we actually write something in repos: new serial for updated rules nodeConfigCache <- getNodeConfigurationCache() ?~! 
"Cannot get the Configuration Cache" (updatedCrs, deletedCrs) <- detectUpdatesAndIncrementRuleSerial(sanitizedNodeConfig.values.toSeq, nodeConfigCache, directiveLib, allRules.map(x => (x.id, x)).toMap)?~! "Cannot detect the updates in the NodeConfiguration" serialedNodes = updateSerialNumber(sanitizedNodeConfig, updatedCrs.toMap) // Update the serial of ruleVals when there were modifications on Rules values // replace variables with what is really applied timeIncrementRuleSerial = (System.currentTimeMillis - beginTime) _ = logger.debug(s"Checked node configuration updates leading to rules serial number updates and serial number updated in ${timeIncrementRuleSerial}ms") writeTime = System.currentTimeMillis nodeConfigVersions = calculateNodeConfigVersions(serialedNodes.values.toSeq) //second time we write something in repos: updated node configuration writtenNodeConfigs <- writeNodeConfigurations(rootNodeId, serialedNodes, nodeConfigVersions, nodeConfigCache) ?~! "Cannot write configuration node" timeWriteNodeConfig = (System.currentTimeMillis - writeTime) _ = logger.debug(s"Node configuration written in ${timeWriteNodeConfig}ms, start to update expected reports.") reportTime = System.currentTimeMillis // need to update this part as well updatedNodeConfig = writtenNodeConfigs.map( _.nodeInfo.id ) expectedReports <- setExpectedReports(ruleVals, sanitizedNodeConfig.values.toSeq, nodeConfigVersions, updatedCrs.toMap, deletedCrs, updatedNodeConfig, new DateTime()) ?~! "Cannot build expected reports" timeSetExpectedReport = (System.currentTimeMillis - reportTime) _ = logger.debug(s"Reports updated in ${timeSetExpectedReport}ms") } yield { logger.debug("Timing summary:") logger.debug("Fetch all information : %10s ms".format(timeFetchAll)) logger.debug("Historize names : %10s ms".format(timeHistorize)) logger.debug("Build current rule values : %10s ms".format(timeRuleVal)) logger.debug("Build target configuration: %10s ms".format(timeBuildConfig)) logger.debug("Update rule vals : %10s ms".format(timeSanitize)) logger.debug("Increment rule serials : %10s ms".format(timeIncrementRuleSerial)) logger.debug("Write node configurations : %10s ms".format(timeWriteNodeConfig)) logger.debug("Save expected reports : %10s ms".format(timeSetExpectedReport)) writtenNodeConfigs.map( _.nodeInfo.id ) } logger.debug("Policy generation completed in %d millisec".format((System.currentTimeMillis - initialTime))) result } /** * Snapshot all information needed: * - node infos * - rules * - directives library * - groups library */ def getAllNodeInfos(): Box[Map[NodeId, NodeInfo]] def getDirectiveLibrary(): Box[FullActiveTechniqueCategory] def getGroupLibrary(): Box[FullNodeGroupCategory] def getAllGlobalParameters: Box[Seq[GlobalParameter]] def getAllInventories(): Box[Map[NodeId, NodeInventory]] def getGlobalComplianceMode(): Box[ComplianceMode] def getGlobalAgentRun : Box[AgentRunInterval] /** * Find all modified rules. * For them, find all directives with variables * referencing these rules. * Add them to the set of rules to return, and * recurse. * Stop when convergence is reached * * No modification on back-end are performed * (perhaps safe setting the "isModified" value to "true" for * all dependent CR). * */ def findDependantRules() : Box[Seq[Rule]] /** * Build the list of "CFclerkRuleVal" from a list of * rules. 
* These objects are a cache of all rules */ def buildRuleVals(rules: Seq[Rule], directiveLib: FullActiveTechniqueCategory, groupLib: FullNodeGroupCategory, allNodeInfos: Map[NodeId, NodeInfo]) : Box[Seq[RuleVal]] /** * Compute all the global system variable */ def buildGlobalSystemVariables() : Box[Map[String, Variable]] /** * From a list of ruleVal, find the list of all impacted nodes * with the actual Cf3PolicyDraftBean they will have. * Replace all ${node.varName} vars. */ def buildNodeConfigurations( ruleVals : Seq[RuleVal] , allNodeInfos : Map[NodeId, NodeInfo] , allInventories : Map[NodeId, NodeInventory] , groupLib : FullNodeGroupCategory , parameters : Seq[GlobalParameter] , globalSystemVariable : Map[String, Variable] , globalAgentRun : AgentRunInterval , globalComplianceMode : ComplianceMode ) : Box[(Seq[NodeConfiguration])] /** * Check the consistency of each NodeConfiguration. */ def sanitize(configurations:Seq[NodeConfiguration]) : Box[Map[NodeId, NodeConfiguration]] /** * Forget all other node configuration state. * If passed with an empty set, actually forget all node configuration. */ def forgetOtherNodeConfigurationState(keep: Set[NodeId]) : Box[Set[NodeId]] /** * Get the actual cached values for NodeConfiguration */ def getNodeConfigurationCache(): Box[Map[NodeId, NodeConfigurationCache]] /** * Detect changes in the NodeConfiguration, to trigger an increment in the related CR * The CR are updated in the LDAP * Must have all the NodeConfiguration in nodes * Returns two seq : the updated rule, and the deleted rule */ def detectUpdatesAndIncrementRuleSerial(nodes : Seq[NodeConfiguration], cache: Map[NodeId, NodeConfigurationCache], directiveLib: FullActiveTechniqueCategory, rules: Map[RuleId, Rule]) : Box[(Map[RuleId,Int], Seq[RuleId])] /** * Set all the serial number when needed (a change in CR) * Must have all the NodeConfiguration in nodes */ def updateSerialNumber(nodes : Map[NodeId, NodeConfiguration], rules : Map[RuleId, Int]) : Map[NodeId, NodeConfiguration] /** * Actually write the new configuration for the list of given node. * If the node target configuration is the same as the actual, nothing is done. * Else, promises are generated; * Return the list of configuration successfully written. 
 */
  def writeNodeConfigurations(rootNodeId: NodeId, allNodeConfig: Map[NodeId, NodeConfiguration], versions: Map[NodeId, NodeConfigId], cache: Map[NodeId, NodeConfigurationCache]) : Box[Set[NodeConfiguration]]

  /**
   * Set the expected reports for the rule
   * Caution : we can't handle deletion with this
   * @param ruleVal
   * @return
   */
  def setExpectedReports(
      ruleVal          : Seq[RuleVal]
    , nodeConfigs      : Seq[NodeConfiguration]
    , versions         : Map[NodeId, NodeConfigId]
    , updateCrs        : Map[RuleId, Int]
    , deletedCrs       : Seq[RuleId]
    , updatedNodeConfig: Set[NodeId]
    , generationTime   : DateTime
  ) : Box[Seq[RuleExpectedReports]]

  /**
   * Store groups and directives in the database
   */
  def historizeData(rules:Seq[Rule], directiveLib: FullActiveTechniqueCategory, groupLib: FullNodeGroupCategory, allNodeInfos: Map[NodeId, NodeInfo]) : Box[Unit]

  def calculateNodeConfigVersions(configs: Seq[NodeConfiguration]): Map[NodeId, NodeConfigId] = {
    configs.map(x => (x.nodeInfo.id, NodeConfigId(NodeConfigurationCache(x).hashCode.toString))).toMap
  }
}

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Implementation
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

class DeploymentServiceImpl (
    override val roRuleRepo: RoRuleRepository
  , override val woRuleRepo: WoRuleRepository
  , override val ruleValService : RuleValService
  , override val systemVarService: SystemVariableService
  , override val nodeConfigurationService : NodeConfigurationService
  , override val nodeInfoService : NodeInfoService
  , override val reportingService : ExpectedReportsUpdate
  , override val historizationService : HistorizationService
  , override val roNodeGroupRepository: RoNodeGroupRepository
  , override val roDirectiveRepository: RoDirectiveRepository
  , override val ruleApplicationStatusService: RuleApplicationStatusService
  , override val parameterService : RoParameterService
  , override val interpolatedValueCompiler:InterpolatedValueCompiler
  , override val roInventoryRepository: ReadOnlyFullInventoryRepository
  , override val complianceModeService : ComplianceModeService
  , override val agentRunService : AgentRunIntervalService
) extends DeploymentService with
  DeploymentService_findDependantRules_bruteForce with
  DeploymentService_buildRuleVals with
  DeploymentService_buildNodeConfigurations with
  DeploymentService_updateAndWriteRule with
  DeploymentService_setExpectedReports with
  DeploymentService_historization

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Follows: traits implementing each part of the deployment service
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/**
 * So. There are a lot of "hidden" dependencies,
 * so for now, we just return *ALL* rules.
 *
 * It might not scale very well.
 *
 * Later (3 years): in fact, perhaps most of the
 * time, being too smart is much slower.
* */ trait DeploymentService_findDependantRules_bruteForce extends DeploymentService { def roRuleRepo : RoRuleRepository def nodeInfoService: NodeInfoService def roNodeGroupRepository: RoNodeGroupRepository def roDirectiveRepository: RoDirectiveRepository def parameterService : RoParameterService def roInventoryRepository: ReadOnlyFullInventoryRepository def complianceModeService : ComplianceModeService def agentRunService : AgentRunIntervalService override def findDependantRules() : Box[Seq[Rule]] = roRuleRepo.getAll(true) override def getAllNodeInfos(): Box[Map[NodeId, NodeInfo]] = nodeInfoService.getAll override def getDirectiveLibrary(): Box[FullActiveTechniqueCategory] = roDirectiveRepository.getFullDirectiveLibrary() override def getGroupLibrary(): Box[FullNodeGroupCategory] = roNodeGroupRepository.getFullGroupLibrary() override def getAllGlobalParameters: Box[Seq[GlobalParameter]] = parameterService.getAllGlobalParameters() override def getAllInventories(): Box[Map[NodeId, NodeInventory]] = roInventoryRepository.getAllNodeInventories(AcceptedInventory) override def getGlobalComplianceMode(): Box[ComplianceMode] = complianceModeService.getComplianceMode override def getGlobalAgentRun(): Box[AgentRunInterval] = agentRunService.getAgentRun } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait DeploymentService_buildRuleVals extends DeploymentService { def ruleApplicationStatusService: RuleApplicationStatusService def ruleValService : RuleValService /** * Build the list of "CFclerkRuleVal" from a list of * rules. * These objects are a cache of all rules */ override def buildRuleVals(rules:Seq[Rule], directiveLib: FullActiveTechniqueCategory, groupLib: FullNodeGroupCategory, allNodeInfos: Map[NodeId, NodeInfo]) : Box[Seq[RuleVal]] = { val appliedRules = rules.filter(r => ruleApplicationStatusService.isApplied(r, groupLib, directiveLib, allNodeInfos) match { case _:AppliedStatus => true case _ => false }) for { rawRuleVals <- sequence(appliedRules) { rule => ruleValService.buildRuleVal(rule, directiveLib) } ?~! "Could not find configuration vals" } yield rawRuleVals } } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait DeploymentService_buildNodeConfigurations extends DeploymentService with Loggable { def systemVarService: SystemVariableService def roNodeGroupRepository: RoNodeGroupRepository def interpolatedValueCompiler:InterpolatedValueCompiler /** * This is the draft of the policy, not yet a cfengine policy, but a level of abstraction between both */ private[this] case class PolicyDraft( ruleId : RuleId , directiveId : DirectiveId , technique : Technique , variableMap : InterpolationContext => Box[Map[String, Variable]] , trackerVariable: TrackerVariable , priority : Int , serial : Int ) extends HashcodeCaching /* * parameters have to be taken appart: * * - they can be overriden by node - not handled here, it will be in the resolution of node * when implemented. Most likelly, we will have the information in the node info. And * in that case, we could just use an interpolation variable * * - they can be plain string => nothing to do * - they can contains interpolated strings: * - to node info parameters: ok * - to parameters : hello loops! 
*/ private[this] def buildParams(parameters: Seq[GlobalParameter]): Box[Map[ParameterName, InterpolationContext => Box[String]]] = { sequence(parameters) { param => for { p <- interpolatedValueCompiler.compile(param.value) ?~! s"Error when looking for interpolation variable in global parameter '${param.name}'" } yield { (param.name, p) } }.map( _.toMap) } /** * Build interpolation contexts. * * An interpolation context is a node-dependant * context for resolving ("expdanding", "binding") * interpolation variable in directive values. * * It's also the place where parameters are looked for * local overrides. */ private[this] def buildInterpolationContext( nodeIds : Set[NodeId] , allNodeInfos : Map[NodeId, NodeInfo] , allInventories : Map[NodeId, NodeInventory] , parameters : Map[ParameterName, InterpolationContext => Box[String]] , globalSystemVariables : Map[String, Variable] , globalAgentRun : AgentRunInterval , globalComplianceMode : ComplianceMode ): Map[NodeId, InterpolationContext] = { (nodeIds.flatMap { nodeId:NodeId => (for { nodeInfo <- Box(allNodeInfos.get(nodeId)) ?~! s"Node with ID ${nodeId.value} was not found" inventory <- Box(allInventories.get(nodeId)) ?~! s"Inventory for node with ID ${nodeId.value} was not found" policyServer <- Box(allNodeInfos.get(nodeInfo.policyServerId)) ?~! s"Node with ID ${nodeId.value} was not found" nodeContext <- systemVarService.getSystemVariables(nodeInfo, allNodeInfos, globalSystemVariables, globalAgentRun, globalComplianceMode : ComplianceMode) } yield { (nodeId, InterpolationContext( nodeInfo , policyServer , inventory , nodeContext , parameters ) ) }) match { case eb:EmptyBox => val e = eb ?~! s"Error while building target configuration node for node ${nodeId.value} which is one of the target of rules. Ignoring it for the rest of the process" logger.error(e.messageChain) None case x => x } }).toMap } /** * really, simply fetch all the global system variables */ override def buildGlobalSystemVariables() : Box[Map[String, Variable]] = { systemVarService.getGlobalSystemVariables() } /** * From a list of ruleVal, find the list of all impacted nodes * with the actual Cf3PolicyDraftBean they will have. * Replace all ${rudder.node.varName} vars, returns the nodes ready to be configured, and expanded RuleVal * allNodeInfos *must* contains the nodes info of every nodes */ override def buildNodeConfigurations( ruleVals : Seq[RuleVal] , allNodeInfos : Map[NodeId, NodeInfo] , allInventories : Map[NodeId, NodeInventory] , groupLib : FullNodeGroupCategory , parameters : Seq[GlobalParameter] , globalSystemVariables : Map[String, Variable] , globalAgentRun : AgentRunInterval , globalComplianceMode : ComplianceMode ) : Box[Seq[NodeConfiguration]] = { val interpolatedParameters = buildParams(parameters) match { case Full(x) => x case eb: EmptyBox => return eb ?~! "Can not parsed global parameter (looking for interpolated variables)" } //step 1: from RuleVals to expanded rules vals //1.1: group by nodes (because parameter expansion is node sensitive //1.2: for each node, build the node context //1.3: build node config, binding ${rudder.parameters} parameters //1.1: group by nodes val seqOfMapOfPolicyDraftByNodeId = ruleVals.map { ruleVal => val wantedNodeIds = groupLib.getNodeIds(ruleVal.targets, allNodeInfos) val nodeIds = wantedNodeIds.intersect(allNodeInfos.keySet) if(nodeIds.size != wantedNodeIds.size) { logger.error(s"Some nodes are in the target of rule ${ruleVal.ruleId.value} but are not present " + s"in the system. It looks like an inconsistency error. 
Ignored nodes: ${(wantedNodeIds -- nodeIds).map( _.value).mkString(", ")}") } val drafts: Seq[PolicyDraft] = ruleVal.directiveVals.map { directive => PolicyDraft( ruleId = ruleVal.ruleId , directiveId = directive.directiveId , technique = directive.technique , variableMap = directive.variables , trackerVariable= directive.trackerVariable , priority = directive.priority , serial = ruleVal.serial ) } nodeIds.map(id => (id, drafts)).toMap } //now, actually group by node, and also check //consistency: a node can't have two directives based on //different version of the same technique val policyDraftByNode: Map[NodeId, Seq[PolicyDraft]] = { val map = (Map.empty[NodeId, Seq[PolicyDraft]]/:seqOfMapOfPolicyDraftByNodeId){ case (global, newMap) => val g = global.map{ case (nodeId, seq) => (nodeId, seq ++ newMap.getOrElse(nodeId, Seq()))} //add node not yet in global val keys = newMap.keySet -- global.keySet val missing = newMap.filterKeys(k => keys.contains(k)) g ++ missing } //now, for each node, check for technique version consistency val notConsistent = map.values.flatMap { seq => // Group policydraft of a node by technique name val group = seq.groupBy(x => x.technique.id.name) // Filter this grouping by technique having two different version group.filter(x => x._2.groupBy(x => x.technique.id.version).size > 1).map(x => x._1) }.toSet if(notConsistent.nonEmpty) { return Failure(s"There are directives based on techniques with different versions applied to the same node, please correct the version for the following directive(s): ${notConsistent.mkString(", ")}") } else { map } } //1.2: for each node, build the interpolation context //this also give us the list of actual node to consider val interpolationContexts = { buildInterpolationContext( policyDraftByNode.keySet , allNodeInfos , allInventories , interpolatedParameters , globalSystemVariables , globalAgentRun , globalComplianceMode ) } //1.3: build node config, binding ${rudder.parameters} parameters val nodeConfigs = sequence(interpolationContexts.toSeq) { case (nodeId, context) => for { drafts <- Box(policyDraftByNode.get(nodeId)) ?~! "Promise generation algorithme error: cannot find back the configuration information for a node" /* * Clearly, here, we are evaluating parameters, and we are not using that just after in the * variable expansion, which mean that we are doing the same work again and again and again. * Moreover, we also are evaluating again and again parameters whose context ONLY depends * on other parameter, and not node config at all. Bad bad bad bad. 
* TODO: two stages parameter evaluation * - global * - by node * + use them in variable expansion (the variable expansion should have a fully evaluated InterpolationContext) */ parameters <- sequence(context.parameters.toSeq) { case (name, param) => for { p <- param(context) } yield { (name, p) } } cf3PolicyDrafts <- sequence(drafts) { draft => //bind variables draft.variableMap(context).map{ expandedVariables => RuleWithCf3PolicyDraft( ruleId = draft.ruleId , directiveId = draft.directiveId , technique = draft.technique , variableMap = expandedVariables , trackerVariable = draft.trackerVariable , priority = draft.priority , serial = draft.serial ) } } } yield { NodeConfiguration( nodeInfo = context.nodeInfo , policyDrafts = cf3PolicyDrafts.toSet , nodeContext = context.nodeContext , parameters = parameters.map { case (k,v) => ParameterForConfiguration(k, v) }.toSet , isRootServer = context.nodeInfo.id == context.policyServerInfo.id ) } } nodeConfigs } } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait DeploymentService_updateAndWriteRule extends DeploymentService { def nodeConfigurationService : NodeConfigurationService // def roRuleRepo: RoRuleRepository def woRuleRepo: WoRuleRepository /** * That methode remove node configurations for nodes not in allNodes. * Corresponding nodes are deleted from the repository of node configurations. * Return the updated map of all node configurations (really present). */ def purgeDeletedNodes(allNodes: Set[NodeId], allNodeConfigs: Map[NodeId, NodeConfiguration]) : Box[Map[NodeId, NodeConfiguration]] = { val nodesToDelete = allNodeConfigs.keySet -- allNodes for { deleted <- nodeConfigurationService.deleteNodeConfigurations(nodesToDelete) } yield { allNodeConfigs -- nodesToDelete } } /** * Check the consistency of each NodeConfiguration. */ def sanitize(targetConfigurations:Seq[NodeConfiguration]) : Box[Map[NodeId, NodeConfiguration]] = { nodeConfigurationService.sanitize(targetConfigurations) } def forgetOtherNodeConfigurationState(keep: Set[NodeId]) : Box[Set[NodeId]] = { nodeConfigurationService.onlyKeepNodeConfiguration(keep) } def getNodeConfigurationCache(): Box[Map[NodeId, NodeConfigurationCache]] = nodeConfigurationService.getNodeConfigurationCache() /** * Detect changes in rules and update their serial * Returns two seq : the updated rules, and the deleted rules */ def detectUpdatesAndIncrementRuleSerial( nodes : Seq[NodeConfiguration] , cache : Map[NodeId, NodeConfigurationCache] , directiveLib: FullActiveTechniqueCategory , allRules : Map[RuleId, Rule] ) : Box[(Map[RuleId,Int], Seq[RuleId])] = { val firstElt = (Map[RuleId,Int](), Seq[RuleId]()) // First, fetch the updated CRs (which are either updated or deleted) (( Full(firstElt) )/:(nodeConfigurationService.detectChangeInNodes(nodes, cache, directiveLib)) ) { case (Full((updated, deleted)), ruleId) => { allRules.get(ruleId) match { case Some(rule) => woRuleRepo.incrementSerial(rule.id) match { case Full(newSerial) => logger.trace("Updating rule %s to serial %d".format(rule.id.value, newSerial)) Full( (updated + (rule.id -> newSerial), deleted) ) case f : EmptyBox => //early stop return f } case None => Full((updated.toMap, (deleted :+ ruleId))) } } } } /** * Increment the serial number of the CR. 
Must have ALL NODES as inputs */ def updateSerialNumber(allConfigs : Map[NodeId, NodeConfiguration], rules: Map[RuleId, Int]) : Map[NodeId, NodeConfiguration] = { allConfigs.map { case (id, config) => (id, config.setSerial(rules)) }.toMap } /** * Actually write the new configuration for the list of given node. * If the node target configuration is the same as the actual, nothing is done. * Else, promises are generated; * Return the list of configuration successfully written. */ def writeNodeConfigurations(rootNodeId: NodeId, allNodeConfigs: Map[NodeId, NodeConfiguration], versions: Map[NodeId, NodeConfigId], cache: Map[NodeId, NodeConfigurationCache]) : Box[Set[NodeConfiguration]] = { /* * Several steps heres: * - look what node configuration are updated (based on their cache ?) * - write these node configuration * - update caches */ val updated = nodeConfigurationService.selectUpdatedNodeConfiguration(allNodeConfigs, cache) val writtingTime = Some(DateTime.now) val fsWrite0 = writtingTime.get.getMillis for { written <- nodeConfigurationService.writeTemplate(rootNodeId, updated, allNodeConfigs, versions) ldapWrite0 = DateTime.now.getMillis fsWrite1 = (ldapWrite0 - fsWrite0) _ = logger.debug(s"Node configuration written on filesystem in ${fsWrite1} millisec.") //before caching, update the timestamp for last written time toCache = allNodeConfigs.filterKeys(updated.contains(_)).values.toSet.map( (x:NodeConfiguration) => x.copy(writtenDate = writtingTime)) cached <- nodeConfigurationService.cacheNodeConfiguration(toCache) ldapWrite1 = (DateTime.now.getMillis - ldapWrite0) _ = logger.debug(s"Node configuration cached in LDAP in ${ldapWrite1} millisec.") } yield { written.toSet } } } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait DeploymentService_setExpectedReports extends DeploymentService { def reportingService : ExpectedReportsUpdate /** * Update the serials in the rule vals based on the updated rule (which may be empty if nothing is updated) * Goal : actually set the right serial in them, as well as the correct variable * So we can have several rule with different subset of values */ private[this] def updateRuleVal( rulesVal: Seq[ExpandedRuleVal] , rules : Map[RuleId,Int] ) : Seq[ExpandedRuleVal] = { rulesVal.map(ruleVal => { rules.find { case(id,serial) => id == ruleVal.ruleId } match { case Some((id,serial)) => ruleVal.copy(serial = serial) case _ => ruleVal } }) } private[this] def getExpandedRuleVal( ruleVals :Seq[RuleVal] , nodeConfigs: Seq[NodeConfiguration] , versions : Map[NodeId, NodeConfigId] ) : Seq[ExpandedRuleVal]= { ruleVals map { rule => val directives = (nodeConfigs.flatMap { nodeConfig => val expectedDirectiveValsForNode = nodeConfig.policyDrafts.filter( x => x.ruleId == rule.ruleId) match { case drafts if drafts.size > 0 => val directives = drafts.map { draft => rule.directiveVals.find( _.directiveId == draft.directiveId) match { case None => logger.error("Inconsistency in promise generation algorithme: missing original directive for a node configuration,"+ s"please report this message. 
Directive with id '${draft.directiveId.value}' in rule '${rule.ruleId.value}' will be ignored") None case Some(directiveVal) => Some(draft.toDirectiveVal(directiveVal.originalVariables)) } }.flatten if(directives.size > 0) Some(directives) else None case _ => None } expectedDirectiveValsForNode.map(d => NodeAndConfigId(nodeConfig.nodeInfo.id, versions(nodeConfig.nodeInfo.id)) -> d.toSeq) }) ExpandedRuleVal( rule.ruleId , rule.serial , directives.toMap ) } } def setExpectedReports( ruleVal : Seq[RuleVal] , configs : Seq[NodeConfiguration] , versions : Map[NodeId, NodeConfigId] , updatedCrs : Map[RuleId, Int] , deletedCrs : Seq[RuleId] , updatedNodeConfig: Set[NodeId] , generationTime : DateTime ) : Box[Seq[RuleExpectedReports]] = { val expandedRuleVal = getExpandedRuleVal(ruleVal, configs, versions) val updatedRuleVal = updateRuleVal(expandedRuleVal, updatedCrs) val updatedConfigIds = updatedNodeConfig.flatMap(id => //we should have all the nodeConfig for the nodeIds, but if it isn't //the case, it seems safer to not try to save a new version of the nodeConfigId //for that node and just ignore it. configs.find( _.nodeInfo.id == id).map { x => (x.nodeInfo.id, versions(x.nodeInfo.id)) } ).toMap //we also want to build the list of overriden directive based on unique techniques. val overriden = configs.flatMap { nodeConfig => nodeConfig.policyDrafts.flatMap( x => x.overrides.map(o => UniqueOverrides(nodeConfig.nodeInfo.id, x.ruleId, x.directiveId, o))) } reportingService.updateExpectedReports(updatedRuleVal, deletedCrs, updatedConfigIds, generationTime, overriden.toSet) } } trait DeploymentService_historization extends DeploymentService { def historizationService : HistorizationService def historizeData(rules:Seq[Rule], directiveLib: FullActiveTechniqueCategory, groupLib: FullNodeGroupCategory, allNodeInfos: Map[NodeId, NodeInfo]) : Box[Unit] = { for { _ <- historizationService.updateNodes(allNodeInfos.values.toSet) _ <- historizationService.updateGroups(groupLib) _ <- historizationService.updateDirectiveNames(directiveLib) _ <- historizationService.updatesRuleNames(rules) } yield { () // unit is expected } } }
Kegeruneku/rudder
rudder-core/src/main/scala/com/normation/rudder/services/policies/DeploymentService.scala
Scala
agpl-3.0
38,506
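// --- Editor's illustrative sketch (not part of the dataset record above) ---
// A hedged example of driving the policy generation pipeline from DeploymentService.scala above.
// It assumes it lives in com.normation.rudder.services.policies next to the service, and that
// `deploymentService` is an already wired DeploymentServiceImpl; wiring all the repositories and
// services of its constructor is omitted here.
import net.liftweb.common.{EmptyBox, Full}

object DeploySketch {
  def runGeneration(deploymentService: DeploymentService): Unit = {
    deploymentService.deploy() match {
      case Full(updatedNodeIds) =>
        println(s"Policy generation succeeded for ${updatedNodeIds.size} node(s)")
      case eb: EmptyBox =>
        // Same error-chaining idiom as used inside the service itself.
        val failure = eb ?~! "Policy generation failed"
        println(failure.messageChain)
    }
  }
}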
/*
 * Copyright (c) 2012 Dame Ningen.
 * All rights reserved.
 *
 * This file is part of Gausel.
 *
 * Gausel is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Gausel is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Gausel. If not, see <http://www.gnu.org/licenses/>.
 */

package gausel.lib

/** Simple verbosity trait providing printing gated by an integer verbosity level.
 * Prefixes each printed line with the defined name, coloured with the defined colour.
 *
 * @author dameNingen <[email protected]>
 * @version $Revision$
 * $Id$
 */
trait Verb {

  val verbLevel: Int
  val name: String
  val color: Int

  object Colors {
    val red = 31
    val green = 32
    val yellow = 33
    val blue = 34
    val magenta = 35
    val cyan = 36
  }

  // "\u001b" is the ANSI escape character, so the prefix renders the name in colour on ANSI terminals.
  lazy val prefix = "[\u001b[" + color + ";1m" + name + "\u001b[0m] "

  def verb(s: => String, v: Int = 1) = if (v <= verbLevel) print(s)
  def verbln(s: => String, v: Int = 1) = if (v <= verbLevel) println(prefix + s)
  def verbln(v: Int) = if (v <= verbLevel) println()
  def verbList(ss: => List[String], v: Int = 1) = if (v <= verbLevel) for (s <- ss) println(prefix + s)
}
Uchuu/Gausel
trunk/src/main/scala/lib/Library.scala
Scala
gpl-3.0
1,529
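// --- Editor's illustrative sketch (not part of the dataset record above) ---
// A small, hedged usage example for the Verb trait from Library.scala above: a component mixes in
// Verb, supplies the three abstract vals, and gets prefixed, level-filtered console output.
import gausel.lib.Verb

object VerbSketch extends Verb {
  val verbLevel = 2
  val name = "sketch"
  val color = Colors.cyan

  def demo(): Unit = {
    verbln("printed, because the default level 1 <= verbLevel")
    verbln("skipped, because 3 > verbLevel", 3)
    verbList(List("first line", "second line"))
  }
}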
package io.github.yzernik.bitcoinscodec.messages import io.github.yzernik.bitcoinscodec.CodecSuite import io.github.yzernik.bitcoinscodec.structures._ import scodec.bits.ByteVector class GetBlocksSpec extends CodecSuite { val getblocks = GetBlocks( 1L, List(Hash(ByteVector.fill(32)(0x42))), Hash(ByteVector.fill(32)(0x42))) "GetBlocks codec" should { "roundtrip" in { roundtrip(GetBlocks.codec(1), getblocks) roundtrip(Message.codec(Network.TestnetParams, 1), getblocks) } } }
yzernik/bitcoin-scodec
src/test/scala/io/github/yzernik/bitcoinscodec/messages/GetBlocksSpec.scala
Scala
mit
521
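// --- Editor's illustrative sketch (not part of the dataset record above) ---
// Encoding the same GetBlocks message with scodec outside the roundtrip helper used in the spec
// above. Attempt, BitVector and DecodeResult are standard scodec-core types.
import io.github.yzernik.bitcoinscodec.messages.GetBlocks
import io.github.yzernik.bitcoinscodec.structures.Hash
import scodec.bits.ByteVector

object GetBlocksEncodeSketch {
  val msg = GetBlocks(1L, List(Hash(ByteVector.fill(32)(0x42))), Hash(ByteVector.fill(32)(0x42)))

  val codec   = GetBlocks.codec(1)                           // protocol version, as in the spec
  val encoded = codec.encode(msg)                            // Attempt[BitVector]
  val decoded = encoded.flatMap(bits => codec.decode(bits))  // Attempt[DecodeResult[GetBlocks]]
}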
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.k8s.integrationtest import io.fabric8.kubernetes.api.model.Pod import org.apache.spark.launcher.SparkLauncher private[spark] trait BasicTestsSuite { k8sSuite: KubernetesSuite => import BasicTestsSuite._ import KubernetesSuite.k8sTestTag test("Run SparkPi with no resources", k8sTestTag) { runSparkPiAndVerifyCompletion() } test("Run SparkPi with a very long application name.", k8sTestTag) { sparkAppConf.set("spark.app.name", "long" * 40) runSparkPiAndVerifyCompletion() } test("Use SparkLauncher.NO_RESOURCE", k8sTestTag) { sparkAppConf.setJars(Seq(containerLocalSparkDistroExamplesJar)) runSparkPiAndVerifyCompletion( appResource = SparkLauncher.NO_RESOURCE) } test("Run SparkPi with a master URL without a scheme.", k8sTestTag) { val url = kubernetesTestComponents.kubernetesClient.getMasterUrl val k8sMasterUrl = if (url.getPort < 0) { s"k8s://${url.getHost}" } else { s"k8s://${url.getHost}:${url.getPort}" } sparkAppConf.set("spark.master", k8sMasterUrl) runSparkPiAndVerifyCompletion() } test("Run SparkPi with an argument.", k8sTestTag) { // This additional configuration with snappy is for SPARK-26995 sparkAppConf .set("spark.io.compression.codec", "snappy") runSparkPiAndVerifyCompletion(appArgs = Array("5")) } test("Run SparkPi with custom labels, annotations, and environment variables.", k8sTestTag) { sparkAppConf .set("spark.kubernetes.driver.label.label1", "label1-value") .set("spark.kubernetes.driver.label.label2", "label2-value") .set("spark.kubernetes.driver.annotation.annotation1", "annotation1-value") .set("spark.kubernetes.driver.annotation.annotation2", "annotation2-value") .set("spark.kubernetes.driverEnv.ENV1", "VALUE1") .set("spark.kubernetes.driverEnv.ENV2", "VALUE2") .set("spark.kubernetes.executor.label.label1", "label1-value") .set("spark.kubernetes.executor.label.label2", "label2-value") .set("spark.kubernetes.executor.annotation.annotation1", "annotation1-value") .set("spark.kubernetes.executor.annotation.annotation2", "annotation2-value") .set("spark.executorEnv.ENV1", "VALUE1") .set("spark.executorEnv.ENV2", "VALUE2") runSparkPiAndVerifyCompletion( driverPodChecker = (driverPod: Pod) => { doBasicDriverPodCheck(driverPod) checkCustomSettings(driverPod) }, executorPodChecker = (executorPod: Pod) => { doBasicExecutorPodCheck(executorPod) checkCustomSettings(executorPod) }) } test("Run extraJVMOptions check on driver", k8sTestTag) { sparkAppConf .set("spark.driver.extraJavaOptions", "-Dspark.test.foo=spark.test.bar") runSparkJVMCheckAndVerifyCompletion( expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)")) } test("Run SparkRemoteFileTest using a remote data file", k8sTestTag) { sparkAppConf .set("spark.files", REMOTE_PAGE_RANK_DATA_FILE) 
runSparkRemoteCheckAndVerifyCompletion(appArgs = Array(REMOTE_PAGE_RANK_FILE_NAME)) } } private[spark] object BasicTestsSuite { val SPARK_PAGE_RANK_MAIN_CLASS: String = "org.apache.spark.examples.SparkPageRank" val CONTAINER_LOCAL_FILE_DOWNLOAD_PATH = "/var/spark-data/spark-files" val CONTAINER_LOCAL_DOWNLOADED_PAGE_RANK_DATA_FILE = s"$CONTAINER_LOCAL_FILE_DOWNLOAD_PATH/pagerank_data.txt" val REMOTE_PAGE_RANK_DATA_FILE = "https://storage.googleapis.com/spark-k8s-integration-tests/files/pagerank_data.txt" val REMOTE_PAGE_RANK_FILE_NAME = "pagerank_data.txt" }
yanboliang/spark
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
Scala
apache-2.0
4,419
package isabelle.eclipse.launch.actions import org.eclipse.debug.ui.actions.OpenLaunchDialogAction import isabelle.eclipse.launch.IsabelleLaunchConstants.ISABELLE_LAUNCH_GROUP /** * Opens the launch config dialog on the Isabelle configurations launch group. */ class OpenIsabelleLaunchConfigurations extends OpenLaunchDialogAction(ISABELLE_LAUNCH_GROUP)
andriusvelykis/isabelle-eclipse
isabelle.eclipse.launch/src/isabelle/eclipse/launch/actions/OpenIsabelleLaunchConfigurations.scala
Scala
epl-1.0
360
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.util import org.apache.spark.internal.Logging /** * The default uncaught exception handler for Executors terminates the whole process, to avoid * getting into a bad state indefinitely. Since Executors are relatively lightweight, it's better * to fail fast when things go wrong. */ private[spark] object SparkUncaughtExceptionHandler extends Thread.UncaughtExceptionHandler with Logging { override def uncaughtException(thread: Thread, exception: Throwable) { try { // Make it explicit that uncaught exceptions are thrown when container is shutting down. // It will help users when they analyze the executor logs val inShutdownMsg = if (ShutdownHookManager.inShutdown()) "[Container in shutdown] " else "" val errMsg = "Uncaught exception in thread " logError(inShutdownMsg + errMsg + thread, exception) // We may have been called from a shutdown hook. If so, we must not call System.exit(). // (If we do, we will deadlock.) if (!ShutdownHookManager.inShutdown()) { if (exception.isInstanceOf[OutOfMemoryError]) { System.exit(SparkExitCode.OOM) } else { System.exit(SparkExitCode.UNCAUGHT_EXCEPTION) } } } catch { case oom: OutOfMemoryError => Runtime.getRuntime.halt(SparkExitCode.OOM) case t: Throwable => Runtime.getRuntime.halt(SparkExitCode.UNCAUGHT_EXCEPTION_TWICE) } } def uncaughtException(exception: Throwable) { uncaughtException(Thread.currentThread(), exception) } }
sh-cho/cshSpark
util/SparkUncaughtExceptionHandler.scala
Scala
apache-2.0
2,350
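// --- Editor's illustrative sketch (not part of the dataset record above) ---
// How a handler like SparkUncaughtExceptionHandler is typically installed. The object is
// private[spark], so this sketch assumes code living inside the org.apache.spark package;
// Thread.setDefaultUncaughtExceptionHandler is plain JDK API.
object UncaughtHandlerSketch {
  def install(): Unit = {
    // Any exception escaping a thread's run() now goes through uncaughtException above,
    // which logs it and terminates the process with a matching exit code.
    Thread.setDefaultUncaughtExceptionHandler(SparkUncaughtExceptionHandler)
  }
}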
/* * Copyright 2009-2010 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.linkedin.norbert package cluster package memory import common.ClusterNotificationManagerComponent class InMemoryClusterClient(val serviceName: String, override val clientName: Option[String] = None) extends ClusterClient with ClusterNotificationManagerComponent with InMemoryClusterManagerComponent{ val clusterNotificationManager = new ClusterNotificationManager val clusterManager = new InMemoryClusterManager }
thesiddharth/norbert
cluster/src/main/scala/com/linkedin/norbert/cluster/memory/InMemoryClusterClient.scala
Scala
apache-2.0
1,034
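// --- Editor's illustrative sketch (not part of the dataset record above) ---
// Instantiating the in-memory cluster client from the Norbert record above, e.g. for tests.
// Only the constructor shown above is exercised; the wider ClusterClient API lives elsewhere
// in Norbert and is not assumed here.
import com.linkedin.norbert.cluster.memory.InMemoryClusterClient

object InMemoryClusterSketch {
  val cluster = new InMemoryClusterClient("example-service", Some("example-client"))
}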
package com.ubeeko.htalk.hbase

import org.apache.hadoop.hbase._
import org.apache.hadoop.hbase.client.{BufferedMutator, Table}

case class HTalkOptions(prefix: Option[String] = None)

class HTalkContext(val hbaseManager: HBaseManager, val options: Option[HTalkOptions]) extends AutoCloseable {
  final def tableName(name: String) = {
    TableName.valueOf(options.flatMap(_.prefix).getOrElse("") + name)
  }

  def getTableDescriptors(tableNames: Seq[String]): Array[HTableDescriptor] =
    hbaseManager.getTableDescriptors(tableNames.map(tableName))

  def isTableEnabled(name: String): Boolean =
    hbaseManager.isTableEnabled(tableName(name))

  def createTable(name: String,
                  families: Seq[String] = Seq(HBaseManager.defaultFamilyName),
                  ignoreExisting: Boolean = false): Unit =
    hbaseManager.createTable(tableName(name), families, ignoreExisting)

  def deleteTable(name: String): Unit =
    hbaseManager.deleteTable(tableName(name))

  def getTable(name: String): Table =
    hbaseManager.getTable(tableName(name))

  def getBufferedMutator(name: String): BufferedMutator =
    hbaseManager.getBufferedMutator(tableName(name))

  def tableExists(name: String): Boolean =
    hbaseManager.tableExists(tableName(name))

  /** Closes the context.
    *
    * The context must not be used after it's been closed.
    * Doing so would result in undefined behaviour.
    *
    * If a derived class overrides this method, it *must* call super.
    */
  def close(): Unit =
    hbaseManager.close()
}

object HTalkContext {
  def apply(hbaseManager: HBaseManager, options: Option[HTalkOptions] = None) =
    new HTalkContext(hbaseManager, options)
}
eric-leblouch/htalk
src/main/scala/com/ubeeko/htalk/hbase/HTalkContext.scala
Scala
apache-2.0
1,708
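// --- Editor's illustrative sketch (not part of the dataset record above) ---
// A hedged usage example for HTalkContext above. `hbaseManager` is an assumed, already-built
// HBaseManager; constructing one is connection specific and not shown in the record. With the
// prefix option below, the physical HBase table is named "dev_events".
import com.ubeeko.htalk.hbase.{HBaseManager, HTalkContext, HTalkOptions}

object HTalkContextSketch {
  def run(hbaseManager: HBaseManager): Unit = {
    val ctx = HTalkContext(hbaseManager, Some(HTalkOptions(prefix = Some("dev_"))))
    try {
      ctx.createTable("events", ignoreExisting = true)   // uses the default column family
      val table = ctx.getTable("events")                 // org.apache.hadoop.hbase.client.Table
      println(s"table: ${table.getName}, exists: ${ctx.tableExists("events")}, enabled: ${ctx.isTableEnabled("events")}")
    } finally {
      ctx.close()  // also closes the underlying HBaseManager, as documented above
    }
  }
}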
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.scheduler import org.apache.spark.executor.ExecutorExitCode /** * Represents an explanation for an executor or whole process failing or exiting. */ private[spark] class ExecutorLossReason(val message: String) extends Serializable { override def toString: String = message } private[spark] case class ExecutorExited(exitCode: Int, exitCausedByApp: Boolean, reason: String) extends ExecutorLossReason(reason) private[spark] object ExecutorExited { def apply(exitCode: Int, exitCausedByApp: Boolean): ExecutorExited = { ExecutorExited( exitCode, exitCausedByApp, ExecutorExitCode.explainExitCode(exitCode)) } } private[spark] object ExecutorKilled extends ExecutorLossReason("Executor killed by driver.") /** * A loss reason that means we don't yet know why the executor exited. * * This is used by the task scheduler to remove state associated with the executor, but * not yet fail any tasks that were running in the executor before the real loss reason * is known. */ private [spark] object LossReasonPending extends ExecutorLossReason("Pending loss reason.") /** * @param _message human readable loss reason * @param workerLost whether the worker is confirmed lost too (i.e. including shuffle service) */ private[spark] case class ExecutorProcessLost(_message: String = "Worker lost", workerLost: Boolean = false) extends ExecutorLossReason(_message) /** * A loss reason that means the executor is marked for decommissioning. * * This is used by the task scheduler to remove state associated with the executor, but * not yet fail any tasks that were running in the executor before the executor is "fully" lost. */ private [spark] object ExecutorDecommission extends ExecutorLossReason("Executor decommission.")
dbtsai/spark
core/src/main/scala/org/apache/spark/scheduler/ExecutorLossReason.scala
Scala
apache-2.0
2594
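For illustration, a small pattern match over the loss reasons defined above. This is not Spark source, just a sketch of how a scheduler component could branch on each case; it sits in the same package because the hierarchy is private[spark].

package org.apache.spark.scheduler

// Sketch only: map every loss reason to a human-readable summary.
object LossReasonSketch {
  def describe(reason: ExecutorLossReason): String = reason match {
    case ExecutorExited(code, byApp, msg)     => s"exited($code, appCaused=$byApp): $msg"
    case ExecutorProcessLost(msg, workerLost) => s"process lost: $msg (worker lost: $workerLost)"
    case ExecutorKilled                       => "killed deliberately by the driver"
    case LossReasonPending                    => "loss reason not yet known; keep tasks pending"
    case ExecutorDecommission                 => "executor is being decommissioned"
    case other                                => other.message
  }
}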
// See the LICENCE.txt file distributed with this work for additional // information regarding copyright ownership. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package scray.hdfs.io.configure import scray.hdfs.io.write.IHdfsWriterConstats import java.util.Optional import scray.hdfs.io.configure.WriteDestionationConstats.WriteMode import java.util.Arrays class CompactionState {} case object NEW extends CompactionState case object IsReadyForCompaction extends CompactionState case object CompactionIsStarted extends CompactionState case object IsCompacted extends CompactionState case class Version(number: Int, compactionState: CompactionState = NEW) { def this(number: Int) = { this(number, NEW) } } /** * Parameters for WriteCoordinatr * * @param id id of logical system * @param path path to folder * @param customFileName defines a file name. If not defined a random UUID will be used * @param fileFormat format to store files e.g. ORC, SequenceFile * @param version paths are versioned to handle them independently e.g. for compactions * @param maxFileSize If maxFileSize is reached a new file will be used * @param maxNumberOfInserts If maxNumberOfInserts is reached a new file will be used. * @param sequenceFileCompressionType type of compression of SequenceFiles. Possible values 'NONE', 'BLOCK' or 'RECORD'. 
For details see: https://hadoop.apache.org/docs/r2.8.2/hadoop-project-dist/hadoop-common/api/org/apache/hadoop/io/SequenceFile.CompressionType.html */ class WriteParameter( var queryspace: String, val user: String, val password: Array[Byte] = null, var path: String, var fileNameCreator: Optional[FilenameCreator] = Optional.empty(), var fileFormat: IHdfsWriterConstats.SequenceKeyValueFormat, var version: Version = Version(0), var writeVersioned: Boolean = false, var maxFileSize: Long = Long.MaxValue, var maxNumberOfInserts: Int = Integer.MAX_VALUE, var timeLimit: Int = -1, var storeAsHiddenFileTillClosed: Boolean = false, var createScrayIndexFile: Boolean = false, var sequenceFileCompressionType: String = "RECORD") { override def equals(that: Any): Boolean = { that match { case that: WriteParameter => { (this.user == that.user && Arrays.equals(this.password, that.password) && this.path == that.path && this.fileNameCreator == that.fileNameCreator && this.fileFormat == that.fileFormat && this.version == that.version && this.writeVersioned == that.writeVersioned && this.maxFileSize == that.maxFileSize && this.maxNumberOfInserts == that.maxNumberOfInserts && (this.storeAsHiddenFileTillClosed == that.storeAsHiddenFileTillClosed) && (this.createScrayIndexFile == that.createScrayIndexFile) && (this.sequenceFileCompressionType == that.sequenceFileCompressionType) ) } case _ => false } } override def hashCode: Int = { val result = 1 (composeAttributes(result) _) .andThen(composeAttributes(getHashCodeOrZero(queryspace))) .andThen(composeAttributes(getHashCodeOrZero(user))) .andThen(composeAttributes(getHashCodeOrZeroOfArray(password))) .andThen(composeAttributes(getHashCodeOrZero(path))) .andThen(composeAttributes(getHashCodeOrZero(fileNameCreator))) .andThen(composeAttributes(getHashCodeOrZero(fileFormat))) .andThen(composeAttributes(getHashCodeOrZero(version))) .andThen(composeAttributes(getHashCodeOrZero(writeVersioned))) .andThen(composeAttributes(getHashCodeOrZero(maxFileSize))) .andThen(composeAttributes(getHashCodeOrZero(maxNumberOfInserts))) .andThen(composeAttributes(getHashCodeOrZero(storeAsHiddenFileTillClosed))) .andThen(composeAttributes(getHashCodeOrZero(sequenceFileCompressionType))) .apply(getHashCodeOrZero(createScrayIndexFile)) } def composeAttributes(prev: Int)(next: Int): Int = { val prime = 31 prime * prev + next } private def getHashCodeOrZeroOfArray(value: Array[Byte]): Int = { if (value == null) { 0 } else { Arrays.hashCode(value) } } private def getHashCodeOrZero(value: Any): Int = { if (value == null) { 0 } else { value.hashCode() } } } object WriteParameter { class Builder { var queryspace: String = null var user: String = System.getProperty("user.name") var password: Array[Byte] = null var path: String = null var fileNameCreator: Optional[FilenameCreator] = Optional.empty() var fileFormat: IHdfsWriterConstats.SequenceKeyValueFormat = null var version: Version = Version(0) var writeVersioned: Boolean = false var maxFileSize: Long = Long.MaxValue var maxNumberOfInserts: Int = Integer.MAX_VALUE var timeLimit: Int = -1 var writeMode: WriteMode = WriteMode.WriteBack var storeAsHiddenFileTillClosed: Boolean = false var createScrayIndexFile: Boolean = false var sequenceFileCompressionType: String = "RECORD" def setQueryspace(queryspace: String): Builder = { this.queryspace = queryspace this } def setPath(path: String): Builder = { this.path = path this } def setFileNameCreator(fileName: FilenameCreator): Builder = { this.fileNameCreator = Optional.of(fileName); this } def 
setUser(user: String): Builder = { this.user = user; this } def setPassword(password: Array[Byte]) = { this.password = password this } def setFileFormat(kvFormat: IHdfsWriterConstats.SequenceKeyValueFormat): Builder = { this.fileFormat = kvFormat this } def setVersion(version: Version): Builder = { this.version = version this } def setWriteVersioned(writeVersioned: Boolean): Builder = { this.writeVersioned = writeVersioned this } def setMaxFileSize(maxFileSize: Long): Builder = { this.maxFileSize = maxFileSize this } def setMaxNumberOfInserts(maxNumberOfInserts: Int): Builder = { this.maxNumberOfInserts = maxNumberOfInserts this } def setTimeLimit(seconds: Int): Builder = { this.timeLimit = seconds * 1000 this } def setWriteMode(mode: WriteMode): Builder = { this.writeMode = mode this } def setStoreAsHiddenFileTillClosed(storeAsHiddenFileTillClosed: Boolean): Builder = { this.storeAsHiddenFileTillClosed = storeAsHiddenFileTillClosed this } def setCreateScrayIndexFile(createScrayIndexFile: Boolean): Builder = { this.createScrayIndexFile = createScrayIndexFile this } def setSequenceFileCompressionType(compressionType: String): Builder = { this.sequenceFileCompressionType = compressionType this } def createConfiguration: WriteParameter = { new WriteParameter( queryspace, user, password, path, fileNameCreator, fileFormat, version, writeVersioned, maxFileSize, maxNumberOfInserts, timeLimit, storeAsHiddenFileTillClosed, createScrayIndexFile, sequenceFileCompressionType) } } }
scray/scray
scray-hdfs/modules/scray-hdfs-writer/src/main/scala/scray/hdfs/io/configure/CoordinatedWriterConf.scala
Scala
apache-2.0
8298
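A hedged sketch of driving the Builder above. The queryspace, path, size limits and the use of `???` for the SequenceKeyValueFormat constant are placeholders; the real format values live in IHdfsWriterConstats and are not shown in this file, so the program only runs once a real constant is substituted.

import scray.hdfs.io.configure.WriteParameter
import scray.hdfs.io.write.IHdfsWriterConstats

object WriteParameterSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder: pick whichever SequenceKeyValueFormat constant your deployment uses.
    val format: IHdfsWriterConstats.SequenceKeyValueFormat = ???

    val conf: WriteParameter = new WriteParameter.Builder()
      .setQueryspace("example-space")
      .setPath("hdfs://namenode:8020/data/example/")
      .setFileFormat(format)
      .setMaxFileSize(512L * 1024 * 1024) // roll to a new file after ~512 MB
      .setMaxNumberOfInserts(100000)      // ...or after 100k inserts, whichever comes first
      .setSequenceFileCompressionType("BLOCK")
      .createConfiguration

    println(s"writing ${conf.fileFormat} files under ${conf.path}")
  }
}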
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.parser import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier} import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, RelationTimeTravel, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedInlineTable, UnresolvedRelation, UnresolvedStar, UnresolvedSubqueryColumnAliases, UnresolvedTableValuedFunction} import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate.Percentile import org.apache.spark.sql.catalyst.plans._ import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.{Decimal, DecimalType, IntegerType, LongType, StringType} /** * Parser test cases for rules defined in [[CatalystSqlParser]] / [[AstBuilder]]. * * There is also SparkSqlParserSuite in sql/core module for parser rules defined in sql/core module. */ class PlanParserSuite extends AnalysisTest { import CatalystSqlParser._ import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ private def assertEqual(sqlCommand: String, plan: LogicalPlan): Unit = { comparePlans(parsePlan(sqlCommand), plan, checkAnalysis = false) } private def intercept(sqlCommand: String, messages: String*): Unit = interceptParseException(parsePlan)(sqlCommand, messages: _*)() private def intercept(sqlCommand: String, errorClass: Option[String], messages: String*): Unit = interceptParseException(parsePlan)(sqlCommand, messages: _*)(errorClass) private def cte( plan: LogicalPlan, namedPlans: (String, (LogicalPlan, Seq[String]))*): UnresolvedWith = { val ctes = namedPlans.map { case (name, (cte, columnAliases)) => val subquery = if (columnAliases.isEmpty) { cte } else { UnresolvedSubqueryColumnAliases(columnAliases, cte) } name -> SubqueryAlias(name, subquery) } UnresolvedWith(plan, ctes) } test("single comment case one") { val plan = table("a").select(star()) assertEqual("-- single comment\nSELECT * FROM a", plan) } test("single comment case two") { val plan = table("a").select(star()) assertEqual("-- single comment\\\nwith line continuity\nSELECT * FROM a", plan) } test("bracketed comment case one") { val plan = table("a").select(star()) assertEqual( """ |/* This is an example of SQL which should not execute: | * select 'multi-line'; | */ |SELECT * FROM a """.stripMargin, plan) } test("bracketed comment case two") { val plan = table("a").select(star()) assertEqual( """ |/* |SELECT 'trailing' as x1; -- inside block comment |*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case one") { val plan = table("a").select(star()) assertEqual( """ |/* This block comment surrounds a query which itself has a 
block comment... |SELECT /* embedded single line */ 'embedded' AS x2; |*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case two") { val plan = table("a").select(star()) assertEqual( """ |SELECT -- continued after the following block comments... |/* Deeply nested comment. | This includes a single apostrophe to make sure we aren't decoding this part as a string. |SELECT 'deep nest' AS n1; |/* Second level of nesting... |SELECT 'deeper nest' as n2; |/* Third level of nesting... |SELECT 'deepest nest' as n3; |*/ |Hoo boy. Still two deep... |*/ |Now just one deep... |*/ |* FROM a """.stripMargin, plan) } test("nested bracketed comment case three") { val plan = table("a").select(star()) assertEqual( """ |/* This block comment surrounds a query which itself has a block comment... |//* I am a nested bracketed comment. |*/ |*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case four") { val plan = table("a").select(star()) assertEqual( """ |/*/**/*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case five") { val plan = table("a").select(star()) assertEqual( """ |/*/*abc*/*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case six") { val plan = table("a").select(star()) assertEqual( """ |/*/*foo*//*bar*/*/ |SELECT * FROM a """.stripMargin, plan) } test("nested bracketed comment case seven") { val plan = OneRowRelation().select(Literal(1).as("a")) assertEqual( """ |/*abc*/ |select 1 as a |/* | |2 as b |/*abc */ |, 3 as c | |/**/ |*/ """.stripMargin, plan) } test("unclosed bracketed comment one") { val query = """ |/*abc*/ |select 1 as a |/* | |2 as b |/*abc */ |, 3 as c | |/**/ |""".stripMargin val e = intercept[ParseException](parsePlan(query)) assert(e.getMessage.contains(s"Unclosed bracketed comment")) } test("unclosed bracketed comment two") { val query = """ |/*abc*/ |select 1 as a |/* | |2 as b |/*abc */ |, 3 as c | |/**/ |select 4 as d |""".stripMargin val e = intercept[ParseException](parsePlan(query)) assert(e.getMessage.contains(s"Unclosed bracketed comment")) } test("case insensitive") { val plan = table("a").select(star()) assertEqual("sELEct * FroM a", plan) assertEqual("select * fRoM a", plan) assertEqual("SELECT * FROM a", plan) } test("explain") { intercept("EXPLAIN logical SELECT 1", "Unsupported SQL statement") intercept("EXPLAIN formatted SELECT 1", "Unsupported SQL statement") } test("set operations") { val a = table("a").select(star()) val b = table("b").select(star()) assertEqual("select * from a union select * from b", Distinct(a.union(b))) assertEqual("select * from a union distinct select * from b", Distinct(a.union(b))) assertEqual("select * from a union all select * from b", a.union(b)) assertEqual("select * from a except select * from b", a.except(b, isAll = false)) assertEqual("select * from a except distinct select * from b", a.except(b, isAll = false)) assertEqual("select * from a except all select * from b", a.except(b, isAll = true)) assertEqual("select * from a minus select * from b", a.except(b, isAll = false)) assertEqual("select * from a minus all select * from b", a.except(b, isAll = true)) assertEqual("select * from a minus distinct select * from b", a.except(b, isAll = false)) assertEqual("select * from a " + "intersect select * from b", a.intersect(b, isAll = false)) assertEqual("select * from a intersect distinct select * from b", a.intersect(b, isAll = false)) assertEqual("select * from a intersect all select * from b", a.intersect(b, isAll = true)) } 
test("common table expressions") { assertEqual( "with cte1 as (select * from a) select * from cte1", cte(table("cte1").select(star()), "cte1" -> ((table("a").select(star()), Seq.empty)))) assertEqual( "with cte1 (select 1) select * from cte1", cte(table("cte1").select(star()), "cte1" -> ((OneRowRelation().select(1), Seq.empty)))) assertEqual( "with cte1 (select 1), cte2 as (select * from cte1) select * from cte2", cte(table("cte2").select(star()), "cte1" -> ((OneRowRelation().select(1), Seq.empty)), "cte2" -> ((table("cte1").select(star()), Seq.empty)))) intercept( "with cte1 (select 1), cte1 as (select 1 from cte1) select * from cte1", "CTE definition can't have duplicate names: 'cte1'.") } test("simple select query") { assertEqual("select 1", OneRowRelation().select(1)) assertEqual("select a, b", OneRowRelation().select('a, 'b)) assertEqual("select a, b from db.c", table("db", "c").select('a, 'b)) assertEqual("select a, b from db.c where x < 1", table("db", "c").where('x < 1).select('a, 'b)) assertEqual( "select a, b from db.c having x < 1", table("db", "c").having()('a, 'b)('x < 1)) assertEqual("select distinct a, b from db.c", Distinct(table("db", "c").select('a, 'b))) assertEqual("select all a, b from db.c", table("db", "c").select('a, 'b)) assertEqual("select from tbl", OneRowRelation().select('from.as("tbl"))) assertEqual("select a from 1k.2m", table("1k", "2m").select('a)) } test("hive-style single-FROM statement") { assertEqual("from a select b, c", table("a").select('b, 'c)) assertEqual( "from db.a select b, c where d < 1", table("db", "a").where('d < 1).select('b, 'c)) assertEqual("from a select distinct b, c", Distinct(table("a").select('b, 'c))) // Weird "FROM table" queries, should be invalid anyway intercept("from a", "no viable alternative at input 'from a'") intercept("from (from a union all from b) c select *", "no viable alternative at input 'from") } test("multi select query") { assertEqual( "from a select * select * where s < 10", table("a").select(star()).union(table("a").where('s < 10).select(star()))) intercept( "from a select * select * from x where a.s < 10", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near 'from'") intercept( "from a select * from b", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near 'from'") assertEqual( "from a insert into tbl1 select * insert into tbl2 select * where s < 10", table("a").select(star()).insertInto("tbl1").union( table("a").where('s < 10).select(star()).insertInto("tbl2"))) assertEqual( "select * from (from a select * select *)", table("a").select(star()) .union(table("a").select(star())) .as("__auto_generated_subquery_name").select(star())) } test("query organization") { // Test all valid combinations of order by/sort by/distribute by/cluster by/limit/windows val baseSql = "select * from t" val basePlan = table("t").select(star()) val ws = Map("w1" -> WindowSpecDefinition(Seq.empty, Seq.empty, UnspecifiedFrame)) val limitWindowClauses = Seq( ("", (p: LogicalPlan) => p), (" limit 10", (p: LogicalPlan) => p.limit(10)), (" window w1 as ()", (p: LogicalPlan) => WithWindowDefinition(ws, p)), (" window w1 as () limit 10", (p: LogicalPlan) => WithWindowDefinition(ws, p).limit(10)) ) val orderSortDistrClusterClauses = Seq( ("", basePlan), (" order by a, b desc", basePlan.orderBy('a.asc, 'b.desc)), (" sort by a, b desc", basePlan.sortBy('a.asc, 'b.desc)) ) orderSortDistrClusterClauses.foreach { case (s1, p1) => limitWindowClauses.foreach { case (s2, pf2) => assertEqual(baseSql + s1 + s2, pf2(p1)) } } val msg = "Combination 
of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY is not supported" intercept(s"$baseSql order by a sort by a", msg) intercept(s"$baseSql cluster by a distribute by a", msg) intercept(s"$baseSql order by a cluster by a", msg) intercept(s"$baseSql order by a distribute by a", msg) } test("insert into") { import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ val sql = "select * from t" val plan = table("t").select(star()) def insert( partition: Map[String, Option[String]], overwrite: Boolean = false, ifPartitionNotExists: Boolean = false): LogicalPlan = InsertIntoStatement(table("s"), partition, Nil, plan, overwrite, ifPartitionNotExists) // Single inserts assertEqual(s"insert overwrite table s $sql", insert(Map.empty, overwrite = true)) assertEqual(s"insert overwrite table s partition (e = 1) if not exists $sql", insert(Map("e" -> Option("1")), overwrite = true, ifPartitionNotExists = true)) assertEqual(s"insert into s $sql", insert(Map.empty)) assertEqual(s"insert into table s partition (c = 'd', e = 1) $sql", insert(Map("c" -> Option("d"), "e" -> Option("1")))) // Multi insert val plan2 = table("t").where('x > 5).select(star()) assertEqual("from t insert into s select * limit 1 insert into u select * where x > 5", plan.limit(1).insertInto("s").union(plan2.insertInto("u"))) } test("aggregation") { val sql = "select a, b, sum(c) as c from d group by a, b" val sqlWithoutGroupBy = "select a, b, sum(c) as c from d" // Normal assertEqual(sql, table("d").groupBy('a, 'b)('a, 'b, 'sum.function('c).as("c"))) // Cube assertEqual(s"$sql with cube", table("d").groupBy(Cube(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) assertEqual(s"$sqlWithoutGroupBy group by cube(a, b)", table("d").groupBy(Cube(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) assertEqual(s"$sqlWithoutGroupBy group by cube (a, b)", table("d").groupBy(Cube(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) // Rollup assertEqual(s"$sql with rollup", table("d").groupBy(Rollup(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) assertEqual(s"$sqlWithoutGroupBy group by rollup(a, b)", table("d").groupBy(Rollup(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) assertEqual(s"$sqlWithoutGroupBy group by rollup (a, b)", table("d").groupBy(Rollup(Seq(Seq('a), Seq('b))))('a, 'b, 'sum.function('c).as("c"))) // Grouping Sets assertEqual(s"$sql grouping sets((a, b), (a), ())", Aggregate(Seq(GroupingSets(Seq(Seq('a, 'b), Seq('a), Seq()), Seq('a, 'b))), Seq('a, 'b, 'sum.function('c).as("c")), table("d"))) assertEqual(s"$sqlWithoutGroupBy group by grouping sets((a, b), (a), ())", Aggregate(Seq(GroupingSets(Seq(Seq('a, 'b), Seq('a), Seq()))), Seq('a, 'b, 'sum.function('c).as("c")), table("d"))) val m = intercept[ParseException] { parsePlan("SELECT a, b, count(distinct a, distinct b) as c FROM d GROUP BY a, b") }.getMessage assert(m.contains("extraneous input 'b'")) } test("limit") { val sql = "select * from t" val plan = table("t").select(star()) assertEqual(s"$sql limit 10", plan.limit(10)) assertEqual(s"$sql limit cast(9 / 4 as int)", plan.limit(Cast(Literal(9) / 4, IntegerType))) } test("window spec") { // Note that WindowSpecs are testing in the ExpressionParserSuite val sql = "select * from t" val plan = table("t").select(star()) val spec = WindowSpecDefinition(Seq('a, 'b), Seq('c.asc), SpecifiedWindowFrame(RowFrame, -Literal(1), Literal(1))) // Test window resolution. 
val ws1 = Map("w1" -> spec, "w2" -> spec, "w3" -> spec) assertEqual( s"""$sql |window w1 as (partition by a, b order by c rows between 1 preceding and 1 following), | w2 as w1, | w3 as w1""".stripMargin, WithWindowDefinition(ws1, plan)) // Fail with no reference. intercept(s"$sql window w2 as w1", "Cannot resolve window reference 'w1'") // Fail when resolved reference is not a window spec. intercept( s"""$sql |window w1 as (partition by a, b order by c rows between 1 preceding and 1 following), | w2 as w1, | w3 as w2""".stripMargin, "Window reference 'w2' is not a window specification" ) } test("lateral view") { val explode = UnresolvedGenerator(FunctionIdentifier("explode"), Seq('x)) val jsonTuple = UnresolvedGenerator(FunctionIdentifier("json_tuple"), Seq('x, 'y)) // Single lateral view assertEqual( "select * from t lateral view explode(x) expl as x", table("t") .generate(explode, alias = Some("expl"), outputNames = Seq("x")) .select(star())) // Multiple lateral views assertEqual( """select * |from t |lateral view explode(x) expl |lateral view outer json_tuple(x, y) jtup q, z""".stripMargin, table("t") .generate(explode, alias = Some("expl")) .generate(jsonTuple, outer = true, alias = Some("jtup"), outputNames = Seq("q", "z")) .select(star())) // Multi-Insert lateral views. val from = table("t1").generate(explode, alias = Some("expl"), outputNames = Seq("x")) assertEqual( """from t1 |lateral view explode(x) expl as x |insert into t2 |select * |lateral view json_tuple(x, y) jtup q, z |insert into t3 |select * |where s < 10 """.stripMargin, Union(from .generate(jsonTuple, alias = Some("jtup"), outputNames = Seq("q", "z")) .select(star()) .insertInto("t2"), from.where('s < 10).select(star()).insertInto("t3"))) // Unresolved generator. val expected = table("t") .generate( UnresolvedGenerator(FunctionIdentifier("posexplode"), Seq('x)), alias = Some("posexpl"), outputNames = Seq("x", "y")) .select(star()) assertEqual( "select * from t lateral view posexplode(x) posexpl as x, y", expected) intercept( """select * |from t |lateral view explode(x) expl |pivot ( | sum(x) | FOR y IN ('a', 'b') |)""".stripMargin, "LATERAL cannot be used together with PIVOT in FROM clause") } test("joins") { // Test single joins. 
val testUnconditionalJoin = (sql: String, jt: JoinType) => { assertEqual( s"select * from t as tt $sql u", table("t").as("tt").join(table("u"), jt, None).select(star())) } val testConditionalJoin = (sql: String, jt: JoinType) => { assertEqual( s"select * from t $sql u as uu on a = b", table("t").join(table("u").as("uu"), jt, Option('a === 'b)).select(star())) } val testNaturalJoin = (sql: String, jt: JoinType) => { assertEqual( s"select * from t tt natural $sql u as uu", table("t").as("tt").join(table("u").as("uu"), NaturalJoin(jt), None).select(star())) } val testUsingJoin = (sql: String, jt: JoinType) => { assertEqual( s"select * from t $sql u using(a, b)", table("t").join(table("u"), UsingJoin(jt, Seq("a", "b")), None).select(star())) } val testLateralJoin = (sql: String, jt: JoinType) => { assertEqual( s"select * from t $sql lateral (select * from u) uu", LateralJoin( table("t"), LateralSubquery(table("u").select(star()).as("uu")), jt, None).select(star())) } val testAllExceptLateral = Seq(testUnconditionalJoin, testConditionalJoin, testNaturalJoin, testUsingJoin) val testAll = testAllExceptLateral :+ testLateralJoin val testExistence = Seq(testUnconditionalJoin, testConditionalJoin, testUsingJoin) def test(sql: String, jt: JoinType, tests: Seq[(String, JoinType) => Unit]): Unit = { tests.foreach(_(sql, jt)) } test("cross join", Cross, Seq(testUnconditionalJoin, testLateralJoin)) test(",", Inner, Seq(testUnconditionalJoin, testLateralJoin)) test("join", Inner, testAll) test("inner join", Inner, testAll) test("left join", LeftOuter, testAll) test("left outer join", LeftOuter, testAll) test("right join", RightOuter, testAllExceptLateral) test("right outer join", RightOuter, testAllExceptLateral) test("full join", FullOuter, testAllExceptLateral) test("full outer join", FullOuter, testAllExceptLateral) test("left semi join", LeftSemi, testExistence) test("semi join", LeftSemi, testExistence) test("left anti join", LeftAnti, testExistence) test("anti join", LeftAnti, testExistence) // Test natural cross join intercept("select * from a natural cross join b") // Test natural join with a condition intercept("select * from a natural join b on a.id = b.id") // Test multiple consecutive joins assertEqual( "select * from a join b join c right join d", table("a").join(table("b")).join(table("c")).join(table("d"), RightOuter).select(star())) // SPARK-17296 assertEqual( "select * from t1 cross join t2 join t3 on t3.id = t1.id join t4 on t4.id = t1.id", table("t1") .join(table("t2"), Cross) .join(table("t3"), Inner, Option(Symbol("t3.id") === Symbol("t1.id"))) .join(table("t4"), Inner, Option(Symbol("t4.id") === Symbol("t1.id"))) .select(star())) // Test multiple on clauses. intercept("select * from t1 inner join t2 inner join t3 on col3 = col2 on col3 = col1") // Parenthesis assertEqual( "select * from t1 inner join (t2 inner join t3 on col3 = col2) on col3 = col1", table("t1") .join(table("t2") .join(table("t3"), Inner, Option('col3 === 'col2)), Inner, Option('col3 === 'col1)) .select(star())) assertEqual( "select * from t1 inner join (t2 inner join t3) on col3 = col2", table("t1") .join(table("t2").join(table("t3"), Inner, None), Inner, Option('col3 === 'col2)) .select(star())) assertEqual( "select * from t1 inner join (t2 inner join t3 on col3 = col2)", table("t1") .join(table("t2").join(table("t3"), Inner, Option('col3 === 'col2)), Inner, None) .select(star())) // Implicit joins. 
assertEqual( "select * from t1, t3 join t2 on t1.col1 = t2.col2", table("t1") .join(table("t3")) .join(table("t2"), Inner, Option(Symbol("t1.col1") === Symbol("t2.col2"))) .select(star())) // Test lateral join with join conditions assertEqual( s"select * from t join lateral (select * from u) uu on true", LateralJoin( table("t"), LateralSubquery(table("u").select(star()).as("uu")), Inner, Option(true)).select(star())) // Test multiple lateral joins assertEqual( "select * from a, lateral (select * from b) bb, lateral (select * from c) cc", LateralJoin( LateralJoin( table("a"), LateralSubquery(table("b").select(star()).as("bb")), Inner, None), LateralSubquery(table("c").select(star()).as("cc")), Inner, None).select(star()) ) } test("sampled relations") { val sql = "select * from t" assertEqual(s"$sql tablesample(100 rows)", table("t").limit(100).select(star())) assertEqual(s"$sql tablesample(43 percent) as x", Sample(0, .43d, withReplacement = false, 10L, table("t").as("x")).select(star())) assertEqual(s"$sql tablesample(bucket 4 out of 10) as x", Sample(0, .4d, withReplacement = false, 10L, table("t").as("x")).select(star())) intercept(s"$sql tablesample(bucket 4 out of 10 on x) as x", "TABLESAMPLE(BUCKET x OUT OF y ON colname) is not supported") intercept(s"$sql tablesample(bucket 11 out of 10) as x", s"Sampling fraction (${11.0/10.0}) must be on interval [0, 1]") intercept("SELECT * FROM parquet_t0 TABLESAMPLE(300M) s", "TABLESAMPLE(byteLengthLiteral) is not supported") intercept("SELECT * FROM parquet_t0 TABLESAMPLE(BUCKET 3 OUT OF 32 ON rand()) s", "TABLESAMPLE(BUCKET x OUT OF y ON function) is not supported") } test("sub-query") { val plan = table("t0").select('id) assertEqual("select id from (t0)", plan) assertEqual("select id from ((((((t0))))))", plan) assertEqual( "(select * from t1) union distinct (select * from t2)", Distinct(table("t1").select(star()).union(table("t2").select(star())))) assertEqual( "select * from ((select * from t1) union (select * from t2)) t", Distinct( table("t1").select(star()).union(table("t2").select(star()))).as("t").select(star())) assertEqual( """select id |from (((select id from t0) | union all | (select id from t0)) | union all | (select id from t0)) as u_1 """.stripMargin, plan.union(plan).union(plan).as("u_1").select('id)) } test("scalar sub-query") { assertEqual( "select (select max(b) from s) ss from t", table("t").select(ScalarSubquery(table("s").select('max.function('b))).as("ss"))) assertEqual( "select * from t where a = (select b from s)", table("t").where('a === ScalarSubquery(table("s").select('b))).select(star())) assertEqual( "select g from t group by g having a > (select b from s)", table("t") .having('g)('g)('a > ScalarSubquery(table("s").select('b)))) } test("table reference") { assertEqual("table t", table("t")) assertEqual("table d.t", table("d", "t")) } test("table valued function") { assertEqual( "select * from range(2)", UnresolvedTableValuedFunction("range", Literal(2) :: Nil, Seq.empty).select(star())) // SPARK-34627 intercept("select * from default.range(2)", "table valued function cannot specify database name: default.range") } test("SPARK-20311 range(N) as alias") { assertEqual( "SELECT * FROM range(10) AS t", SubqueryAlias("t", UnresolvedTableValuedFunction("range", Literal(10) :: Nil, Seq.empty)) .select(star())) assertEqual( "SELECT * FROM range(7) AS t(a)", SubqueryAlias("t", UnresolvedTableValuedFunction("range", Literal(7) :: Nil, "a" :: Nil)) .select(star())) } test("SPARK-20841 Support table column aliases in FROM 
clause") { assertEqual( "SELECT * FROM testData AS t(col1, col2)", SubqueryAlias( "t", UnresolvedSubqueryColumnAliases( Seq("col1", "col2"), UnresolvedRelation(TableIdentifier("testData")) ) ).select(star())) } test("SPARK-20962 Support subquery column aliases in FROM clause") { assertEqual( "SELECT * FROM (SELECT a AS x, b AS y FROM t) t(col1, col2)", SubqueryAlias( "t", UnresolvedSubqueryColumnAliases( Seq("col1", "col2"), UnresolvedRelation(TableIdentifier("t")).select('a.as("x"), 'b.as("y")) ) ).select(star())) } test("SPARK-20963 Support aliases for join relations in FROM clause") { val src1 = UnresolvedRelation(TableIdentifier("src1")).as("s1") val src2 = UnresolvedRelation(TableIdentifier("src2")).as("s2") assertEqual( "SELECT * FROM (src1 s1 INNER JOIN src2 s2 ON s1.id = s2.id) dst(a, b, c, d)", SubqueryAlias( "dst", UnresolvedSubqueryColumnAliases( Seq("a", "b", "c", "d"), src1.join(src2, Inner, Option(Symbol("s1.id") === Symbol("s2.id"))) ) ).select(star())) } test("SPARK-34335 Support referencing subquery with column aliases by table alias") { assertEqual( "SELECT t.col1, t.col2 FROM (SELECT a AS x, b AS y FROM t) t(col1, col2)", SubqueryAlias( "t", UnresolvedSubqueryColumnAliases( Seq("col1", "col2"), UnresolvedRelation(TableIdentifier("t")).select('a.as("x"), 'b.as("y"))) ).select($"t.col1", $"t.col2") ) } test("inline table") { assertEqual("values 1, 2, 3, 4", UnresolvedInlineTable(Seq("col1"), Seq(1, 2, 3, 4).map(x => Seq(Literal(x))))) assertEqual( "values (1, 'a'), (2, 'b') as tbl(a, b)", UnresolvedInlineTable( Seq("a", "b"), Seq(Literal(1), Literal("a")) :: Seq(Literal(2), Literal("b")) :: Nil).as("tbl")) } test("simple select query with !> and !<") { // !< is equivalent to >= assertEqual("select a, b from db.c where x !< 1", table("db", "c").where('x >= 1).select('a, 'b)) // !> is equivalent to <= assertEqual("select a, b from db.c where x !> 1", table("db", "c").where('x <= 1).select('a, 'b)) } test("select hint syntax") { // Hive compatibility: Missing parameter raises ParseException. intercept("SELECT /*+ HINT() */ * FROM t", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near") // Disallow space as the delimiter. 
intercept("SELECT /*+ INDEX(a b c) */ * from default.t", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near 'b'") comparePlans( parsePlan("SELECT /*+ HINT */ * FROM t"), UnresolvedHint("HINT", Seq.empty, table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ BROADCASTJOIN(u) */ * FROM t"), UnresolvedHint("BROADCASTJOIN", Seq($"u"), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ MAPJOIN(u) */ * FROM t"), UnresolvedHint("MAPJOIN", Seq($"u"), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ STREAMTABLE(a,b,c) */ * FROM t"), UnresolvedHint("STREAMTABLE", Seq($"a", $"b", $"c"), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ INDEX(t, emp_job_ix) */ * FROM t"), UnresolvedHint("INDEX", Seq($"t", $"emp_job_ix"), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ MAPJOIN(`default.t`) */ * from `default.t`"), UnresolvedHint("MAPJOIN", Seq(UnresolvedAttribute.quoted("default.t")), table("default.t").select(star()))) comparePlans( parsePlan("SELECT /*+ MAPJOIN(t) */ a from t where true group by a order by a"), UnresolvedHint("MAPJOIN", Seq($"t"), table("t").where(Literal(true)).groupBy('a)('a)).orderBy('a.asc)) comparePlans( parsePlan("SELECT /*+ COALESCE(10) */ * FROM t"), UnresolvedHint("COALESCE", Seq(Literal(10)), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ REPARTITION(100) */ * FROM t"), UnresolvedHint("REPARTITION", Seq(Literal(100)), table("t").select(star()))) comparePlans( parsePlan( "INSERT INTO s SELECT /*+ REPARTITION(100), COALESCE(500), COALESCE(10) */ * FROM t"), InsertIntoStatement(table("s"), Map.empty, Nil, UnresolvedHint("REPARTITION", Seq(Literal(100)), UnresolvedHint("COALESCE", Seq(Literal(500)), UnresolvedHint("COALESCE", Seq(Literal(10)), table("t").select(star())))), overwrite = false, ifPartitionNotExists = false)) comparePlans( parsePlan("SELECT /*+ BROADCASTJOIN(u), REPARTITION(100) */ * FROM t"), UnresolvedHint("BROADCASTJOIN", Seq($"u"), UnresolvedHint("REPARTITION", Seq(Literal(100)), table("t").select(star())))) intercept("SELECT /*+ COALESCE(30 + 50) */ * FROM t", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near") comparePlans( parsePlan("SELECT /*+ REPARTITION(c) */ * FROM t"), UnresolvedHint("REPARTITION", Seq(UnresolvedAttribute("c")), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ REPARTITION(100, c) */ * FROM t"), UnresolvedHint("REPARTITION", Seq(Literal(100), UnresolvedAttribute("c")), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ REPARTITION(100, c), COALESCE(50) */ * FROM t"), UnresolvedHint("REPARTITION", Seq(Literal(100), UnresolvedAttribute("c")), UnresolvedHint("COALESCE", Seq(Literal(50)), table("t").select(star())))) comparePlans( parsePlan("SELECT /*+ REPARTITION(100, c), BROADCASTJOIN(u), COALESCE(50) */ * FROM t"), UnresolvedHint("REPARTITION", Seq(Literal(100), UnresolvedAttribute("c")), UnresolvedHint("BROADCASTJOIN", Seq($"u"), UnresolvedHint("COALESCE", Seq(Literal(50)), table("t").select(star()))))) comparePlans( parsePlan( """ |SELECT |/*+ REPARTITION(100, c), BROADCASTJOIN(u), COALESCE(50), REPARTITION(300, c) */ |* FROM t """.stripMargin), UnresolvedHint("REPARTITION", Seq(Literal(100), UnresolvedAttribute("c")), UnresolvedHint("BROADCASTJOIN", Seq($"u"), UnresolvedHint("COALESCE", Seq(Literal(50)), UnresolvedHint("REPARTITION", Seq(Literal(300), UnresolvedAttribute("c")), table("t").select(star())))))) comparePlans( parsePlan("SELECT /*+ REPARTITION_BY_RANGE(c) */ * FROM t"), 
UnresolvedHint("REPARTITION_BY_RANGE", Seq(UnresolvedAttribute("c")), table("t").select(star()))) comparePlans( parsePlan("SELECT /*+ REPARTITION_BY_RANGE(100, c) */ * FROM t"), UnresolvedHint("REPARTITION_BY_RANGE", Seq(Literal(100), UnresolvedAttribute("c")), table("t").select(star()))) } test("SPARK-20854: select hint syntax with expressions") { comparePlans( parsePlan("SELECT /*+ HINT1(a, array(1, 2, 3)) */ * from t"), UnresolvedHint("HINT1", Seq($"a", UnresolvedFunction("array", Literal(1) :: Literal(2) :: Literal(3) :: Nil, false)), table("t").select(star()) ) ) comparePlans( parsePlan("SELECT /*+ HINT1(a, 5, 'a', b) */ * from t"), UnresolvedHint("HINT1", Seq($"a", Literal(5), Literal("a"), $"b"), table("t").select(star()) ) ) comparePlans( parsePlan("SELECT /*+ HINT1('a', (b, c), (1, 2)) */ * from t"), UnresolvedHint("HINT1", Seq(Literal("a"), CreateStruct($"b" :: $"c" :: Nil), CreateStruct(Literal(1) :: Literal(2) :: Nil)), table("t").select(star()) ) ) } test("SPARK-20854: multiple hints") { comparePlans( parsePlan("SELECT /*+ HINT1(a, 1) hint2(b, 2) */ * from t"), UnresolvedHint("HINT1", Seq($"a", Literal(1)), UnresolvedHint("hint2", Seq($"b", Literal(2)), table("t").select(star()) ) ) ) comparePlans( parsePlan("SELECT /*+ HINT1(a, 1),hint2(b, 2) */ * from t"), UnresolvedHint("HINT1", Seq($"a", Literal(1)), UnresolvedHint("hint2", Seq($"b", Literal(2)), table("t").select(star()) ) ) ) comparePlans( parsePlan("SELECT /*+ HINT1(a, 1) */ /*+ hint2(b, 2) */ * from t"), UnresolvedHint("HINT1", Seq($"a", Literal(1)), UnresolvedHint("hint2", Seq($"b", Literal(2)), table("t").select(star()) ) ) ) comparePlans( parsePlan("SELECT /*+ HINT1(a, 1), hint2(b, 2) */ /*+ hint3(c, 3) */ * from t"), UnresolvedHint("HINT1", Seq($"a", Literal(1)), UnresolvedHint("hint2", Seq($"b", Literal(2)), UnresolvedHint("hint3", Seq($"c", Literal(3)), table("t").select(star()) ) ) ) ) } test("TRIM function") { def assertTrimPlans(inputSQL: String, expectedExpression: Expression): Unit = { comparePlans( parsePlan(inputSQL), Project(Seq(UnresolvedAlias(expectedExpression)), OneRowRelation()) ) } intercept("select ltrim(both 'S' from 'SS abc S'", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near 'from'") // expecting {')' intercept("select rtrim(trailing 'S' from 'SS abc S'", Some("PARSE_INPUT_MISMATCHED"), "Syntax error at or near 'from'") // expecting {')' assertTrimPlans( "SELECT TRIM(BOTH '@$%&( )abc' FROM '@ $ % & ()abc ' )", StringTrim(Literal("@ $ % & ()abc "), Some(Literal("@$%&( )abc"))) ) assertTrimPlans( "SELECT TRIM(LEADING 'c []' FROM '[ ccccbcc ')", StringTrimLeft(Literal("[ ccccbcc "), Some(Literal("c []"))) ) assertTrimPlans( "SELECT TRIM(TRAILING 'c&^,.' 
FROM 'bc...,,,&&&ccc')", StringTrimRight(Literal("bc...,,,&&&ccc"), Some(Literal("c&^,."))) ) assertTrimPlans( "SELECT TRIM(BOTH FROM ' bunch o blanks ')", StringTrim(Literal(" bunch o blanks "), None) ) assertTrimPlans( "SELECT TRIM(LEADING FROM ' bunch o blanks ')", StringTrimLeft(Literal(" bunch o blanks "), None) ) assertTrimPlans( "SELECT TRIM(TRAILING FROM ' bunch o blanks ')", StringTrimRight(Literal(" bunch o blanks "), None) ) assertTrimPlans( "SELECT TRIM('xyz' FROM 'yxTomxx')", StringTrim(Literal("yxTomxx"), Some(Literal("xyz"))) ) } test("OVERLAY function") { def assertOverlayPlans(inputSQL: String, expectedExpression: Expression): Unit = { comparePlans( parsePlan(inputSQL), Project(Seq(UnresolvedAlias(expectedExpression)), OneRowRelation()) ) } assertOverlayPlans( "SELECT OVERLAY('Spark SQL' PLACING '_' FROM 6)", new Overlay(Literal("Spark SQL"), Literal("_"), Literal(6)) ) assertOverlayPlans( "SELECT OVERLAY('Spark SQL' PLACING 'CORE' FROM 7)", new Overlay(Literal("Spark SQL"), Literal("CORE"), Literal(7)) ) assertOverlayPlans( "SELECT OVERLAY('Spark SQL' PLACING 'ANSI ' FROM 7 FOR 0)", Overlay(Literal("Spark SQL"), Literal("ANSI "), Literal(7), Literal(0)) ) assertOverlayPlans( "SELECT OVERLAY('Spark SQL' PLACING 'tructured' FROM 2 FOR 4)", Overlay(Literal("Spark SQL"), Literal("tructured"), Literal(2), Literal(4)) ) } test("precedence of set operations") { val a = table("a").select(star()) val b = table("b").select(star()) val c = table("c").select(star()) val d = table("d").select(star()) val query1 = """ |SELECT * FROM a |UNION |SELECT * FROM b |EXCEPT |SELECT * FROM c |INTERSECT |SELECT * FROM d """.stripMargin val query2 = """ |SELECT * FROM a |UNION |SELECT * FROM b |EXCEPT ALL |SELECT * FROM c |INTERSECT ALL |SELECT * FROM d """.stripMargin assertEqual(query1, Distinct(a.union(b)).except(c.intersect(d, isAll = false), isAll = false)) assertEqual(query2, Distinct(a.union(b)).except(c.intersect(d, isAll = true), isAll = true)) // Now disable precedence enforcement to verify the old behaviour. 
withSQLConf(SQLConf.LEGACY_SETOPS_PRECEDENCE_ENABLED.key -> "true") { assertEqual(query1, Distinct(a.union(b)).except(c, isAll = false).intersect(d, isAll = false)) assertEqual(query2, Distinct(a.union(b)).except(c, isAll = true).intersect(d, isAll = true)) } // Explicitly enable the precedence enforcement withSQLConf(SQLConf.LEGACY_SETOPS_PRECEDENCE_ENABLED.key -> "false") { assertEqual(query1, Distinct(a.union(b)).except(c.intersect(d, isAll = false), isAll = false)) assertEqual(query2, Distinct(a.union(b)).except(c.intersect(d, isAll = true), isAll = true)) } } test("create/alter view as insert into table") { val m1 = intercept[ParseException] { parsePlan("CREATE VIEW testView AS INSERT INTO jt VALUES(1, 1)") }.getMessage assert(m1.contains("Syntax error at or near 'INSERT'")) // Multi insert query val m2 = intercept[ParseException] { parsePlan( """ |CREATE VIEW testView AS FROM jt |INSERT INTO tbl1 SELECT * WHERE jt.id < 5 |INSERT INTO tbl2 SELECT * WHERE jt.id > 4 """.stripMargin) }.getMessage assert(m2.contains("Syntax error at or near 'INSERT'")) val m3 = intercept[ParseException] { parsePlan("ALTER VIEW testView AS INSERT INTO jt VALUES(1, 1)") }.getMessage assert(m3.contains("Syntax error at or near 'INSERT'")) // Multi insert query val m4 = intercept[ParseException] { parsePlan( """ |ALTER VIEW testView AS FROM jt |INSERT INTO tbl1 SELECT * WHERE jt.id < 5 |INSERT INTO tbl2 SELECT * WHERE jt.id > 4 """.stripMargin ) }.getMessage assert(m4.contains("Syntax error at or near 'INSERT'")) } test("Invalid insert constructs in the query") { val m1 = intercept[ParseException] { parsePlan("SELECT * FROM (INSERT INTO BAR VALUES (2))") }.getMessage assert(m1.contains("missing ')' at 'BAR'")) val m2 = intercept[ParseException] { parsePlan("SELECT * FROM S WHERE C1 IN (INSERT INTO T VALUES (2))") }.getMessage assert(m2.contains("Syntax error at or near 'IN'")) } test("relation in v2 catalog") { assertEqual("TABLE testcat.db.tab", table("testcat", "db", "tab")) assertEqual("SELECT * FROM testcat.db.tab", table("testcat", "db", "tab").select(star())) assertEqual( """ |WITH cte1 AS (SELECT * FROM testcat.db.tab) |SELECT * FROM cte1 """.stripMargin, cte(table("cte1").select(star()), "cte1" -> ((table("testcat", "db", "tab").select(star()), Seq.empty)))) assertEqual( "SELECT /*+ BROADCAST(tab) */ * FROM testcat.db.tab", table("testcat", "db", "tab").select(star()).hint("BROADCAST", $"tab")) } test("CTE with column alias") { assertEqual( "WITH t(x) AS (SELECT c FROM a) SELECT * FROM t", cte(table("t").select(star()), "t" -> ((table("a").select('c), Seq("x"))))) } test("statement containing terminal semicolons") { assertEqual("select 1;", OneRowRelation().select(1)) assertEqual("select a, b;", OneRowRelation().select('a, 'b)) assertEqual("select a, b from db.c;;;", table("db", "c").select('a, 'b)) assertEqual("select a, b from db.c; ;; ;", table("db", "c").select('a, 'b)) } test("SPARK-32106: TRANSFORM plan") { // verify schema less assertEqual( """ |SELECT TRANSFORM(a, b, c) |USING 'cat' |FROM testData """.stripMargin, ScriptTransformation( "cat", Seq(AttributeReference("key", StringType)(), AttributeReference("value", StringType)()), Project(Seq('a, 'b, 'c), UnresolvedRelation(TableIdentifier("testData"))), ScriptInputOutputSchema(List.empty, List.empty, None, None, List.empty, List.empty, None, None, true)) ) // verify without output schema assertEqual( """ |SELECT TRANSFORM(a, b, c) |USING 'cat' AS (a, b, c) |FROM testData """.stripMargin, ScriptTransformation( "cat", Seq(AttributeReference("a", 
StringType)(), AttributeReference("b", StringType)(), AttributeReference("c", StringType)()), Project(Seq('a, 'b, 'c), UnresolvedRelation(TableIdentifier("testData"))), ScriptInputOutputSchema(List.empty, List.empty, None, None, List.empty, List.empty, None, None, false))) // verify with output schema assertEqual( """ |SELECT TRANSFORM(a, b, c) |USING 'cat' AS (a int, b string, c long) |FROM testData """.stripMargin, ScriptTransformation( "cat", Seq(AttributeReference("a", IntegerType)(), AttributeReference("b", StringType)(), AttributeReference("c", LongType)()), Project(Seq('a, 'b, 'c), UnresolvedRelation(TableIdentifier("testData"))), ScriptInputOutputSchema(List.empty, List.empty, None, None, List.empty, List.empty, None, None, false))) // verify with ROW FORMAT DELIMETED assertEqual( """ |SELECT TRANSFORM(a, b, c) | ROW FORMAT DELIMITED | FIELDS TERMINATED BY '\t' | COLLECTION ITEMS TERMINATED BY '\u0002' | MAP KEYS TERMINATED BY '\u0003' | LINES TERMINATED BY '\n' | NULL DEFINED AS 'null' | USING 'cat' AS (a, b, c) | ROW FORMAT DELIMITED | FIELDS TERMINATED BY '\t' | COLLECTION ITEMS TERMINATED BY '\u0004' | MAP KEYS TERMINATED BY '\u0005' | LINES TERMINATED BY '\n' | NULL DEFINED AS 'NULL' |FROM testData """.stripMargin, ScriptTransformation( "cat", Seq(AttributeReference("a", StringType)(), AttributeReference("b", StringType)(), AttributeReference("c", StringType)()), Project(Seq('a, 'b, 'c), UnresolvedRelation(TableIdentifier("testData"))), ScriptInputOutputSchema( Seq(("TOK_TABLEROWFORMATFIELD", "\t"), ("TOK_TABLEROWFORMATCOLLITEMS", "\u0002"), ("TOK_TABLEROWFORMATMAPKEYS", "\u0003"), ("TOK_TABLEROWFORMATNULL", "null"), ("TOK_TABLEROWFORMATLINES", "\n")), Seq(("TOK_TABLEROWFORMATFIELD", "\t"), ("TOK_TABLEROWFORMATCOLLITEMS", "\u0004"), ("TOK_TABLEROWFORMATMAPKEYS", "\u0005"), ("TOK_TABLEROWFORMATNULL", "NULL"), ("TOK_TABLEROWFORMATLINES", "\n")), None, None, List.empty, List.empty, None, None, false))) // verify with ROW FORMAT SERDE intercept( """ |SELECT TRANSFORM(a, b, c) | ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde' | WITH SERDEPROPERTIES( | "separatorChar" = "\t", | "quoteChar" = "'", | "escapeChar" = "\\") | USING 'cat' AS (a, b, c) | ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde' | WITH SERDEPROPERTIES( | "separatorChar" = "\t", | "quoteChar" = "'", | "escapeChar" = "\\") |FROM testData """.stripMargin, "TRANSFORM with serde is only supported in hive mode") } test("as of syntax") { def testVersion(version: String, plan: LogicalPlan): Unit = { Seq("VERSION", "SYSTEM_VERSION").foreach { keyword => comparePlans(parsePlan(s"SELECT * FROM a.b.c $keyword AS OF $version"), plan) comparePlans(parsePlan(s"SELECT * FROM a.b.c FOR $keyword AS OF $version"), plan) } } testVersion("'Snapshot123456789'", Project(Seq(UnresolvedStar(None)), RelationTimeTravel( UnresolvedRelation(Seq("a", "b", "c")), None, Some("Snapshot123456789")))) testVersion("123456789", Project(Seq(UnresolvedStar(None)), RelationTimeTravel( UnresolvedRelation(Seq("a", "b", "c")), None, Some("123456789")))) def testTimestamp(timestamp: String, plan: LogicalPlan): Unit = { Seq("TIMESTAMP", "SYSTEM_TIME").foreach { keyword => comparePlans(parsePlan(s"SELECT * FROM a.b.c $keyword AS OF $timestamp"), plan) comparePlans(parsePlan(s"SELECT * FROM a.b.c FOR $keyword AS OF $timestamp"), plan) } } testTimestamp("'2019-01-29 00:37:58'", Project(Seq(UnresolvedStar(None)), RelationTimeTravel( UnresolvedRelation(Seq("a", "b", "c")), Some(Literal("2019-01-29 00:37:58")), None))) 
testTimestamp("current_date()", Project(Seq(UnresolvedStar(None)), RelationTimeTravel( UnresolvedRelation(Seq("a", "b", "c")), Some(UnresolvedFunction(Seq("current_date"), Nil, isDistinct = false)), None))) intercept("SELECT * FROM a.b.c TIMESTAMP AS OF col", "timestamp expression cannot refer to any columns") intercept("SELECT * FROM a.b.c TIMESTAMP AS OF (select 1)", "timestamp expression cannot contain subqueries") } test("PERCENTILE_CONT function") { def assertPercentileContPlans(inputSQL: String, expectedExpression: Expression): Unit = { comparePlans( parsePlan(inputSQL), Project(Seq(UnresolvedAlias(expectedExpression)), OneRowRelation()) ) } assertPercentileContPlans( "SELECT PERCENTILE_CONT(0.1) WITHIN GROUP (ORDER BY col)", new Percentile(UnresolvedAttribute("col"), Literal(Decimal(0.1), DecimalType(1, 1))) .toAggregateExpression() ) assertPercentileContPlans( "SELECT PERCENTILE_CONT(0.1) WITHIN GROUP (ORDER BY col DESC)", new Percentile(UnresolvedAttribute("col"), Subtract(Literal(1), Literal(Decimal(0.1), DecimalType(1, 1)))).toAggregateExpression() ) } }
mahak/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
Scala
apache-2.0
49042
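Outside of the test suite, the parser under test can also be exercised directly. A minimal sketch (assuming spark-catalyst on the classpath) that parses a query string into an unresolved logical plan:

import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

object ParsePlanSketch {
  def main(args: Array[String]): Unit = {
    // Produces an unresolved plan: a Project over a Filter over an UnresolvedRelation.
    val plan = CatalystSqlParser.parsePlan("SELECT a, b FROM db.c WHERE x < 1")
    println(plan.treeString)
  }
}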
package sample.stream.experiments import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl._ final case class Author(handle: String) final case class Hashtag(name: String) final case class Tweet(author: Author, timestamp: Long, body: String) { def hashtags: Set[Hashtag] = body.split(" ").collect { case t if t.startsWith("#") => Hashtag(t) }.toSet } object ReactiveTweets { def main(args: Array[String]) { implicit val system = ActorSystem("reactive-tweets") implicit val materializer = ActorMaterializer() // // val akka = Hashtag("#akka") // val tweets: Source[Tweet, Unit] = Source.apply(List(Tweet(Author("abc"), 1234, "#akka"))) // // val authors: Source[Author, Unit] = // tweets // .filter(_.hashtags.contains(akka)) // .map(_.author) // // authors.runWith(Sink.foreach(println)) // // val hashtags: Source[Hashtag, Unit] = tweets.mapConcat(_.hashtags.toList) //does not work // val source = Source(1 to 10) // source.map(_ => 0) // has no effect on source, since it's immutable // source.runWith(Sink.fold(0)(_ + _)) // 55 // // val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended // zeroes.runWith(Sink.fold(0)(_ + _)) // 0 // val source: Source[Int, Unit] = Source(1 to 10) // val sink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _) // // // connect the Source to the Sink, obtaining a RunnableFlow // source.to(sink) // val runnable: RunnableGraph[Unit] = source.to(sink) // // // materialize the flow // val materialized = runnable.run() // // // get the materialized value of the FoldSink // val sum: Future[Int] = materialized.get(sink) // val source = Source(1 to 10) // val sink = Sink.fold[Int, Int](0)(_ + _) // // // materialize the flow, getting the Sinks materialized value // val sum: Future[Int] = source.runWith(sink) // // sum.onSuccess { // case s => println(s) // } val source = Source(1 to 10) source.map(_ => 0) // has no effect on source, since it's immutable // println("*************") // println(source.shape.outlets) // val loggedSource = source.map { elem => // println("***********", elem) // elem // } // loggedSource.log("before-map") // .withAttributes(Attributes.logLevels(onElement = Logging.WarningLevel)) // val runWith = source.runWith(Sink.fold(0)(_ + _)) // runWith.onSuccess { // case s => println(s) // } // 55 // // val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended // val runWith1 = zeroes.runWith(Sink.fold(0)(_ + _)) // runWith1.onSuccess { // case s => println(s) // } // // // val map: Flow[Int, String, Unit] = Flow[Int].map { a => // a.toString // } // //// Flow[Input].map(_.toIn) // Sink // Source(1 to 3) // .map { i => println(s"A: $i"); i } // .map { i => println(s"B: $i"); i } // .map { i => println(s"C: $i"); i } // .runWith(Sink.ignore) import scala.concurrent.duration._ case class Tick() FlowGraph.closed() { implicit b => import FlowGraph.Implicits._ val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count)) Source(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0 Source(initialDelay = 1.second, interval = 1.second, "message!") .conflate(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1 zipper.out ~> Sink.foreach(println) } } }
pallavig/akka-examples
src/main/scala/sample/stream/experiments/ReactiveTweets.scala
Scala
cc0-1.0
3859
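The experiments above mostly circle one idea: a Source is an immutable blueprint, and running it against a Sink yields a materialized value. A condensed sketch of that pattern, written against the same early Akka Streams API (ActorMaterializer, pre-2.4 ActorSystem shutdown) the file uses:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}

import scala.concurrent.Future

object RunnableFlowSketch {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("sketch")
    implicit val materializer = ActorMaterializer()
    import system.dispatcher

    // Mapping to zero returns a *new* source; the original blueprint is untouched.
    val source = Source(1 to 10)
    val zeroes = source.map(_ => 0)

    val sum: Future[Int]  = source.runWith(Sink.fold(0)(_ + _)) // 55
    val zero: Future[Int] = zeroes.runWith(Sink.fold(0)(_ + _)) // 0

    for (s <- sum; z <- zero) {
      println(s"sum = $s, zeroes = $z")
      system.shutdown() // akka 2.3.x-era shutdown, matching the API used above
    }
  }
}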
object Test {
  def foo() {
    try {
      for (i <- 1 until 5) return
    } catch {
      case _: NullPointerException | _: RuntimeException =>
        // was: "catch block may intercept non-local return from method check"
    }
  }
}
loskutov/intellij-scala
testdata/scalacTests/pos/t7433.scala
Scala
apache-2.0
237
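The regression test above targets scalac's warning about catch blocks intercepting non-local returns. A self-contained illustration of the underlying mechanism (the name firstEven and the sample list are made up for this sketch):

object NonLocalReturnSketch {
  // `return` inside the for-body is a non-local return from a closure: it is
  // implemented with a control-flow exception, which an over-broad catch could
  // swallow. A narrow catch like the one below lets it pass through.
  def firstEven(xs: List[Int]): Option[Int] = {
    try {
      for (x <- xs) if (x % 2 == 0) return Some(x)
    } catch {
      case _: NullPointerException => // narrow: does not intercept the non-local return
    }
    None
  }

  def main(args: Array[String]): Unit =
    println(firstEven(List(1, 3, 4, 6))) // Some(4)
}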
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package common.enums

import play.api.libs.json.{Format, Reads, Writes}

object VatRegStatus extends Enumeration {
  val draft = Value
  val locked = Value
  val submitted = Value
  val failed = Value
  val failedRetryable = Value
  val duplicateSubmission = Value

  implicit val format: Format[VatRegStatus.Value] =
    Format(Reads.enumNameReads(VatRegStatus), Writes.enumNameWrites)
}
hmrc/vat-registration-frontend
app/common/enums/VatRegStatus.scala
Scala
apache-2.0
990
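A quick round-trip through the enumeration's implicit Format. This sketch only assumes play-json on the classpath, as the file itself does; the status values are serialized by name.

import common.enums.VatRegStatus
import play.api.libs.json.Json

object VatRegStatusJsonSketch {
  def main(args: Array[String]): Unit = {
    val json = Json.toJson(VatRegStatus.failedRetryable) // JsString("failedRetryable")
    val back = json.as[VatRegStatus.Value]
    println(s"$json round-trips to $back")
  }
}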
package geek.lawsof.physics.lib.block.te.nbt

import net.minecraft.nbt.NBTTagCompound

/**
 * Created by anshuman on 28-05-2014.
 */
trait ISyncableObject {
  def writeToNBT(nbt: NBTTagCompound, name: String)
  def readFromNBT(nbt: NBTTagCompound, name: String)

  var dirty = false
  def markDirty() = dirty = true
  def markClean() = dirty = false
}

class SyncableObjectImpl[T](var value: T,
                            val get: (NBTTagCompound, String, T) => T,
                            val set: (NBTTagCompound, String, T) => Unit) extends ISyncableObject {
  override def writeToNBT(nbt: NBTTagCompound, name: String): Unit = set(nbt, name, value)

  // Keep the value read back from NBT.
  override def readFromNBT(nbt: NBTTagCompound, name: String): Unit = value = get(nbt, name, value)
}
GeckoTheGeek42/TheLawsOfPhysics
src/main/scala/geek/lawsof/physics/lib/block/te/nbt/ISyncableObject.scala
Scala
mit
695
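A hypothetical use of SyncableObjectImpl for an Int counter, wired to the vanilla NBTTagCompound integer accessors; the field name "counter" and the save/load helpers are made up for this sketch.

import geek.lawsof.physics.lib.block.te.nbt.SyncableObjectImpl
import net.minecraft.nbt.NBTTagCompound

object SyncableCounterSketch {
  // Wrap an Int so a tile entity can persist it to NBT and mark it dirty on change.
  val counter = new SyncableObjectImpl[Int](
    0,
    (nbt, name, _) => nbt.getInteger(name),
    (nbt, name, v) => nbt.setInteger(name, v))

  def save(tag: NBTTagCompound): Unit = counter.writeToNBT(tag, "counter")
  def load(tag: NBTTagCompound): Unit = counter.readFromNBT(tag, "counter")
}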
package edu.mit.cci.wikilanguage.main

import java.util.concurrent.Executors

import edu.mit.cci.wikilanguage.db.DAO
import edu.mit.cci.wikilanguage.wiki.{PersonLinkAnnotationProcessor, PersonLinkProcessor}

/**
 * @author pdeboer
 *         First created on 04/12/13 at 11:56
 */
object PersonLinkAnnotator extends App {
  val exec = Executors.newFixedThreadPool(50)

  DAO.getAllPeopleIDs().foreach(id => {
    exec.submit(new Runnable {
      def run() {
        try {
          new PersonLinkAnnotationProcessor().processPerson(id)
        } catch {
          case e: Exception => {
            println("couldn't process " + id)
            e.printStackTrace(System.err)
          }
        }
      }
    })
  })

  exec.shutdown()
}
pdeboer/wikilanguage
src/main/scala/edu/mit/cci/wikilanguage/main/PersonLinkAnnotator.scala
Scala
mit
686
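The annotator above submits all work and then calls shutdown(), which stops intake but does not wait for in-flight tasks. A sketch of the same loop with an explicit awaitTermination; processPerson is stubbed here, and the real processor and DAO are not reproduced.

import java.util.concurrent.{Executors, TimeUnit}

object BoundedAnnotatorSketch {
  // Stand-in for PersonLinkAnnotationProcessor.processPerson(id).
  def processPerson(id: Int): Unit = println(s"processing person $id")

  def main(args: Array[String]): Unit = {
    val exec = Executors.newFixedThreadPool(50)

    (1 to 100).foreach { id =>
      exec.submit(new Runnable {
        def run(): Unit =
          try processPerson(id)
          catch {
            case e: Exception =>
              Console.err.println(s"couldn't process $id")
              e.printStackTrace(System.err)
          }
      })
    }

    exec.shutdown()                          // stop accepting new tasks
    exec.awaitTermination(1, TimeUnit.HOURS) // block until the queue drains (or times out)
  }
}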